Skip to content

Commit

Permalink
debug
Browse files Browse the repository at this point in the history
  • Loading branch information
Kamforka committed May 1, 2024
1 parent 3c279bf commit d51db25
Show file tree
Hide file tree
Showing 14 changed files with 226 additions and 14 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/_build-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.11
python-version: 3.12
- name: Install build dependencies
run: pip install --no-cache-dir -U pip .['build']
- name: Build package
Expand Down
7 changes: 1 addition & 6 deletions .github/workflows/_integration-tests.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
name: integration-tests
on:
workflow_call:
secrets:
DOCKER_TOKEN:
required: true
jobs:
integration-tests:
name: Run integration tests
Expand All @@ -13,10 +10,8 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.11
python-version: 3.12
- name: Install dependencies
run: pip install --no-cache-dir -U pip .['test']
- name: Docker login
run: docker login -u kamforka -p ${{ secrets.DOCKER_TOKEN }}
- name: Run integration tests
run: scripts/ci.py --test
2 changes: 1 addition & 1 deletion .github/workflows/_static-checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/_upload-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.11
python-version: 3.12
- name: Install build dependencies
run: pip install --no-cache-dir -U pip .['build']
- name: Upload to PyPI
Expand Down
61 changes: 61 additions & 0 deletions .github/workflows/integrator.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Builds and (on main) pushes the thehive4py integrator Docker image.
# The build job only runs when files under docker/thehive4py-integrator change.
name: integrator-image
on:
  push:
    branches:
      - main
  pull_request:
jobs:
  changes:
    name: Change detection
    runs-on: ubuntu-latest
    outputs:
      integrator: ${{ steps.filter.outputs.integrator }}
    steps:
      - uses: actions/checkout@v4
      - uses: dorny/paths-filter@v3
        id: filter
        with:
          filters: |
            integrator:
              - 'docker/thehive4py-integrator/**'
  build:
    name: Build and push
    needs: changes
    # Skip the expensive multi-arch build unless integrator files changed.
    if: ${{ needs.changes.outputs.integrator == 'true' }}
    runs-on: ubuntu-latest
    env:
      INTEGRATOR_BUILD_CTX: docker/thehive4py-integrator
      INTEGRATOR_IMAGE_NAME: kamforka/thehive4py-integrator
      # Quoted so generic YAML tooling never re-types the version string.
      THEHIVE_VERSION: "5.3.0"

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # Compute the two image tags (stable + commit-hash suffixed) once,
      # and expose them to later steps via step outputs.
      - name: Set variables
        id: variables
        run: |
          echo "integrator_image_fullname=$INTEGRATOR_IMAGE_NAME:thehive-$THEHIVE_VERSION" >> "$GITHUB_OUTPUT"
          echo "integrator_image_fullname_with_hash=$INTEGRATOR_IMAGE_NAME:thehive-$THEHIVE_VERSION-$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT"

      # QEMU + Buildx enable the linux/arm64 half of the multi-arch build.
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: kamforka
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          context: ${{ env.INTEGRATOR_BUILD_CTX }}
          platforms: linux/amd64,linux/arm64
          # Only push from main; PR builds just verify the image builds.
          push: ${{ github.ref == 'refs/heads/main' }}
          tags: ${{ steps.variables.outputs.integrator_image_fullname }},${{ steps.variables.outputs.integrator_image_fullname_with_hash }}
          build-args: |
            THEHIVE_VERSION=${{ env.THEHIVE_VERSION }}
2 changes: 0 additions & 2 deletions .github/workflows/main-cicd.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,6 @@ jobs:
uses: ./.github/workflows/_static-checks.yml
integration-tests:
uses: ./.github/workflows/_integration-tests.yml
secrets:
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
build-package:
uses: ./.github/workflows/_build-package.yml
upload-package:
Expand Down
50 changes: 50 additions & 0 deletions docker/thehive4py-integrator/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
FROM alpine:3.17 as base

# BUILDER STAGE: download and unpack Elasticsearch and TheHive, so the
# final stage only copies the extracted trees (no download tooling shipped).
FROM base as builder

ARG ES_VERSION=7.17.19
ARG THEHIVE_VERSION=5.3.0

# curl for downloads; full unzip for the TheHive archive below (the alpine
# base only ships the limited busybox unzip applet).
RUN apk update && apk upgrade && apk add curl unzip

## ES DOWNLOAD
ARG ES_DOWNLOAD_URL=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ES_VERSION}-linux-x86_64.tar.gz

RUN curl -Lo /tmp/elasticsearch.tgz ${ES_DOWNLOAD_URL} \
    && tar -xzf /tmp/elasticsearch.tgz -C /tmp \
    && mv /tmp/elasticsearch-${ES_VERSION} /tmp/elasticsearch

## THEHIVE DOWNLOAD
ARG THEHIVE_DOWNLOAD_URL=https://archives.strangebee.com/zip/thehive-${THEHIVE_VERSION}-1.zip

# Download and extract in a single layer so the zip never persists in one.
RUN curl -Lo /tmp/thehive.zip ${THEHIVE_DOWNLOAD_URL} \
    && unzip -qo /tmp/thehive.zip -d /tmp \
    && mv /tmp/thehive-${THEHIVE_VERSION}-1 /tmp/thehive

# FINAL STAGE
FROM base
RUN apk update && apk upgrade && apk add --no-cache openjdk11-jre-headless bash su-exec curl

## ES SETUP
COPY --from=builder /tmp/elasticsearch /usr/share/elasticsearch
COPY configs/elasticsearch.yml /usr/share/elasticsearch/config/elasticsearch.yml

# Dedicated unprivileged user; drop the bundled x-pack-ml natives to shrink
# the image (ML is disabled in elasticsearch.yml anyway).
RUN adduser -u 1000 -g 1000 -Dh /usr/share/elasticsearch elasticsearch \
    && mkdir -p /usr/share/elasticsearch/data \
    && chown -R elasticsearch:elasticsearch /usr/share/elasticsearch \
    && rm -rf /usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64

## THEHIVE SETUP
COPY --from=builder /tmp/thehive /opt/thehive/
COPY configs/thehive.conf /opt/thehive/conf/application.conf

RUN adduser -u 1001 -g 1001 -Dh /opt/thehive thehive \
    && mkdir /var/log/thehive \
    && chown -R thehive:thehive /opt/thehive /var/log/thehive

## ENTRYPOINT
COPY entrypoint.sh /
RUN chmod +x /entrypoint.sh
EXPOSE 9000
# Exec form: the entrypoint runs as PID 1 without a wrapping shell, so it
# receives container stop signals directly.
ENTRYPOINT ["/entrypoint.sh"]
7 changes: 7 additions & 0 deletions docker/thehive4py-integrator/configs/elasticsearch.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Minimal single-node Elasticsearch config for the thehive4py test image.
# Bind on all interfaces inside the container.
http.host: 0.0.0.0
transport.host: 0.0.0.0
# Single-node discovery: no cluster bootstrap checks or peer discovery.
discovery.type: single-node
cluster.name: thehive4py
# Security and ML are disabled: this instance only backs integration tests.
xpack.security.enabled: false
xpack.ml.enabled: false
# TheHive's index management uses both inline and stored scripts.
script.allowed_types: "inline,stored"
22 changes: 22 additions & 0 deletions docker/thehive4py-integrator/configs/thehive.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# TheHive application config for the integration-test image only —
# the hard-coded secret is intentional and must never be reused elsewhere.
play.http.secret.key="supersecret"
play.http.parser.maxDiskBuffer: 20MB

db {
provider: janusgraph
janusgraph {
# Embedded BerkeleyDB storage: no external database container needed.
storage {
backend: berkeleyje
directory: /opt/thehive/db
}

# Full-text index backed by the Elasticsearch instance running in the
# same container (see entrypoint.sh), hence localhost.
index.search {
backend: elasticsearch
hostname: ["127.0.0.1"]
}
}
}

# Attachment storage on the local filesystem inside the container.
storage {
provider: localfs
localfs.location: /opt/thehive/data
}
35 changes: 35 additions & 0 deletions docker/thehive4py-integrator/entrypoint.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
#!/bin/bash

# Poll the local Elasticsearch HTTP endpoint until it returns 200, or abort
# the container start once the timeout elapses.
wait_for_elastic() {
    local health_url="http://localhost:9200/_cat/health"
    local timeout=30

    # Declare and assign separately so the $(...) exit status isn't masked
    # by `local` (SC2155).
    local start_time current_time elapsed_time status_code
    start_time=$(date +%s)
    while true; do
        current_time=$(date +%s)
        elapsed_time=$((current_time - start_time))

        if [ "$elapsed_time" -ge "$timeout" ]; then
            echo "error: elastic couldn't start in $timeout seconds"
            exit 1
        fi

        # curl prints only the HTTP status code; while the port is still
        # refusing connections it prints 000, which compares as != 200.
        status_code=$(curl -so /dev/null -w '%{http_code}' "$health_url")
        if [ "$status_code" -eq 200 ] 2>/dev/null; then
            return
        fi

        sleep 0.25
    done
}


echo "starting elasticsearch in the background"
export ES_JAVA_HOME=$(dirname "$(dirname "$(readlink -f "$(which java)")")")
su-exec elasticsearch /usr/share/elasticsearch/bin/elasticsearch > /dev/null 2>&1 &

echo "waiting for elastic to start up..."
wait_for_elastic

echo "starting thehive in the foreground"
# exec: replace this shell so TheHive becomes PID 1 and receives container
# stop signals (SIGTERM) directly instead of them dying with the wrapper.
exec su-exec thehive /opt/thehive/bin/thehive -Dconfig.file=/opt/thehive/conf/application.conf
4 changes: 2 additions & 2 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
@pytest.fixture(scope="session")
def test_config():
return TestConfig(
image_name="kamforka/thehive4py-integrator:thehive-5.2.11",
container_name="thehive4py-integration-tests",
image_name="kamforka/thehive4py-integrator:thehive-5.3.0",
container_name="thehive4py-integration-tester",
user="[email protected]",
password="secret",
admin_org="admin",
Expand Down
2 changes: 2 additions & 0 deletions tests/test_case_endpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,6 +197,7 @@ def test_share_and_unshare(self, thehive: TheHiveApi, test_case: OutputCase):
thehive.case.unshare(case_id=test_case["_id"], organisation_ids=[organisation])
assert len(thehive.case.list_shares(case_id=test_case["_id"])) == 1

@pytest.mark.skip(reason="integrator container only supports a single org ")
def test_share_and_remove_share(self, thehive: TheHiveApi, test_case: OutputCase):
organisation = "share-org"
share: InputShare = {"organisation": organisation}
Expand All @@ -220,6 +221,7 @@ def test_update_share(self, thehive: TheHiveApi, test_case: OutputCase):
updated_share = thehive.case.share(case_id=test_case["_id"], shares=[share])[0]
assert updated_share["profileName"] == update_profile

@pytest.mark.skip(reason="integrator container only supports a single org ")
def test_share_and_set_share(self, thehive: TheHiveApi, test_case: OutputCase):
organisation = "share-org"
share: InputShare = {"organisation": organisation}
Expand Down
1 change: 1 addition & 0 deletions tests/test_user_endpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ def test_delete(self, thehive: TheHiveApi, test_user: OutputUser):
with pytest.raises(TheHiveError):
thehive.user.get(user_id=user_id)

@pytest.mark.skip(reason="integrator container only supports a single org ")
def test_set_organisations(
self, test_config: TestConfig, thehive: TheHiveApi, test_user: OutputUser
):
Expand Down
43 changes: 42 additions & 1 deletion tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import requests

from thehive4py.client import TheHiveApi
from thehive4py.helpers import now_to_ts
from thehive4py.query.filters import Eq


Expand Down Expand Up @@ -129,6 +130,45 @@ def _reinit_hive_admin_org(hive_url: str, test_config: TestConfig) -> None:
)


def _initialize_hive_container(url: str, test_config: TestConfig) -> None:
    """Bring a freshly spawned TheHive container into a test-ready state.

    Recreates the container if its fallback license has already expired,
    creates the main test organisation when missing, and attaches the admin
    user to both the admin and main organisations.

    Raises:
        RuntimeError: if any initialization step fails; chained to the
            original exception so the underlying traceback is preserved.
    """
    hive = TheHiveApi(url=url, username=test_config.user, password=test_config.password)

    try:
        admin_user = hive.user.get_current()
        current_license = hive.session.make_request("GET", "/api/v1/license/current")

        # An expired fallback license makes the instance unusable for tests:
        # destroy and respawn the container (spawn re-runs this initializer
        # against the new container; the current `hive` client then continues
        # against the replacement on the same host port).
        if current_license["fallback"]["expiresAt"] < now_to_ts():
            _destroy_container(container_name=test_config.container_name)
            spawn_hive_container(test_config=test_config)

        # Create the main test organisation only if it doesn't exist yet,
        # so re-initialization of a reused container is idempotent.
        if not hive.organisation.find(filters=Eq("name", test_config.main_org)):
            hive.organisation.create(
                organisation={
                    "name": test_config.main_org,
                    "description": "main organisation for tests",
                }
            )

        # NOTE(review): both memberships are flagged `default: True`; if
        # TheHive treats `default` as exclusive, only one (likely the last)
        # will stick — confirm which org should be the admin user's default.
        hive.user.set_organisations(
            user_id=admin_user["_id"],
            organisations=[
                {
                    "organisation": test_config.admin_org,
                    "profile": "admin",
                    "default": True,
                },
                {
                    "organisation": test_config.main_org,
                    "profile": "org-admin",
                    "default": True,
                },
            ],
        )

    except Exception as exc:
        # Chain with `from exc` so the original traceback isn't discarded;
        # RuntimeError is a subclass of Exception, so existing callers that
        # catch Exception keep working.
        raise RuntimeError(
            f"Could not initialize TheHive container due to: {exc}"
        ) from exc


def spawn_hive_container(test_config: TestConfig) -> str:
if not _is_container_exist(container_name=test_config.container_name):
_run_container(
Expand All @@ -141,6 +181,7 @@ def spawn_hive_container(test_config: TestConfig) -> str:
_destroy_container(container_name=test_config.container_name)
raise RuntimeError("Unable to startup test container for TheHive")

_initialize_hive_container(url=url, test_config=test_config)
return url


Expand All @@ -149,7 +190,7 @@ def reinit_hive_container(test_config: TestConfig) -> None:
with ThreadPoolExecutor() as executor:
for organisation in [
test_config.main_org,
test_config.share_org,
# test_config.share_org,
]:
executor.submit(_reinit_hive_org, hive_url, test_config, organisation)
executor.submit(_reinit_hive_admin_org, hive_url, test_config)

0 comments on commit d51db25

Please sign in to comment.