Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

test new way of wiring up tests #304

Merged
merged 30 commits into from
Feb 14, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
82f9de7
test new way of wiring up tests
sherwoodf Feb 10, 2025
ab37ff0
update ci commands
sherwoodf Feb 10, 2025
279bb04
update docker workflow
sherwoodf Feb 10, 2025
751ad13
change mac docker installation
sherwoodf Feb 10, 2025
b10456e
remove windows workflow
sherwoodf Feb 10, 2025
9cce4cb
trying to use colima for docker
sherwoodf Feb 12, 2025
6b7fb01
try using python-on-whales to start docker container
sherwoodf Feb 12, 2025
9324f3b
try installing docker first
sherwoodf Feb 12, 2025
63986f6
fix typo
sherwoodf Feb 12, 2025
beb8203
change python command
sherwoodf Feb 12, 2025
4a9fa3b
fix typo
sherwoodf Feb 12, 2025
02b919f
make mac command equivalent to linux
sherwoodf Feb 12, 2025
44e0818
add to github path
sherwoodf Feb 12, 2025
62afa2b
update python commands
sherwoodf Feb 12, 2025
7c37f52
add docker-compose plugin to brew install
sherwoodf Feb 12, 2025
ef87af0
try using colima
sherwoodf Feb 12, 2025
18b721f
try macos 13 runners
sherwoodf Feb 12, 2025
3f6012d
switch to just testing on linux since mac runners do not support nest…
sherwoodf Feb 12, 2025
77b77db
fix mac os to latest
sherwoodf Feb 12, 2025
8a4c828
try colima on macos-13
sherwoodf Feb 12, 2025
bfee5a2
fix macos-13
sherwoodf Feb 12, 2025
f19a94c
try --detach
sherwoodf Feb 12, 2025
1bb926c
give up on running docker on mac
sherwoodf Feb 12, 2025
dd1d914
added test for all export option
sherwoodf Feb 12, 2025
ffd5011
added order test
sherwoodf Feb 12, 2025
309a66a
clean up leftovers from previous commits, and use --wait to wait for …
sherwoodf Feb 13, 2025
18334bc
change to use makefiles
sherwoodf Feb 14, 2025
b2f7da0
whitespace change
sherwoodf Feb 14, 2025
07bb29f
fixes
sherwoodf Feb 14, 2025
dc4e65c
update make commands
sherwoodf Feb 14, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 24 additions & 15 deletions .github/workflows/core.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -38,33 +38,42 @@ jobs:
run: poetry build
- name: Run pytest
run: poetry run pytest
ci-make:
ci-test-with-api:
strategy:
fail-fast: false
matrix:
python-version: ["3.11"]
poetry-version: ["1.4.2"]
# NOTE - WE DO NOT TEST ON MAC/WINDOWS!!
# The current tests dockerize the test runner, so there is not much point on testing across platforms
# TODO: we want to change this to more closely follow how users actually run tests (setting up API first, then running tests)
# Ticket: https://app.clickup.com/t/8697kxtgx
# Note we do not test on windows or mac. Currently, github mac runners do not support nested virtualization, so we cannot run docker on them (since mac docker virtualises linux)
# See https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#limitations-for-arm64-macos-runners
os: [ubuntu-20.04]
project:
[
bia-export,
]
runs-on: ${{ matrix.os }}
defaults:
run:
working-directory: ${{ matrix.project }}
steps:
- uses: actions/checkout@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1 # Set up Docker Buildx for building and pushing Docker images
- name: Cache Docker layers
uses: actions/cache@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Run poetry image
uses: abatilo/actions-poetry@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: run makefile
run: make ${{matrix.project}}.test
poetry-version: ${{ matrix.poetry-version }}
- name: Run docker
run: make -C ../ api.up
- name: set up poetry
run: |
poetry env use python
poetry install
- name: Run pytest
run: poetry run pytest
# Always cleanup - even for cancelled jobs
- name: Docker Compose Down
run: make -C ../ api.down
if: ${{ always() }}

9 changes: 5 additions & 4 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
.PHONY: api.version client.generate bia-export.test
.PHONY: api.version client.generate api.up api.down

api.version:
@echo $(shell grep '^version =' api/pyproject.toml | awk -F\" '{print $$2}')
Expand All @@ -12,7 +12,8 @@ client.generate:
client.examples:
docker compose --profile client_examples up --build --force-recreate --remove-orphans --abort-on-container-exit

api.up:
docker compose up -d --build --wait

# Note that the github CI is set up to expect Make commands for testing of the form: make ${{matrix.project}}.test
bia-export.test:
docker compose --profile export_test up --build --force-recreate --remove-orphans --abort-on-container-exit
api.down:
docker compose down
24 changes: 18 additions & 6 deletions bia-export/bia_export/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,11 @@
app.add_typer(website, name="website")


DEFAULT_WEBSITE_STUDY_FILE_NAME = "bia-study-metadata.json"
DEFAULT_WEBSITE_IMAGE_FILE_NAME = "bia-image-metadata.json"
DEFAULT_WEBSITE_DATASET_FOR_IMAGE_FILE_NAME = "bia-dataset-metadata-for-images.json"


@website.command("all")
def generate_all(
id_list: Annotated[
Expand All @@ -47,18 +52,25 @@ def generate_all(
"-c",
),
] = None,
output_directory: Annotated[
Optional[Path],
typer.Option(
"--out_dir",
"-o",
),
] = None,
):
settings = Settings()

if not id_list:
id_list = get_study_ids(root_directory)

logger.info("Exporting study pages")
website_study(id_list=id_list, root_directory=root_directory, cache=cache)
website_study(id_list=id_list, root_directory=root_directory, cache=cache, output_filename=(output_directory / DEFAULT_WEBSITE_STUDY_FILE_NAME if output_directory else None))
logger.info("Exporting image pages")
website_image(id_list=id_list, root_directory=root_directory)
website_image(id_list=id_list, root_directory=root_directory, output_filename=(output_directory / DEFAULT_WEBSITE_IMAGE_FILE_NAME if output_directory else None))
logger.info("Exporting datasets for study pages")
datasets_for_website_image(id_list=id_list, root_directory=root_directory)
datasets_for_website_image(id_list=id_list, root_directory=root_directory, output_filename=(output_directory / DEFAULT_WEBSITE_DATASET_FOR_IMAGE_FILE_NAME if output_directory else None))


@website.command("study")
Expand All @@ -72,7 +84,7 @@ def website_study(
"--out_file",
"-o",
),
] = Path("bia-study-metadata.json"),
] = Path(DEFAULT_WEBSITE_STUDY_FILE_NAME),
root_directory: Annotated[
Optional[Path],
typer.Option(
Expand Down Expand Up @@ -117,7 +129,7 @@ def website_image(
"--out_file",
"-o",
),
] = Path("bia-image-metadata.json"),
] = Path(DEFAULT_WEBSITE_IMAGE_FILE_NAME),
root_directory: Annotated[
Optional[Path],
typer.Option(
Expand Down Expand Up @@ -151,7 +163,7 @@ def datasets_for_website_image(
"--out_file",
"-o",
),
] = Path("bia-dataset-metadata-for-images.json"),
] = Path(DEFAULT_WEBSITE_DATASET_FOR_IMAGE_FILE_NAME),
root_directory: Annotated[
Optional[Path],
typer.Option(
Expand Down
63 changes: 58 additions & 5 deletions bia-export/test/conftest.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,16 @@
from dotenv import load_dotenv
from pathlib import Path
import pytest
from bia_test_data.data_to_api import add_objects_to_api
from bia_test_data.data_to_api import add_objects_to_api, get_object_creation_client
from bia_export.settings import Settings
from bia_shared_datamodels.uuid_creation import create_study_uuid
from pathlib import Path
import json
import os
from glob import glob


def pytest_configure(config: pytest.Config):
    """Pytest hook: make sure API_BASE_URL is set before tests read Settings.

    ``os.environ.setdefault`` only writes when the variable is absent, so an
    explicit override coming from the environment (e.g. CI) is never
    clobbered.  The previous ``if not os.environ.get(...)`` guard duplicated
    exactly that behaviour and has been dropped as redundant.
    """
    os.environ.setdefault("API_BASE_URL", "http://localhost:8080")


@pytest.fixture(scope="session")
Expand All @@ -28,4 +27,58 @@ def data_in_api():
json_dict = json.load(object_file)
object_list.append(json_dict)

add_objects_to_api(setttings.api_base_url, object_list)

private_client = get_object_creation_client(setttings.api_base_url)

add_objects_to_api(private_client, object_list)




@pytest.fixture(scope="session")
def api_studies_in_expected_order():
    """Create four test studies in the API and return them in expected export order.

    Loads the base S-BIADTEST study fixture, derives four studies with distinct
    accession ids and release dates, pushes them to the test API, and returns
    the study dicts in the order the website "study" export is expected to
    emit them (newest ``release_date`` first).

    Returns:
        list[dict]: the four study dicts in expected export order.
    """
    settings = Settings()

    base_study_path = (
        Path(__file__).parent
        / "input_data"
        / "study"
        / "S-BIADTEST"
        / "a2fdbd58-ee11-4cd9-bc6a-f3d3da7fff71.json"
    )
    with open(base_study_path, "r") as object_file:
        base_study_dict: dict = json.load(object_file)

    # (accession_id, uuid seed, release_date) for each derived study.
    # NOTE(review): the first study's uuid seed ("S-BIADTEST111") does not
    # match its accession id ("S-BIADTEST1"); presumably deliberate to keep
    # the uuid distinct from the other studies — confirm with the author.
    study_specs = [
        ("S-BIADTEST1", "S-BIADTEST111", "2024-01-01"),
        ("S-BIADTEST22", "S-BIADTEST22", "2024-01-01"),
        ("S-BIADTEST333", "S-BIADTEST333", "2024-01-03"),
        ("S-BIADTEST4444", "S-BIADTEST4444", "2024-01-02"),
    ]
    studies = [
        base_study_dict.copy()
        | {
            "accession_id": accession_id,
            "uuid": str(create_study_uuid(uuid_seed)),
            "release_date": release_date,
        }
        for accession_id, uuid_seed, release_date in study_specs
    ]

    private_client = get_object_creation_client(settings.api_base_url)
    add_objects_to_api(private_client, studies)

    # Expected export order: newest release_date first; the two studies that
    # tie on 2024-01-01 are expected as study_2 before study_1.
    study_1, study_2, study_3, study_4 = studies
    return [study_3, study_4, study_2, study_1]
82 changes: 81 additions & 1 deletion bia-export/test/test_api_export.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,66 @@
from typer.testing import CliRunner
from pathlib import Path
from bia_export.cli import app
import pytest
from bia_export.cli import (
app,
DEFAULT_WEBSITE_STUDY_FILE_NAME,
DEFAULT_WEBSITE_IMAGE_FILE_NAME,
DEFAULT_WEBSITE_DATASET_FOR_IMAGE_FILE_NAME,
)
import json
import os

runner = CliRunner()


# Note that pytest fixture data_in_api is unused in test code,
# but requested in order to guarantee that data is in the test api
# before running the test.
def test_cli_export_export_all_data_contains_at_least_expected_objects(
    tmp_path: Path, data_in_api
):
    """Run ``website all`` and verify each of the three output files contains
    at least the objects recorded in the expected-output fixtures."""
    outpath = tmp_path.resolve()

    result = runner.invoke(
        app,
        [
            "website",
            "all",
            "-o",
            str(outpath),
        ],
    )

    assert result.exit_code == 0
    # One file per export type: studies, images, datasets-for-images.
    assert len(os.listdir(tmp_path)) == 3

    def check_file_contains_expected_object(outfile: Path, expected_output: Path):
        # Superset check: every expected key must be present with an equal
        # value, but the export is allowed to contain additional objects.
        with open(outfile, "r") as f:
            json_result = json.load(f)

        with open(expected_output) as f:
            json_expected = json.load(f)

        for key, value in json_expected.items():
            assert key in json_result
            assert value == json_result[key]

    expected_dir = Path(__file__).parent / "output_data"

    check_file_contains_expected_object(
        outpath / DEFAULT_WEBSITE_STUDY_FILE_NAME,
        expected_dir / "bia-study-metadata.json",
    )

    check_file_contains_expected_object(
        outpath / DEFAULT_WEBSITE_IMAGE_FILE_NAME,
        expected_dir / "bia-image-metadata.json",
    )

    check_file_contains_expected_object(
        outpath / DEFAULT_WEBSITE_DATASET_FOR_IMAGE_FILE_NAME,
        expected_dir / "bia-dataset-metadata-for-images.json",
    )


# Note that pytest fixture data_in_api is unused in test code,
# but requested in order to guarantee that data is in the test api before running the test
def test_cli_export_website_studies(tmp_path: Path, data_in_api):
Expand Down Expand Up @@ -79,3 +134,28 @@ def test_cli_export_dataset_for_website_images(tmp_path: Path, data_in_api):
json_expected = json.load(f)

assert json_result == json_expected



def test_cli_export_study_ordering(
    tmp_path: Path, api_studies_in_expected_order: list[dict]
):
    """The ``website study`` export must list studies in the expected order."""
    outfile = tmp_path.joinpath("bia-dataset-metadata.json").resolve()

    result = runner.invoke(
        app,
        [
            "website",
            "study",
            "-o",
            outfile,
        ],
    )
    assert result.exit_code == 0

    expected_accession_order = [
        study["accession_id"] for study in api_studies_in_expected_order
    ]

    with open(outfile, "r") as f:
        exported: dict = json.load(f)

    # Subsequence check: a single shared iterator over the exported keys is
    # consumed left-to-right, so this holds iff the expected accession ids
    # appear in the export in this relative order.
    remaining_keys = iter(exported.keys())
    assert all(acc_id in remaining_keys for acc_id in expected_accession_order)
6 changes: 3 additions & 3 deletions bia-test-data/bia_test_data/data_to_api.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
from pathlib import Path
from time import sleep
from urllib3.exceptions import MaxRetryError
from dotenv import dotenv_values
from bia_integrator_api.api import PrivateApi
from bia_integrator_api import Configuration, ApiClient, exceptions
Expand Down Expand Up @@ -207,9 +209,7 @@ def order_object_for_api(object_list: list[dict]):
return ordered_object_list


def add_objects_to_api(api_base_url, object_list: list[dict]):
private_client = get_object_creation_client(api_base_url)

def add_objects_to_api(private_client, object_list: list[dict]):
ordered_object_list = order_object_for_api(object_list)

for bia_object_dict in ordered_object_list:
Expand Down
14 changes: 1 addition & 13 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -59,16 +59,4 @@ services:
volumes:
- ./clients/python:/mnt/share
profiles:
- client_examples
export-test:
depends_on:
bia-integrator-api:
condition: service_healthy
environment:
- API_BASE_URL=http://api:8080
container_name: export_test
image: bia-integrator-api
command: bash -c "echo $PWD && poetry install && poetry run pytest -vv"
working_dir: /bia-integrator/bia-export
profiles:
- export_test
- client_examples
Loading