Skip to content

Commit

Permalink
Merge pull request #392 from sanger/dpl-058-versioned-api
Browse files Browse the repository at this point in the history
DPL-058-01: Versioned API
  • Loading branch information
sdjmchattie authored Jul 21, 2021
2 parents 44daa62 + df80b90 commit b465b26
Show file tree
Hide file tree
Showing 11 changed files with 46 additions and 27 deletions.
2 changes: 1 addition & 1 deletion .release-version
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.24.0
1.25.0
9 changes: 3 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -293,14 +293,11 @@ you start these dependencies here, there's no need to also attempt to do so in
the Lighthouse repository. They are the same resources in both and the second
one to be started will show exceptions about ports already being allocated:

docker-compose -f dependencies/docker-compose.yml up -d
./dependencies/up.sh

The `-d` is optional and runs the containers in the background. If you want to
observe the logs in real time or just prefer to keep the process going in
another terminal, exclude the option. If you've already used the option and
want to shut the databases back down, you can do so with:
When you want to shut the databases back down, you can do so with:

docker-compose -f dependencies/docker-compose.yml down
./dependencies/down.sh

To build and run the container for Crawler, run from the root of the repository:

Expand Down
17 changes: 11 additions & 6 deletions crawler/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,20 +22,25 @@ def create_app(config_object: str = None) -> Flask:
# setup logging
logging.config.dictConfig(app.config["LOGGING"])

if app.config.get("ENABLE_CHERRYPICKER_ENDPOINTS", False):
from crawler.blueprints import cherrypicker_test_data

app.register_blueprint(cherrypicker_test_data.bp)

if app.config.get("SCHEDULER_RUN", False):
scheduler.init_app(app)
scheduler.start()

setup_routes(app)

@app.get("/health")
def health_check():
def _():
"""Checks the health of Crawler by checking that there is a scheduled job to run Crawler periodically"""
if scheduler.get_job(SCHEDULER_JOB_ID_RUN_CRAWLER):
return "Crawler is working", HTTPStatus.OK

return "Crawler is not working correctly", HTTPStatus.INTERNAL_SERVER_ERROR

return app


def setup_routes(app):
    """Mount the versioned API routes on *app*.

    The v1 cherrypicker endpoints are only registered when the
    ``ENABLE_CHERRYPICKER_ENDPOINTS`` config flag is truthy; the import is
    deferred so the blueprint module is not loaded when disabled.
    """
    if not app.config.get("ENABLE_CHERRYPICKER_ENDPOINTS", False):
        return

    from crawler.routes.v1 import routes as v1_routes

    app.register_blueprint(v1_routes.bp, url_prefix="/v1")
File renamed without changes.
Empty file.
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import logging
from datetime import datetime, timezone

from flask import Blueprint, request
from flask import request

from crawler.constants import FIELD_STATUS_COMPLETED, FLASK_ERROR_MISSING_PARAMETERS, FLASK_ERROR_UNEXPECTED
from crawler.helpers.responses import bad_request, internal_server_error, ok
Expand All @@ -10,11 +10,8 @@

logger = logging.getLogger(__name__)

bp = Blueprint("cherrypicker", __name__)


@bp.post("/cherrypick-test-data")
def generate_test_data_endpoint() -> FlaskResponse:
def generate_test_data_v1() -> FlaskResponse:
"""Generates cherrypicker test data for a number of plates with defined
numbers of positives per plate.
Expand Down
Empty file added crawler/routes/v1/__init__.py
Empty file.
11 changes: 11 additions & 0 deletions crawler/routes/v1/routes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from flask import Blueprint

from crawler.routes.common.cherrypicker_test_data import generate_test_data_v1
from crawler.types import FlaskResponse

bp = Blueprint("v1_routes", __name__)


@bp.post("/cherrypick-test-data")
def generate_test_data_endpoint() -> FlaskResponse:
    """Thin v1 route wrapper: delegate to the shared implementation."""
    response = generate_test_data_v1()
    return response
4 changes: 4 additions & 0 deletions dependencies/down.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash

# Shut down the dependency containers (databases etc.) declared in the
# docker-compose.yml that lives next to this script.
#
# Bail out if cd fails (shellcheck SC2164): otherwise docker-compose would
# run against whatever compose file happens to be in the caller's directory.
cd "$(dirname "$0")" || exit 1
docker-compose down
4 changes: 4 additions & 0 deletions dependencies/up.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash

# Start the dependency containers (databases etc.) declared in the
# docker-compose.yml that lives next to this script, detached (-d).
#
# Bail out if cd fails (shellcheck SC2164): otherwise docker-compose would
# run against whatever compose file happens to be in the caller's directory.
cd "$(dirname "$0")" || exit 1
docker-compose up -d
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@
from crawler.helpers.general_helpers import is_found_in_list
from crawler.jobs.cherrypicker_test_data import CherrypickerDataError

barcode_metadata = [
ENDPOINT = "/v1/cherrypick-test-data"
BARCODE_METADATA = [
["Plate-1", "positive samples: 30"],
["Plate-2", "positive samples: 50"],
]
Expand All @@ -18,7 +19,7 @@

@pytest.fixture
def logger_messages():
with patch("crawler.blueprints.cherrypicker_test_data.logger") as logger:
with patch("crawler.routes.common.cherrypicker_test_data.logger") as logger:
infos = []
logger.info.side_effect = lambda msg: infos.append(msg)

Expand All @@ -30,13 +31,13 @@ def logger_messages():

@pytest.fixture
def process_mock():
with patch("crawler.blueprints.cherrypicker_test_data.process") as process:
with patch("crawler.routes.common.cherrypicker_test_data.process") as process:
yield process


@pytest.mark.parametrize("json", [{}, {"run_id": None}])
def test_generate_endpoint_invalid_json(json, client, logger_messages):
response = client.post("/cherrypick-test-data", json=json)
response = client.post(ENDPOINT, json=json)
assert response.status_code == HTTPStatus.BAD_REQUEST
assert "run_id" not in response.json
assert "plates" not in response.json
Expand All @@ -47,12 +48,12 @@ def test_generate_endpoint_invalid_json(json, client, logger_messages):


def test_generate_endpoint_success(client, logger_messages, process_mock):
process_mock.return_value = barcode_metadata
process_mock.return_value = BARCODE_METADATA
test_run_id = "0123456789abcdef01234567"
response = client.post("/cherrypick-test-data", json={"run_id": test_run_id})
response = client.post(ENDPOINT, json={"run_id": test_run_id})
assert response.status_code == HTTPStatus.OK
assert response.json["run_id"] == test_run_id
assert response.json["plates"] == barcode_metadata
assert response.json["plates"] == BARCODE_METADATA
assert response.json["status"] == FIELD_STATUS_COMPLETED
assert "timestamp" in response.json
assert "errors" not in response.json
Expand All @@ -63,7 +64,7 @@ def test_generate_endpoint_handles_CherrypickerDataError_exception(client, logge
test_error_message = "Test Error!"
test_error = CherrypickerDataError(test_error_message)
process_mock.side_effect = test_error
response = client.post("/cherrypick-test-data", json={"run_id": "test_id"})
response = client.post(ENDPOINT, json={"run_id": "test_id"})
assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
assert "run_id" not in response.json
assert "plates" not in response.json
Expand All @@ -76,7 +77,7 @@ def test_generate_endpoint_handles_CherrypickerDataError_exception(client, logge
def test_generate_endpoint_handles_generic_exception(client, logger_messages, process_mock):
test_error = ConnectionError()
process_mock.side_effect = test_error
response = client.post("/cherrypick-test-data", json={"run_id": "test_id"})
response = client.post(ENDPOINT, json={"run_id": "test_id"})
assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
assert "run_id" not in response.json
assert "plates" not in response.json
Expand Down

0 comments on commit b465b26

Please sign in to comment.