Skip to content

Commit

Permalink
test(openapi): add configuration driven openapi test
Browse files Browse the repository at this point in the history
* examples includes for structured properties and timeline
  • Loading branch information
david-leifker committed Aug 11, 2024
1 parent f92236f commit a2514f9
Show file tree
Hide file tree
Showing 10 changed files with 1,771 additions and 68 deletions.
1 change: 1 addition & 0 deletions smoke-test/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -18,3 +18,4 @@ types-requests>=2.28.11.6,<=2.31.0.3
types-PyYAML
# https://github.com/docker/docker-py/issues/3256
requests<=2.31.0
deepdiff
12 changes: 8 additions & 4 deletions smoke-test/tests/openapi/README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@

# Goal

This test is simply a collection of json files which contain request/response sequences intended to
This test is configuration driven by json files which contain request/response sequences intended to
detect unexpected regressions between releases.

Files can be executed in parallel but each request within the file is sequential.
Expand All @@ -18,11 +18,15 @@ expected status code and optional body.
[
{
"request": {

"urn": "",
"description": "",
"method": "",
"json": {}
},
"response": {
"status_code": 200,
"body": {}
"status_codes": [200],
"exclude_regex_paths": [],
"json": {}
}
}
]
Expand Down
111 changes: 83 additions & 28 deletions smoke-test/tests/openapi/test_openapi.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
import logging
import requests_wrapper as requests
import concurrent.futures
import glob
import json
import logging

import pytest
from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph
from deepdiff import DeepDiff

from tests.utils import (
get_gms_url,
)
import requests_wrapper as requests
from tests.utils import get_gms_url

logger = logging.getLogger(__name__)

Expand All @@ -19,38 +18,94 @@ def test_healthchecks(wait_for_healthchecks):
pass


def list_files():
    """Return the paths of all JSON test fixtures under tests/openapi."""
    fixture_pattern = "tests/openapi/**/*.json"
    return glob.glob(fixture_pattern)


def load_tests():
for test_fixture in list_files():
def load_tests(fixture_glob="tests/openapi/**/*.json"):
    """Yield ``(fixture_path, parsed_json)`` for every fixture matching *fixture_glob*.

    :param fixture_glob: glob pattern selecting the JSON fixture files.
    """
    # recursive=True is required for "**" to match across directory levels;
    # without it glob treats "**" like a plain "*" and silently skips
    # fixtures nested more than one directory deep.
    for test_fixture in glob.glob(fixture_glob, recursive=True):
        with open(test_fixture) as f:
            yield (test_fixture, json.load(f))


def execute_request(request):
    """Execute one HTTP request described by a fixture dict and return the response.

    The dict's ``method`` (defaulting to ``post``) and ``url`` keys are consumed;
    every remaining key (e.g. ``json``) is passed through as a request kwarg.
    Note: this mutates *request* by popping keys from it.
    """
    session = requests.Session()

    # The original text popped "method" unconditionally and THEN checked for
    # its presence, so the "post" default was unreachable and a fixture
    # without "method" raised KeyError. pop with a default fixes both.
    method = request.pop("method", "post")

    url = get_gms_url() + request.pop("url")

    return getattr(session, method)(url, **request)


def evaluate_test(test_name, test_data):
    """Run one fixture's request/response steps sequentially, asserting each response.

    :param test_name: fixture file path, used only for error reporting.
    :param test_data: list of ``{"request": ..., "response": ...}`` step dicts.
    :raises AssertionError: when a status code or response body does not match.
    """
    try:
        for idx, req_resp in enumerate(test_data):
            # Pop the human-readable description so it is not forwarded
            # as an HTTP request kwarg by execute_request.
            description = req_resp["request"].pop("description", None)
            # Capture the url before execute_request pops it from the dict.
            url = req_resp["request"]["url"]
            actual_resp = execute_request(req_resp["request"])
            try:
                expected = req_resp.get("response", {})
                # Fixtures may pin explicit status codes; otherwise accept
                # the usual success codes.
                assert actual_resp.status_code in expected.get(
                    "status_codes", [200, 202, 204]
                )
                if "response" in req_resp:
                    if "json" in req_resp["response"]:
                        exclude_regex_paths = req_resp["response"].get(
                            "exclude_regex_paths", []
                        )
                        diff = DeepDiff(
                            actual_resp.json(),
                            req_resp["response"]["json"],
                            exclude_regex_paths=exclude_regex_paths,
                        )
                        assert not diff
                    else:
                        logger.warning("No expected response json found")
            except Exception:
                # Lazy %-style args avoid building the message unless logged.
                logger.error(
                    "Error executing step: %s, url: %s, test: %s", idx, url, test_name
                )
                if description:
                    logger.error("Step %s Description: %s", idx, description)
                logger.error("Response content: %s", actual_resp.content)
                # Bare raise preserves the original traceback without
                # appending an extra re-raise frame.
                raise
    except Exception:
        logger.error("Error executing test: %s", test_name)
        raise


def run_tests(fixture_glob, num_workers=3):
    """Run every fixture matched by *fixture_glob* on a thread pool.

    Fixture files execute in parallel; the steps inside each file stay
    sequential because one future handles a whole file.
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as pool:
        pending = [
            pool.submit(evaluate_test, fixture_name, fixture_data)
            for fixture_name, fixture_data in load_tests(fixture_glob=fixture_glob)
        ]
        for finished in concurrent.futures.as_completed(pending):
            # result() re-raises any exception from the worker thread.
            logger.info(finished.result())


@pytest.mark.dependency(depends=["test_healthchecks"])
def test_openapi():
    """Run every JSON fixture: execute each request in order and compare the
    response against the fixture's expected status code and body."""
    for test_fixture, test_data in load_tests():
        try:
            for req_resp in test_data:
                actual_resp = execute_request(req_resp["request"])
                try:
                    assert actual_resp.status_code == req_resp["response"]["status_code"]
                    # NOTE(review): this compares the response OBJECT to the expected
                    # body dict — presumably actual_resp.json() (or .content) was
                    # intended; as written it relies on the response type's __eq__.
                    # Confirm against the fixture format.
                    assert actual_resp == req_resp["response"]["body"]
                except Exception as e:
                    logger.error(f"Error executing test fixture: {test_fixture}")
                    logger.error(f"Response body: {actual_resp.content}")
                    raise e
        except Exception as e:
            # Outer handler: tags the failing fixture file before re-raising.
            logger.error(f"Error executing test fixture: {test_fixture}")
            raise e
def test_openapi_all():
    """Run every OpenAPI fixture under tests/openapi with a 10-thread pool."""
    run_tests("tests/openapi/**/*.json", num_workers=10)


# @pytest.mark.dependency(depends=["test_healthchecks"])
# def test_openapi_v1():
# run_tests(fixture_glob="tests/openapi/v1/*.json", num_workers=4)
#
#
# @pytest.mark.dependency(depends=["test_healthchecks"])
# def test_openapi_v2():
# run_tests(fixture_glob="tests/openapi/v2/*.json", num_workers=4)
#
#
# @pytest.mark.dependency(depends=["test_healthchecks"])
# def test_openapi_v3():
# run_tests(fixture_glob="tests/openapi/v3/*.json", num_workers=4)
Empty file.
Loading

0 comments on commit a2514f9

Please sign in to comment.