-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[MAINTENANCE] run lambda as docker in integration tests (#117)
* draft: update integration tests for running against lambda
* fix: add new line
* feat: run tests as compose
* fix: added a new line
- Loading branch information
1 parent
ee474b6
commit c33ab86
Showing
7 changed files
with
148 additions
and
69 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
version: '3' | ||
services: | ||
data-test: | ||
build: | ||
context: ./functions/data_test | ||
environment: | ||
- BUCKET=dqg-settings-local | ||
- S3_HOST=$S3_HOST | ||
- S3_PORT=4566 | ||
- ENVIRONMENT=local | ||
- REPORTS_WEB=test | ||
- AWS_ACCESS_KEY_ID=test | ||
- AWS_SECRET_ACCESS_KEY=test | ||
- AWS_DEFAULT_REGION=us-east-1 | ||
data-integration-test: | ||
build: | ||
context: ./tests/integration_tests/data_test | ||
environment: | ||
- S3_HOST=$S3_HOST | ||
- LAMBDA_HOST=data-test | ||
- LAMBDA_PORT=8080 | ||
depends_on: | ||
- data-test | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,12 +1,5 @@ | ||
ARG IMAGE_NAME=data-test | ||
ARG VERSION=latest | ||
FROM ${IMAGE_NAME}:${VERSION} | ||
RUN pip install pytest==7.3.1 | ||
ENV ENVIRONMENT local | ||
ENV REPORTS_WEB test | ||
ENV AWS_ACCESS_KEY_ID test | ||
ENV AWS_SECRET_ACCESS_KEY test | ||
ENV AWS_DEFAULT_REGION us-east-1 | ||
FROM python:3.9 | ||
RUN pip install pytest==7.3.1 jsonschema==4.0.1 boto3==1.26.66 requests==2.31.0 | ||
COPY test_data ./test_data | ||
COPY ./*.py ./ | ||
ENTRYPOINT ["pytest", "-qvs", "test_data_test.py"] | ||
CMD ["pytest", "-qvs", "test_data_test.py"] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,61 +1,116 @@ | ||
import data_test as ds | ||
import awswrangler as wr | ||
import os | ||
from jsonschema import validate | ||
import boto3 | ||
import pytest | ||
import requests | ||
import json | ||
import os | ||
|
||
schema = { | ||
"$schema": "http://json-schema.org/draft-04/schema#", | ||
"type": "object", | ||
"properties": { | ||
"path": {"type": "array", "items": [{"type": "string"}]}, | ||
"file": {"type": "string"}, | ||
"profiling": {"type": "string"}, | ||
"test_suite": {"type": "string"}, | ||
"suite_name": {"type": "string"}, | ||
"folder_key": {"type": "string"}, | ||
"run_name": {"type": "string"}, | ||
"validate_id": {"type": "string"} | ||
"path": { | ||
"type": "array", | ||
"items": [ | ||
{ | ||
"type": "string" | ||
} | ||
] | ||
}, | ||
"file": { | ||
"type": "string" | ||
}, | ||
"profiling": { | ||
"type": "string" | ||
}, | ||
"test_suite": { | ||
"type": "string" | ||
}, | ||
"suite_name": { | ||
"type": "string" | ||
}, | ||
"folder_key": { | ||
"type": "string" | ||
}, | ||
"run_name": { | ||
"type": "string" | ||
}, | ||
"validate_id": { | ||
"type": "string" | ||
} | ||
}, | ||
"required": [ | ||
"path", "file", "profiling", "test_suite", "suite_name", | ||
"folder_key", "run_name", "validate_id" | ||
"path", | ||
"file", | ||
"profiling", | ||
"test_suite", | ||
"suite_name", | ||
"folder_key", | ||
"run_name", | ||
"validate_id" | ||
] | ||
} | ||
|
||
|
||
@pytest.fixture(scope="function") | ||
def s3_test_data(request): | ||
url = f"http://{os.environ['S3_HOST']}:4566" | ||
wr.config.s3_endpoint_url = url | ||
b_name = "dataqa" | ||
bucket_name = "dataqa" | ||
file_name = request.param | ||
file_path = f"{b_name}/{file_name}" | ||
local_path = f"./test_data/{file_name}" | ||
event = { | ||
"run_name": "local_test", | ||
"source_root": b_name, | ||
"source_data": file_path, | ||
"engine": "s3" | ||
} | ||
s3 = boto3.resource("s3", endpoint_url=url) | ||
qa_bucket_name = os.environ['BUCKET'] | ||
gx_config_local_path = "great_expectations/great_expectations.yml" | ||
config_path = f"{qa_bucket_name}/great_expectations/great_expectations.yml" | ||
s3.Bucket(qa_bucket_name).download_file(config_path, gx_config_local_path) | ||
s3.create_bucket(Bucket=b_name) | ||
s3.Object(b_name, file_path).put(Body=open(local_path, 'rb')) | ||
result = ds.handler(event, {}) | ||
validate(instance=result, schema=schema) | ||
file_path = f"{bucket_name}/{file_name}" | ||
s3 = _create_boto_s3_resource() | ||
_upload_file_to_s3(s3, bucket_name, file_path, file_name) | ||
response = _invoke_lambda(file_path) | ||
json_response = json.loads(response.text) | ||
validate(instance=json_response, schema=schema) | ||
yield file_path | ||
s3.Object(b_name, file_path).delete() | ||
_delete_s3_file(s3, bucket_name, file_path) | ||
|
||
|
||
@pytest.mark.parametrize("s3_test_data", ["titanic.csv", | ||
"titanic.parquet", | ||
"titanic.json", | ||
"titanic_nested.json"], | ||
@pytest.mark.parametrize("s3_test_data", | ||
["titanic.csv", | ||
"titanic.parquet", | ||
"titanic.json", | ||
"titanic_nested.json"], | ||
indirect=True) | ||
def test_data_test(s3_test_data): | ||
def test_data_test(s3_test_data: str): | ||
pass | ||
|
||
|
||
def _delete_s3_file(s3, bucket_name: str, file_path: str): | ||
s3.Object(bucket_name, file_path).delete() | ||
|
||
|
||
def _upload_file_to_s3(s3, bucket_name: str, file_path: str, file_name: str): | ||
local_path = f"./test_data/{file_name}" | ||
s3.create_bucket(Bucket=bucket_name) | ||
s3.Object(bucket_name, file_path).put(Body=open(local_path, 'rb')) | ||
|
||
|
||
def _create_boto_s3_resource(): | ||
host = os.environ["S3_HOST"] | ||
url = f"http://{host}:4566" | ||
s3 = boto3.resource("s3", endpoint_url=url, | ||
aws_access_key_id="test", | ||
aws_secret_access_key="test") | ||
return s3 | ||
|
||
|
||
def _invoke_lambda(file_path: str): | ||
lambda_host = os.environ["LAMBDA_HOST"] | ||
lambda_port = os.environ["LAMBDA_PORT"] | ||
lambda_url = f"http://{lambda_host}:{lambda_port}/2015-03-31/functions/function/invocations" | ||
|
||
payload = json.dumps({ | ||
"run_name": "local_test", | ||
"source_root": "dataqa", | ||
"source_data": f"{file_path}", | ||
"engine": "s3" | ||
}) | ||
headers = { | ||
'Content-Type': 'application/json' | ||
} | ||
response = requests.request("POST", | ||
lambda_url, | ||
headers=headers, | ||
data=payload) | ||
return response |