From 5833c84d0ab00808f269c5d14b625eb9f772cd36 Mon Sep 17 00:00:00 2001
From: Alexandre Girard
Date: Fri, 4 Nov 2022 18:14:18 -0700
Subject: [PATCH] Revert "reset to master"

This reverts commit 3fa6a004c1df0bac1ccbd414bd93b32c0b40ab16.
---
 .github/actions/start-aws-runner/action.yml | 2 +-
 .../source-alpha-vantage/.dockerignore | 6 -
 .../source-alpha-vantage/Dockerfile | 38 --
 .../connectors/source-alpha-vantage/README.md | 79 ----
 .../source-alpha-vantage/__init__.py | 3 -
 .../acceptance-test-docker.sh | 16 -
 .../source-alpha-vantage/build.gradle | 9 -
 .../integration_tests/__init__.py | 3 -
 .../integration_tests/abnormal_state.json | 5 -
 .../integration_tests/acceptance.py | 16 -
 .../integration_tests/configured_catalog.json | 31 --
 .../integration_tests/invalid_config.json | 8 -
 .../integration_tests/sample_config.json | 8 -
 .../integration_tests/sample_state.json | 5 -
 .../connectors/source-alpha-vantage/main.py | 13 -
 .../source-alpha-vantage/requirements.txt | 2 -
 .../connectors/source-alpha-vantage/setup.py | 29 --
 .../source_alpha_vantage/__init__.py | 8 -
 .../source_alpha_vantage/alpha_vantage.yaml | 107 -----
 .../object_dpath_extractor.py | 88 ----
 .../schemas/time_series_daily.json | 21 -
 .../schemas/time_series_daily_adjusted.json | 30 --
 .../schemas/time_series_intraday.json | 21 -
 .../schemas/time_series_monthly.json | 21 -
 .../schemas/time_series_monthly_adjusted.json | 27 --
 .../schemas/time_series_weekly.json | 21 -
 .../schemas/time_series_weekly_adjusted.json | 27 --
 .../source_alpha_vantage/source.py | 18 -
 .../source_alpha_vantage/spec.yaml | 53 ---
 .../unit_tests/test_object_dpath_extractor.py | 172 --------
 .../connectors/source-datadog/.dockerignore | 6 -
 .../connectors/source-datadog/Dockerfile | 16 -
 .../connectors/source-datadog/README.md | 109 -----
 .../source-datadog/acceptance-test-config.yml | 25 --
 .../source-datadog/acceptance-test-docker.sh | 15 -
 .../connectors/source-datadog/build.gradle | 13 -
 .../integration_tests/__init__.py | 3 -
 .../integration_tests/abnormal_state.json | 5 -
 .../integration_tests/acceptance.py | 14 -
 .../integration_tests/configured_catalog.json | 73 ----
 .../inc_configured_catalog.json | 17 -
 .../integration_tests/invalid_config.json | 7 -
 .../integration_tests/sample_config.json | 8 -
 .../connectors/source-datadog/main.py | 13 -
 .../source-datadog/requirements.txt | 3 -
 .../connectors/source-datadog/setup.py | 26 --
 .../source_datadog/schemas/audit_logs.json | 49 ---
 .../source_datadog/schemas/dashboards.json | 55 ---
 .../source_datadog/schemas/downtimes.json | 103 -----
 .../schemas/incident_teams.json | 92 ----
 .../source_datadog/schemas/incidents.json | 135 ------
 .../source_datadog/schemas/logs.json | 61 ---
 .../source_datadog/schemas/metrics.json | 5 -
 .../schemas/synthetic_tests.json | 105 -----
 .../source_datadog/schemas/users.json | 76 ----
 .../source-datadog/source_datadog/source.py | 64 ---
 .../source-datadog/source_datadog/spec.yaml | 56 ---
 .../source-datadog/source_datadog/streams.py | 288 -------------
 .../source-datadog/unit_tests/__init__.py | 3 -
 .../source-datadog/unit_tests/conftest.py | 392 ------------------
 .../source-datadog/unit_tests/test_source.py | 58 ---
 .../source-datadog/unit_tests/test_streams.py | 100 -----
 .../connectors/source-greenhouse/Dockerfile | 10 +-
 .../connectors/source-greenhouse/build.gradle | 4 +
 .../connectors/source-greenhouse/setup.py | 7 +-
 .../connectors/source-gridly/.dockerignore | 6 -
 .../connectors/source-gridly/Dockerfile | 38 --
 .../connectors/source-gridly/README.md | 132 ------
 .../source-gridly/acceptance-test-config.yml | 20 -
 .../source-gridly/acceptance-test-docker.sh | 16 -
 .../connectors/source-gridly/build.gradle | 9 -
 .../integration_tests/__init__.py | 3 -
 .../integration_tests/abnormal_state.json | 5 -
 .../integration_tests/acceptance.py | 16 -
 .../integration_tests/configured_catalog.json | 13 -
 .../integration_tests/invalid_config.json | 3 -
 .../integration_tests/sample_state.json | 5 -
 .../connectors/source-gridly/main.py | 13 -
 .../connectors/source-gridly/requirements.txt | 2 -
 .../connectors/source-gridly/setup.py | 29 --
 .../source-gridly/source_gridly/__init__.py | 8 -
 .../source-gridly/source_gridly/helpers.py | 136 ------
 .../source-gridly/source_gridly/source.py | 125 ------
 .../source-gridly/source_gridly/spec.yaml | 16 -
 .../source-gridly/unit_tests/__init__.py | 3 -
 .../source-gridly/unit_tests/test_helpers.py | 74 ----
 .../source-gridly/unit_tests/test_source.py | 23 -
 .../connectors/source-sendgrid/Dockerfile | 9 +-
 .../connectors/source-sendgrid/build.gradle | 4 +
 .../connectors/source-sendgrid/setup.py | 6 +-
 .../src/main/groovy/airbyte-docker.gradle | 21 +-
 tools/bin/build_image.sh | 16 +-
 tools/bin/ci_integration_test.sh | 3 -
 93 files changed, 57 insertions(+), 3600 deletions(-)
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/.dockerignore
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/Dockerfile
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/README.md
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/__init__.py
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/acceptance-test-docker.sh
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/build.gradle
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/integration_tests/__init__.py
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/integration_tests/abnormal_state.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/integration_tests/acceptance.py
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/integration_tests/configured_catalog.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/integration_tests/invalid_config.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/integration_tests/sample_config.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/integration_tests/sample_state.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/main.py
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/requirements.txt
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/setup.py
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/__init__.py
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/alpha_vantage.yaml
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/object_dpath_extractor.py
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_daily.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_daily_adjusted.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_intraday.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_monthly.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_monthly_adjusted.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_weekly.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_weekly_adjusted.json
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/source.py
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/spec.yaml
 delete mode 100644 airbyte-integrations/connectors/source-alpha-vantage/unit_tests/test_object_dpath_extractor.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/.dockerignore
 delete mode 100644 airbyte-integrations/connectors/source-datadog/Dockerfile
 delete mode 100644 airbyte-integrations/connectors/source-datadog/README.md
 delete mode 100644 airbyte-integrations/connectors/source-datadog/acceptance-test-config.yml
 delete mode 100644 airbyte-integrations/connectors/source-datadog/acceptance-test-docker.sh
 delete mode 100644 airbyte-integrations/connectors/source-datadog/build.gradle
 delete mode 100644 airbyte-integrations/connectors/source-datadog/integration_tests/__init__.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/integration_tests/abnormal_state.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/integration_tests/acceptance.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/integration_tests/configured_catalog.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/integration_tests/inc_configured_catalog.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/integration_tests/invalid_config.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/integration_tests/sample_config.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/main.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/requirements.txt
 delete mode 100644 airbyte-integrations/connectors/source-datadog/setup.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/audit_logs.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/dashboards.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/downtimes.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/incident_teams.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/incidents.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/logs.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/metrics.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/synthetic_tests.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/schemas/users.json
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/source.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/spec.yaml
 delete mode 100644 airbyte-integrations/connectors/source-datadog/source_datadog/streams.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/unit_tests/__init__.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/unit_tests/conftest.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/unit_tests/test_source.py
 delete mode 100644 airbyte-integrations/connectors/source-datadog/unit_tests/test_streams.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/.dockerignore
 delete mode 100644 airbyte-integrations/connectors/source-gridly/Dockerfile
 delete mode 100644 airbyte-integrations/connectors/source-gridly/README.md
 delete mode 100644 airbyte-integrations/connectors/source-gridly/acceptance-test-config.yml
 delete mode 100644 airbyte-integrations/connectors/source-gridly/acceptance-test-docker.sh
 delete mode 100644 airbyte-integrations/connectors/source-gridly/build.gradle
 delete mode 100644 airbyte-integrations/connectors/source-gridly/integration_tests/__init__.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/integration_tests/abnormal_state.json
 delete mode 100644 airbyte-integrations/connectors/source-gridly/integration_tests/acceptance.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/integration_tests/configured_catalog.json
 delete mode 100644 airbyte-integrations/connectors/source-gridly/integration_tests/invalid_config.json
 delete mode 100644 airbyte-integrations/connectors/source-gridly/integration_tests/sample_state.json
 delete mode 100644 airbyte-integrations/connectors/source-gridly/main.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/requirements.txt
 delete mode 100644 airbyte-integrations/connectors/source-gridly/setup.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/source_gridly/__init__.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/source_gridly/helpers.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/source_gridly/source.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/source_gridly/spec.yaml
 delete mode 100644 airbyte-integrations/connectors/source-gridly/unit_tests/__init__.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/unit_tests/test_helpers.py
 delete mode 100644 airbyte-integrations/connectors/source-gridly/unit_tests/test_source.py

diff --git a/.github/actions/start-aws-runner/action.yml b/.github/actions/start-aws-runner/action.yml
index ed50c903122b..c3b94df610b6 100644
--- a/.github/actions/start-aws-runner/action.yml
+++ b/.github/actions/start-aws-runner/action.yml
@@ -9,7 +9,7 @@ inputs:
     required: true
   ec2-image-id:
     # github-self-hosted-runner-ubuntu-20-100g-disk-with-cypress-deps
-    default: "ami-005924fb76f7477ce"
+    default: "ami-0f23be2f917510c26"
     required: true
   ec2-instance-type:
     default: "c5.2xlarge"
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/.dockerignore b/airbyte-integrations/connectors/source-alpha-vantage/.dockerignore
deleted file mode 100644
index 5a1cff6bff4a..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/.dockerignore
+++ /dev/null
@@ -1,6 +0,0 @@
-*
-!Dockerfile
-!main.py
-!source_alpha_vantage
-!setup.py
-!secrets
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/Dockerfile b/airbyte-integrations/connectors/source-alpha-vantage/Dockerfile
deleted file mode 100644
index 7735d2dd4e93..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/Dockerfile
+++ /dev/null
@@ -1,38 +0,0 @@
-FROM python:3.9.11-alpine3.15 as base
-
-# build and load all requirements
-FROM base as builder
-WORKDIR /airbyte/integration_code
-
-# upgrade pip to the latest version
-RUN apk --no-cache upgrade \
-    && pip install --upgrade pip \
-    && apk --no-cache add tzdata build-base
-
-
-COPY setup.py ./
-# install necessary packages to a temporary folder
-RUN pip install --prefix=/install .
-
-# build a clean environment
-FROM base
-WORKDIR /airbyte/integration_code
-
-# copy all loaded and built libraries to a pure basic image
-COPY --from=builder /install /usr/local
-# add default timezone settings
-COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime
-RUN echo "Etc/UTC" > /etc/timezone
-
-# bash is installed for more convenient debugging.
-RUN apk --no-cache add bash
-
-# copy payload code only
-COPY main.py ./
-COPY source_alpha_vantage ./source_alpha_vantage
-
-ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
-ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
-
-LABEL io.airbyte.version=0.1.0
-LABEL io.airbyte.name=airbyte/source-alpha-vantage
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/README.md b/airbyte-integrations/connectors/source-alpha-vantage/README.md
deleted file mode 100644
index bb054c73dec7..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/README.md
+++ /dev/null
@@ -1,79 +0,0 @@
-# Alpha Vantage Source
-
-This is the repository for the Alpha Vantage configuration based source connector.
-For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/alpha-vantage).
-
-## Local development
-
-#### Building via Gradle
-You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow.
-
-To build using Gradle, from the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:source-alpha-vantage:build
-```
-
-#### Create credentials
-**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/alpha-vantage)
-to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_alpha_vantage/spec.yaml` file.
-Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
-See `integration_tests/sample_config.json` for a sample config file.
-
-**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source alpha-vantage test creds`
-and place them into `secrets/config.json`.
-
-### Locally running the connector docker image
-
-#### Build
-First, make sure you build the latest Docker image:
-```
-docker build . -t airbyte/source-alpha-vantage:dev
-```
-
-You can also build the connector image via Gradle:
-```
-./gradlew :airbyte-integrations:connectors:source-alpha-vantage:airbyteDocker
-```
-When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
-the Dockerfile.
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/source-alpha-vantage:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-alpha-vantage:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-alpha-vantage:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-alpha-vantage:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-## Testing
-
-#### Acceptance Tests
-Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
-If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py.
-
-To run your integration tests with docker
-
-### Using gradle to run tests
-All commands should be run from airbyte project root.
-To run unit tests:
-```
-./gradlew :airbyte-integrations:connectors:source-alpha-vantage:unitTest
-```
-To run acceptance and custom integration tests:
-```
-./gradlew :airbyte-integrations:connectors:source-alpha-vantage:integrationTest
-```
-
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies between two groups, dependencies that are:
-* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
-* required for the testing need to go to `TEST_REQUIREMENTS` list
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing unit and integration tests.
-1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
-1. Create a Pull Request.
-1. Pat yourself on the back for being an awesome contributor.
-1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/__init__.py b/airbyte-integrations/connectors/source-alpha-vantage/__init__.py
deleted file mode 100644
index 1100c1c58cf5..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-alpha-vantage/acceptance-test-docker.sh
deleted file mode 100644
index c51577d10690..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/acceptance-test-docker.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env sh
-
-# Build latest connector image
-docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-)
-
-# Pull latest acctest image
-docker pull airbyte/source-acceptance-test:latest
-
-# Run
-docker run --rm -it \
-    -v /var/run/docker.sock:/var/run/docker.sock \
-    -v /tmp:/tmp \
-    -v $(pwd):/test_input \
-    airbyte/source-acceptance-test \
-    --acceptance-test-config /test_input
-
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/build.gradle b/airbyte-integrations/connectors/source-alpha-vantage/build.gradle
deleted file mode 100644
index f8d1a7d118a1..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/build.gradle
+++ /dev/null
@@ -1,9 +0,0 @@
-plugins {
-    id 'airbyte-python'
-    id 'airbyte-docker'
-    id 'airbyte-source-acceptance-test'
-}
-
-airbytePython {
-    moduleDirectory 'source_alpha_vantage'
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/__init__.py b/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/__init__.py
deleted file mode 100644
index 1100c1c58cf5..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/abnormal_state.json
deleted file mode 100644
index 52b0f2c2118f..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/abnormal_state.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  "todo-stream-name": {
-    "todo-field-name": "todo-abnormal-value"
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/acceptance.py
deleted file mode 100644
index 1302b2f57e10..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/acceptance.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-
-import pytest
-
-pytest_plugins = ("source_acceptance_test.plugin",)
-
-
-@pytest.fixture(scope="session", autouse=True)
-def connector_setup():
-    """This fixture is a placeholder for external resources that acceptance test might require."""
-    # TODO: setup test dependencies if needed. otherwise remove the TODO comments
-    yield
-    # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/configured_catalog.json
deleted file mode 100644
index dfafcb1e168f..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/configured_catalog.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  "streams": [
-    {
-      "stream": {
-        "name": "time_series_daily",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    },
-    {
-      "stream": {
-        "name": "time_series_weekly",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    },
-    {
-      "stream": {
-        "name": "time_series_monthly",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    }
-  ]
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/invalid_config.json
deleted file mode 100644
index b8e7d27bcb7d..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/invalid_config.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "api_key": "INVALID_KEY",
-  "symbol": "FAKE_SYMBOL",
-  "interval": "60min",
-  "outputsize": "compact",
-  "slice": "incorrect_slice",
-  "adjusted": false
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/sample_config.json
deleted file mode 100644
index 1f1be557c956..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/sample_config.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "api_key": "YOUR_API_KEY",
-  "symbol": "TSLA",
-  "interval": "60min",
-  "outputsize": "compact",
-  "slice": "year1month1",
-  "adjusted": false
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/sample_state.json
deleted file mode 100644
index 3587e579822d..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/integration_tests/sample_state.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  "todo-stream-name": {
-    "todo-field-name": "value"
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/main.py b/airbyte-integrations/connectors/source-alpha-vantage/main.py
deleted file mode 100644
index c4d96e59a041..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/main.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-
-import sys
-
-from airbyte_cdk.entrypoint import launch
-from source_alpha_vantage import SourceAlphaVantage
-
-if __name__ == "__main__":
-    source = SourceAlphaVantage()
-    launch(source, sys.argv[1:])
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/requirements.txt b/airbyte-integrations/connectors/source-alpha-vantage/requirements.txt
deleted file mode 100644
index 0411042aa091..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
--e ../../bases/source-acceptance-test
--e .
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/setup.py b/airbyte-integrations/connectors/source-alpha-vantage/setup.py
deleted file mode 100644
index 5bc457370080..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/setup.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-
-from setuptools import find_packages, setup
-
-MAIN_REQUIREMENTS = [
-    "airbyte-cdk~=0.4",
-]
-
-TEST_REQUIREMENTS = [
-    "pytest~=6.1",
-    "pytest-mock~=3.6.1",
-    "source-acceptance-test",
-]
-
-setup(
-    name="source_alpha_vantage",
-    description="Source implementation for Alpha Vantage.",
-    author="Airbyte",
-    author_email="contact@airbyte.io",
-    packages=find_packages(),
-    install_requires=MAIN_REQUIREMENTS,
-    package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]},
-    extras_require={
-        "tests": TEST_REQUIREMENTS,
-    },
-)
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/__init__.py b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/__init__.py
deleted file mode 100644
index 086d696c81dd..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-
-from .source import SourceAlphaVantage
-
-__all__ = ["SourceAlphaVantage"]
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/alpha_vantage.yaml b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/alpha_vantage.yaml
deleted file mode 100644
index 027e51ef11f3..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/alpha_vantage.yaml
+++ /dev/null
@@ -1,107 +0,0 @@
-version: "0.1.0"
-
-definitions:
-  schema_loader:
-    type: JsonSchema
-    file_path: "./source_alpha_vantage/schemas/{{ options['name'] }}.json"
-  selector:
-    type: RecordSelector
-    extractor:
-      class_name: source_alpha_vantage.object_dpath_extractor.ObjectDpathExtractor
-      field_pointer:
-        - "{{ options['dpath'] }}"
-      inject_key_as_field: "{{ options['key_field'] }}"
-  requester:
-    url_base: "https://www.alphavantage.co"
-    http_method: "GET"
-    authenticator:
-      type: NoAuth
-    request_options_provider:
-      request_parameters:
-        apikey: "{{ config['api_key'] }}"
-        symbol: "{{ config['symbol'] }}"
-        function: "{{ options['function'] }}"
-        datatype: json
-        outputsize: "{{ config['outputsize'] }}"
-        interval: "{{ config['interval'] }}"
-        adjusted: "{{ config['adjusted'] }}"
-  retriever:
-    record_selector:
-      $ref: "*ref(definitions.selector)"
-    paginator:
-      type: NoPagination
-    requester:
-      $ref: "*ref(definitions.requester)"
-  base_stream:
-    retriever:
-      $ref: "*ref(definitions.retriever)"
-  time_series_intraday_stream:
-    $ref: "*ref(definitions.base_stream)"
-    $options:
-      name: "time_series_intraday"
-      dpath: "Time Series ({{ config['interval'] }})"
-      function: "TIME_SERIES_INTRADAY"
-      path: "/query"
-      key_field: "timestamp"
-  time_series_daily_stream:
-    $ref: "*ref(definitions.base_stream)"
-    $options:
-      name: "time_series_daily"
-      dpath: "Time Series (Daily)"
-      function: "TIME_SERIES_DAILY"
-      path: "/query"
-      key_field: "date"
-  time_series_daily_adjusted_stream:
-    $ref: "*ref(definitions.base_stream)"
-    $options:
-      name: "time_series_daily_adjusted"
-      dpath: "Time Series (Daily)"
-      function: "TIME_SERIES_DAILY_ADJUSTED"
-      path: "/query"
-      key_field: "date"
-  time_series_weekly_stream:
-    $ref: "*ref(definitions.base_stream)"
-    $options:
-      name: "time_series_weekly"
-      dpath: "Weekly Time Series"
-      function: "TIME_SERIES_WEEKLY"
-      path: "/query"
-      key_field: "date"
-  time_series_weekly_adjusted_stream:
-    $ref: "*ref(definitions.base_stream)"
-    $options:
-      name: "time_series_weekly_adjusted"
-      dpath: "Weekly Adjusted Time Series"
-      function: "TIME_SERIES_WEEKLY_ADJUSTED"
-      path: "/query"
-      key_field: "date"
-  time_series_monthly_stream:
-    $ref: "*ref(definitions.base_stream)"
-    $options:
-      name: "time_series_monthly"
-      dpath: "Monthly Time Series"
-      function: "TIME_SERIES_MONTHLY"
-      path: "/query"
-      key_field: "date"
-  time_series_monthly_adjusted_stream:
-    $ref: "*ref(definitions.base_stream)"
-    $options:
-      name: "time_series_monthly_adjusted"
-      dpath: "Monthly Adjusted Time Series"
-      function: "TIME_SERIES_MONTHLY_ADJUSTED"
-      path: "/query"
-      key_field: "date"
-
-streams:
-  - "*ref(definitions.time_series_intraday_stream)"
-  - "*ref(definitions.time_series_daily_stream)"
-  - "*ref(definitions.time_series_daily_adjusted_stream)"
-  - "*ref(definitions.time_series_weekly_stream)"
-  - "*ref(definitions.time_series_weekly_adjusted_stream)"
-  - "*ref(definitions.time_series_monthly_stream)"
-  - "*ref(definitions.time_series_monthly_adjusted_stream)"
-
-check:
-  stream_names:
-    - "time_series_weekly"
-    - "time_series_weekly_adjusted"
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/object_dpath_extractor.py b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/object_dpath_extractor.py
deleted file mode 100644
index 1ee461ba13a3..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/object_dpath_extractor.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-from dataclasses import dataclass
-from typing import Any, Mapping, Union
-
-import dpath.util
-import requests
-from airbyte_cdk.sources.declarative.extractors.dpath_extractor import DpathExtractor
-from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString
-from airbyte_cdk.sources.declarative.types import Record
-
-
-@dataclass
-class ObjectDpathExtractor(DpathExtractor):
-    """
-    Record extractor that searches a decoded response over a path defined as an array of fields.
-
-    Extends the DpathExtractor to allow for a list of records to be generated from a dpath that points
-    to an object, where the object's values are individual records.
-
-    Example data:
-    ```
-    {
-        "data": {
-            "2022-01-01": {
-                "id": "id1",
-                "name": "name1",
-                ...
-            },
-            "2022-01-02": {
-                "id": "id2",
-                "name": "name2",
-                ...
-            },
-            ...
-    }
-    ```
-
-    There is an optional `inject_key_as_field` parameter that can be used to inject the key of the object
-    as a field in the record. For example, if `inject_key_as_field` is set to `date`, the above data would
-    be transformed to:
-    ```
-    [
-        {
-            "date": "2022-01-01",
-            "id": "id1",
-            "name": "name1",
-            ...
-        },
-        {
-            "date": "2022-01-02",
-            "id": "id2",
-            "name": "name2",
-            ...
-        },
-        ...
-    ]
-    """
-
-    inject_key_as_field: Union[str, InterpolatedString] = None
-
-    def __post_init__(self, options: Mapping[str, Any]):
-        self.inject_key_as_field = InterpolatedString.create(self.inject_key_as_field, options=options)
-        for pointer_index in range(len(self.field_pointer)):
-            if isinstance(self.field_pointer[pointer_index], str):
-                self.field_pointer[pointer_index] = InterpolatedString.create(self.field_pointer[pointer_index], options=options)
-
-    def extract_records(self, response: requests.Response) -> list[Record]:
-        response_body = self.decoder.decode(response)
-        if len(self.field_pointer) == 0:
-            extracted = response_body
-        else:
-            pointer = [pointer.eval(self.config) for pointer in self.field_pointer]
-            extracted = dpath.util.get(response_body, pointer, default=[])
-        if isinstance(extracted, list):
-            return extracted
-        elif isinstance(extracted, dict) and all(isinstance(v, dict) for v in extracted.values()):  # Ensure object is dict[Hashable, dict]
-            if not self.inject_key_as_field:
-                return [value for _, value in extracted.items()]
-            else:
-                key_field = self.inject_key_as_field.eval(self.config)
-                return [{key_field: key, **value} for key, value in extracted.items()]
-        elif extracted:
-            return [extracted]
-        else:
-            return []
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_daily.json b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_daily.json
deleted file mode 100644
index 666425ebf3dd..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_daily.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "1. open": {
-      "type": ["string", "null"]
-    },
-    "2. high": {
-      "type": ["string", "null"]
-    },
-    "3. low": {
-      "type": ["string", "null"]
-    },
-    "4. close": {
-      "type": ["string", "null"]
-    },
-    "5. volume": {
-      "type": ["string", "null"]
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_daily_adjusted.json b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_daily_adjusted.json
deleted file mode 100644
index 49f6c50c55eb..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_daily_adjusted.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "1. open": {
-      "type": "string"
-    },
-    "2. high": {
-      "type": "string"
-    },
-    "3. low": {
-      "type": "string"
-    },
-    "4. close": {
-      "type": "string"
-    },
-    "5. adjusted close": {
-      "type": "string"
-    },
-    "6. volume": {
-      "type": "string"
-    },
-    "7. dividend amount": {
-      "type": "string"
-    },
-    "8. split coefficient": {
-      "type": "string"
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_intraday.json b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_intraday.json
deleted file mode 100644
index 666425ebf3dd..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_intraday.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "1. open": {
-      "type": ["string", "null"]
-    },
-    "2. high": {
-      "type": ["string", "null"]
-    },
-    "3. low": {
-      "type": ["string", "null"]
-    },
-    "4. close": {
-      "type": ["string", "null"]
-    },
-    "5. volume": {
-      "type": ["string", "null"]
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_monthly.json b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_monthly.json
deleted file mode 100644
index 666425ebf3dd..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_monthly.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "1. open": {
-      "type": ["string", "null"]
-    },
-    "2. high": {
-      "type": ["string", "null"]
-    },
-    "3. low": {
-      "type": ["string", "null"]
-    },
-    "4. close": {
-      "type": ["string", "null"]
-    },
-    "5. volume": {
-      "type": ["string", "null"]
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_monthly_adjusted.json b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_monthly_adjusted.json
deleted file mode 100644
index 962674bfc903..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_monthly_adjusted.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "1. open": {
-      "type": "string"
-    },
-    "2. high": {
-      "type": "string"
-    },
-    "3. low": {
-      "type": "string"
-    },
-    "4. close": {
-      "type": "string"
-    },
-    "5. adjusted close": {
-      "type": "string"
-    },
-    "6. volume": {
-      "type": "string"
-    },
-    "7. dividend amount": {
-      "type": "string"
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_weekly.json b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_weekly.json
deleted file mode 100644
index 666425ebf3dd..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_weekly.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "1. open": {
-      "type": ["string", "null"]
-    },
-    "2. high": {
-      "type": ["string", "null"]
-    },
-    "3. low": {
-      "type": ["string", "null"]
-    },
-    "4. close": {
-      "type": ["string", "null"]
-    },
-    "5. volume": {
-      "type": ["string", "null"]
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_weekly_adjusted.json b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_weekly_adjusted.json
deleted file mode 100644
index 962674bfc903..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/schemas/time_series_weekly_adjusted.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "1. open": {
-      "type": "string"
-    },
-    "2. high": {
-      "type": "string"
-    },
-    "3. low": {
-      "type": "string"
-    },
-    "4. close": {
-      "type": "string"
-    },
-    "5. adjusted close": {
-      "type": "string"
-    },
-    "6. volume": {
-      "type": "string"
-    },
-    "7. dividend amount": {
-      "type": "string"
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/source.py b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/source.py
deleted file mode 100644
index b8e22cf0db5d..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/source.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource
-
-"""
-This file provides the necessary constructs to interpret a provided declarative YAML configuration file into
-source connector.
-
-WARNING: Do not modify this file.
-"""
-
-
-# Declarative Source
-class SourceAlphaVantage(YamlDeclarativeSource):
-    def __init__(self):
-        super().__init__(**{"path_to_yaml": "alpha_vantage.yaml"})
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/spec.yaml b/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/spec.yaml
deleted file mode 100644
index cc8a1401b28f..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/source_alpha_vantage/spec.yaml
+++ /dev/null
@@ -1,53 +0,0 @@
-documentationUrl: https://docs.airbyte.io/integrations/sources/alpha-vantage
-connectionSpecification:
-  $schema: http://json-schema.org/draft-07/schema#
-  title: Alpha Vantage Spec
-  type: object
-  required:
-    - api_key
-    - symbol
-  properties:
-    api_key:
-      title: API Key
-      type: string
-      description: API Key
-      airbyte_secret: true
-      order: 0
-    symbol:
-      title: Symbol
-      type: string
-      description: Stock symbol (with exchange code)
-      examples:
-        - AAPL
-        - TSCO.LON
-      order: 1
-    interval:
-      title: Interval
-      type: string
-      description: |
-        Time-series data point interval. Required for intraday endpoints.
-      enum:
-        - 1min
-        - 5min
-        - 15min
-        - 30min
-        - 60min
-      default: 1min
-      order: 2
-    adjusted:
-      title: Adjusted?
-      type: boolean
-      description: |
-        Whether to return adjusted data. Only applicable to intraday endpoints.
-      default: false
-      order: 4
-    outputsize:
-      title: Output Size
-      type: string
-      description: |
-        Whether to return full or compact data (the last 100 data points).
-      enum:
-        - compact
-        - full
-      default: compact
-      order: 5
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/unit_tests/test_object_dpath_extractor.py b/airbyte-integrations/connectors/source-alpha-vantage/unit_tests/test_object_dpath_extractor.py
deleted file mode 100644
index 3324ed175a7b..000000000000
--- a/airbyte-integrations/connectors/source-alpha-vantage/unit_tests/test_object_dpath_extractor.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-import json
-from typing import Any
-
-from requests import Response
-from source_alpha_vantage.object_dpath_extractor import ObjectDpathExtractor
-
-
-def _create_response_with_body(body: Any) -> Response:
-    response = Response()
-    response._content = json.dumps(body).encode("utf-8")
-    return response
-
-
-def _create_response_with_dict_of_records() -> Response:
-    response_body = {
-        "data": {
-            "2022-01-01": {
-                "id": "id1",
-                "name": "name1",
-            },
-            "2022-01-02": {
-                "id": "id2",
-                "name": "name2",
-            },
-        }
-    }
-    return _create_response_with_body(response_body)
-
-
-def _create_response_with_list_of_records() -> Response:
-    response_body = {
-        "data": [
-            {
-                "id": "id1",
-                "name": "name1",
-            },
-            {
-                "id": "id2",
-                "name": "name2",
-            },
-        ]
-    }
-    return _create_response_with_body(response_body)
-
-
-def test_no_key_injection():
-    extractor = ObjectDpathExtractor(
-        field_pointer=["data"],
-        config={},
-        options={},
-    )
-
-    response = _create_response_with_dict_of_records()
-    records = extractor.extract_records(response)
-
-    assert records == [
-        {
-            "id": "id1",
-            "name": "name1",
-        },
-        {
-            "id": "id2",
-            "name": "name2",
-        },
-    ]
-
-
-def test_key_injection():
-    extractor = ObjectDpathExtractor(
-        field_pointer=["data"],
-        inject_key_as_field="date",
-        config={},
-        options={},
-    )
-
-    response = _create_response_with_dict_of_records()
-    records = extractor.extract_records(response)
-
-    assert records == [
-        {
-            "date": "2022-01-01",
-            "id": "id1",
-            "name": "name1",
-        },
-        {
-            "date": "2022-01-02",
-            "id": "id2",
-            "name": "name2",
-        },
-    ]
-
-
-def test_key_injection_with_interpolation():
-    extractor = ObjectDpathExtractor(
-        field_pointer=["data"],
-        inject_key_as_field="{{ config['key_field'] }}",
-        config={"key_field": "date"},
-        options={},
-    )
-
-    response = _create_response_with_dict_of_records()
-    records = extractor.extract_records(response)
-
-    assert records == [
-        {
-            "date": "2022-01-01",
-            "id": "id1",
-            "name": "name1",
-        },
-        {
-            "date": "2022-01-02",
-            "id": "id2",
-            "name": "name2",
-        },
-    ]
-
-
-def test_list_of_records():
-    extractor = ObjectDpathExtractor(
-        field_pointer=["data"],
-        config={},
-        options={},
-    )
-
-    response = _create_response_with_dict_of_records()
-    records = extractor.extract_records(response)
-
-    assert records == [
-        {
-            "id": "id1",
-            "name": "name1",
-        },
-        {
-            "id": "id2",
-            "name": "name2",
-        },
-    ]
-
-
-def test_no_records():
-    extractor = ObjectDpathExtractor(
-        field_pointer=["data"],
-        config={},
-        options={},
-    )
-
-    obj_response = _create_response_with_body({"data": {}})
-    obj_records = extractor.extract_records(obj_response)
-
-    assert obj_records == []
-
-    list_response = _create_response_with_body({"data": []})
-    list_records = extractor.extract_records(list_response)
-
-    assert list_records == []
-
-
-def test_single_record():
-    extractor = ObjectDpathExtractor(
-        field_pointer=["data"],
-        config={},
-        options={},
-    )
-
-    response = _create_response_with_body({"data": {"id": "id1", "name": "name1"}})
-    records = extractor.extract_records(response)
-
-    assert records == [{"id": "id1", "name": "name1"}]
diff --git a/airbyte-integrations/connectors/source-datadog/.dockerignore b/airbyte-integrations/connectors/source-datadog/.dockerignore
deleted file mode 100644
index 6807347e1915..000000000000
--- a/airbyte-integrations/connectors/source-datadog/.dockerignore
+++ /dev/null
@@ -1,6 +0,0 @@
-*
-!Dockerfile
-!main.py
-!source_datadog
-!setup.py
-!secrets
diff --git a/airbyte-integrations/connectors/source-datadog/Dockerfile b/airbyte-integrations/connectors/source-datadog/Dockerfile
deleted file mode 100644
index bdb53751e867..000000000000
--- a/airbyte-integrations/connectors/source-datadog/Dockerfile
+++ /dev/null
@@ -1,16 +0,0 @@
-FROM python:3.9-slim
-
-# Bash is installed for more convenient debugging.
-RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/*
-
-WORKDIR /airbyte/integration_code
-COPY source_datadog ./source_datadog
-COPY main.py ./
-COPY setup.py ./
-RUN pip install .
-
-ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
-ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
-
-LABEL io.airbyte.version=0.1.0
-LABEL io.airbyte.name=airbyte/source-datadog
diff --git a/airbyte-integrations/connectors/source-datadog/README.md b/airbyte-integrations/connectors/source-datadog/README.md
deleted file mode 100644
index a0d9e1192749..000000000000
--- a/airbyte-integrations/connectors/source-datadog/README.md
+++ /dev/null
@@ -1,109 +0,0 @@
-# Datadog Source
-
-This is the repository for the Datadog source connector, written in Python.
-For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/datadog).
-
-## Local development
-
-### Prerequisites
-**To iterate on this connector, make sure to complete this prerequisites section.**
-
-#### Minimum Python version required `= 3.9.0`
-
-#### Build & Activate Virtual Environment and install dependencies
-From this connector directory, create a virtual environment:
-```
-python -m venv .venv
-```
-
-This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
-development environment of choice. To activate it from the terminal, run:
-```
-source .venv/bin/activate
-pip install -r requirements.txt
-```
-If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
-
-Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
-used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
-If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
-should work as you expect.
-
-#### Building via Gradle
-From the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:source-datadog:build
-```
-
-#### Create credentials
-**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/datadog)
-to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_datadog/spec.json` file.
-Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information.
-See `integration_tests/sample_config.json` for a sample config file.
-
-**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source datadog test creds`
-and place them into `secrets/config.json`.
-
-
-### Locally running the connector
-```
-python main.py spec
-python main.py check --config secrets/config.json
-python main.py discover --config secrets/config.json
-python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
-```
-
-### Locally running the connector docker image
-
-#### Build
-First, make sure you build the latest Docker image:
-```
-docker build . -t airbyte/source-datadog:dev
-```
-
-You can also build the connector image via Gradle:
-```
-./gradlew :airbyte-integrations:connectors:source-datadog:airbyteDocker
-```
-When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
-the Dockerfile.
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/source-datadog:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-datadog:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-datadog:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-datadog:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-#### Acceptance Tests
-Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
-If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py.
-To run your integration tests with acceptance tests, from the connector root, run
-```
-docker build . --no-cache -t airbyte/source-datadog:dev \
-&& python -m pytest -p source_acceptance_test.plugin
-```
-
-### Unit Tests
-To run unit tests locally, from the connector directory run:
-```
-python -m pytest unit_tests
-```
-
-### Integration Tests
-1. From the airbyte project root, run `./gradlew :airbyte-integrations:connectors:source-datadog:integrationTest` to run the standard integration test suite.
-1. To run additional integration tests, place your integration tests in a new directory `integration_tests` and run them with `python -m pytest -s integration_tests`.
-   Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
-
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing unit and integration tests
-1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use SemVer).
-1. Create a Pull Request
-1. Pat yourself on the back for being an awesome contributor
-1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master
diff --git a/airbyte-integrations/connectors/source-datadog/acceptance-test-config.yml b/airbyte-integrations/connectors/source-datadog/acceptance-test-config.yml
deleted file mode 100644
index 6e1df480373e..000000000000
--- a/airbyte-integrations/connectors/source-datadog/acceptance-test-config.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
-# for more information about how to configure these tests
-connector_image: airbyte/source-datadog:dev
-tests:
-  spec:
-    - spec_path: "source_datadog/spec.yaml"
-  connection:
-    - config_path: "secrets/config.json"
-      status: "succeed"
-    - config_path: "integration_tests/invalid_config.json"
-      status: "failed"
-  discovery:
-    - config_path: "secrets/config.json"
-  basic_read:
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/configured_catalog.json"
-      empty_streams:
-        - incident_teams
-  incremental:
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/inc_configured_catalog.json"
-      future_state_path: "integration_tests/abnormal_state.json"
-  full_refresh:
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-datadog/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-datadog/acceptance-test-docker.sh
deleted file mode 100644
index c522eebbd94e..000000000000
--- a/airbyte-integrations/connectors/source-datadog/acceptance-test-docker.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env sh
-
-# Build latest connector image
-docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2)
-
-# Pull latest acctest image
-docker pull airbyte/source-acceptance-test:latest
-
-# Run
-docker run --rm -it \
-    -v /var/run/docker.sock:/var/run/docker.sock \
-    -v /tmp:/tmp \
-    -v $(pwd):/test_input \
-    airbyte/source-acceptance-test \
-    --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-datadog/build.gradle b/airbyte-integrations/connectors/source-datadog/build.gradle
deleted file mode 100644
index 7d7e44963718..000000000000
--- a/airbyte-integrations/connectors/source-datadog/build.gradle
+++ /dev/null
@@ -1,13 +0,0 @@
-plugins {
-    id 'airbyte-python'
-    id 'airbyte-docker'
-    id 'airbyte-source-acceptance-test'
-}
-
-airbytePython {
-    moduleDirectory 'source_datadog'
-}
-
-dependencies {
-    implementation files(project(':airbyte-integrations:bases:source-acceptance-test').airbyteDocker.outputs)
-}
diff --git a/airbyte-integrations/connectors/source-datadog/integration_tests/__init__.py b/airbyte-integrations/connectors/source-datadog/integration_tests/__init__.py
deleted file mode 100644
index 1100c1c58cf5..000000000000
--- a/airbyte-integrations/connectors/source-datadog/integration_tests/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
diff --git a/airbyte-integrations/connectors/source-datadog/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-datadog/integration_tests/abnormal_state.json
deleted file mode 100644
index b93902698218..000000000000
--- a/airbyte-integrations/connectors/source-datadog/integration_tests/abnormal_state.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  "logs": {
-    "sync_date": "2024-10-10T00:10:00Z"
-  }
-}
diff --git a/airbyte-integrations/connectors/source-datadog/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-datadog/integration_tests/acceptance.py
deleted file mode 100644
index 950b53b59d41..000000000000
--- a/airbyte-integrations/connectors/source-datadog/integration_tests/acceptance.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-
-import pytest
-
-pytest_plugins = ("source_acceptance_test.plugin",)
-
-
-@pytest.fixture(scope="session", autouse=True)
-def connector_setup():
-    """This fixture is a placeholder for external resources that acceptance test might require."""
-    yield
diff --git a/airbyte-integrations/connectors/source-datadog/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-datadog/integration_tests/configured_catalog.json
deleted file mode 100644
index b8a33996b8d4..000000000000
--- a/airbyte-integrations/connectors/source-datadog/integration_tests/configured_catalog.json
+++ /dev/null
@@ -1,73 +0,0 @@
-{
-  "streams": [
-    {
-      "stream": {
-        "name": "dashboards",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    },
-    {
-      "stream": {
-        "name": "downtimes",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    },
-    {
-      "stream": {
-        "name": "incident_teams",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"],
-        "source_defined_primary_key": [["id"]]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    },
-    {
-      "stream": {
-        "name": "logs",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh", "incremental"],
-        "source_defined_cursor": true,
-        "default_cursor_field": ["sync_date"],
-        "source_defined_primary_key": [["id"]]
-      },
-      "sync_mode": "full_refresh",
-      "cursor_field": ["sync_date"],
-      "destination_sync_mode": "overwrite"
-    },
-    {
-      "stream": {
-        "name": "metrics",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    },
-    {
-      "stream": {
-        "name": "synthetic_tests",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    },
-    {
-      "stream": {
-        "name": "users",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh"],
-        "source_defined_primary_key": [["id"]]
-      },
-      "sync_mode": "full_refresh",
-      "destination_sync_mode": "overwrite"
-    }
-  ]
-}
diff --git a/airbyte-integrations/connectors/source-datadog/integration_tests/inc_configured_catalog.json b/airbyte-integrations/connectors/source-datadog/integration_tests/inc_configured_catalog.json
deleted file mode 100644
index ee26bd107c2e..000000000000
--- a/airbyte-integrations/connectors/source-datadog/integration_tests/inc_configured_catalog.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "streams": [
-    {
-      "stream": {
-        "name": "logs",
-        "json_schema": {},
-        "supported_sync_modes": ["full_refresh", "incremental"],
-        "source_defined_cursor": true,
-        "default_cursor_field": ["sync_date"],
-        "source_defined_primary_key": [["id"]]
-      },
-      "sync_mode": "incremental",
-      "cursor_field": ["sync_date"],
-      "destination_sync_mode": "append"
-    }
-  ]
-}
diff --git a/airbyte-integrations/connectors/source-datadog/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-datadog/integration_tests/invalid_config.json
deleted file mode 100644
index 6e27d0fcd606..000000000000
--- a/airbyte-integrations/connectors/source-datadog/integration_tests/invalid_config.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "api_key": "",
-  "application_key": "",
-  "query": "sample query",
-  "limit": 100,
-  "start_date": "2222-10-10T00:00:00Z"
-}
diff --git a/airbyte-integrations/connectors/source-datadog/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-datadog/integration_tests/sample_config.json
deleted file mode 100644
index 9be54f53bd05..000000000000
--- a/airbyte-integrations/connectors/source-datadog/integration_tests/sample_config.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "api_key": "",
-  "application_key": "",
-  "query": "sample query",
-  "limit": 100,
-  "start_date": "2022-10-10T00:00:00Z",
-  "end_date": "2022-10-10T00:10:00Z"
-}
diff --git a/airbyte-integrations/connectors/source-datadog/main.py b/airbyte-integrations/connectors/source-datadog/main.py
deleted file mode 100644
index 55bd46911b4f..000000000000
--- a/airbyte-integrations/connectors/source-datadog/main.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-
-import sys
-
-from airbyte_cdk.entrypoint import launch
-from source_datadog import SourceDatadog
-
-if __name__ == "__main__":
-    source = SourceDatadog()
-    launch(source, sys.argv[1:])
diff --git a/airbyte-integrations/connectors/source-datadog/requirements.txt b/airbyte-integrations/connectors/source-datadog/requirements.txt
deleted file mode 100644
index 7be17a56d745..000000000000
--- a/airbyte-integrations/connectors/source-datadog/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies.
--e ../../bases/source-acceptance-test
--e .
diff --git a/airbyte-integrations/connectors/source-datadog/setup.py b/airbyte-integrations/connectors/source-datadog/setup.py
deleted file mode 100644
index 3156ffa7e737..000000000000
--- a/airbyte-integrations/connectors/source-datadog/setup.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "requests==2.25.1"] - -TEST_REQUIREMENTS = [ - "pytest==6.1.2", - "source-acceptance-test", -] - -setup( - name="source_datadog", - description="Source implementation for Datadog.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json", "schemas/*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/audit_logs.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/audit_logs.json deleted file mode 100644 index f4d05f4a807f..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/audit_logs.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "attributes": { - "type": ["object", "null"], - "description": "JSON object containing all event attributes and their associated values.", - "properties": { - "attributes": { - "type": ["object", "null"], - "description": "JSON object of attributes from Audit Logs events.", - "additionalProperties": true - }, - "service": { - "type": ["string", "null"], - "description": "Name of the application or service generating Audit Logs events." - }, - "tags": { - "type": ["array", "null"], - "items": { - "type": "string" - }, - "description": "Array of tags associated with your event." - }, - "timestamp": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp of your event." - } - }, - "additionalProperties": true - }, - "id": { - "type": ["string", "null"], - "description": "Unique ID of the event.", - "readOnly": true - }, - "sync_date": { - "type": ["null", "string"] - }, - "type": { - "type": ["string", "null"], - "enum": ["audit"], - "description": "Type of the event. Allowed enum values: audit", - "readOnly": true - } - }, - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/dashboards.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/dashboards.json deleted file mode 100644 index c767d3fadc03..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/dashboards.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "author_handle": { - "type": ["string", "null"], - "description": "Identifier of the dashboard author.", - "readOnly": true - }, - "created_at": { - "type": ["string", "null"], - "format": "date-time", - "description": "Creation date of the dashboard.", - "readOnly": true - }, - "description": { - "type": ["string", "null"], - "description": "Description of the dashboard.", - "readOnly": true - }, - "id": { - "type": ["string", "null"], - "description": "Dashboard identifier.", - "readOnly": true - }, - "is_read_only": { - "type": ["boolean", "null"], - "description": "Whether this dashboard is read-only. 
If True, only the author and admins can make changes to it.", - "readOnly": true - }, - "layout_type": { - "type": ["string", "null"], - "enum": ["ordered", "free"], - "description": "Layout type of the dashboard", - "readOnly": true - }, - "modified_at": { - "type": ["string", "null"], - "format": "date-time", - "description": "Modification date of the dashboard.", - "readOnly": true - }, - "title": { - "type": ["string", "null"], - "description": "Title of the dashboard.", - "readOnly": true - }, - "url": { - "type": ["string", "null"], - "description": "URL of the dashboard.", - "readOnly": true - } - }, - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/downtimes.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/downtimes.json deleted file mode 100644 index a996968a6ecd..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/downtimes.json +++ /dev/null @@ -1,103 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "active": { - "type": ["boolean", "null"], - "description": "If a scheduled downtime currently exists.", - "readOnly": true - }, - "active_child": { - "type": ["object", "null"], - "description": "The downtime object definition of the active child for the original parent recurring downtime.", - "additionalProperties": true - }, - "canceled": { - "type": ["integer", "null"], - "description": "If a scheduled downtime is canceled.", - "readOnly": true - }, - "creator_id": { - "type": ["integer", "null"], - "description": "User ID of the downtime creator.", - "readOnly": true - }, - "disabled": { - "type": ["boolean", "null"], - "description": "If a downtime has been disabled.", - "readOnly": true - }, - "downtime_type": { - "type": ["integer", "null"], - "description": "0 for a downtime applied on * or all, 1 when the downtime is only scoped to hosts, or 2 when the downtime is scoped to anything but hosts.", - "readOnly": true - }, - "end": { - "type": ["integer", "null"], - "description": "POSIX timestamp to end the downtime. If not provided, the downtime is in effect indefinitely until you cancel it.", - "readOnly": true - }, - "id": { - "type": ["integer", "null"], - "description": "The downtime ID.", - "readOnly": true - }, - "message": { - "type": ["string", "null"], - "description": "A message to include with notifications for this downtime.", - "readOnly": true - }, - "monitor_id": { - "type": ["integer", "null"], - "description": "A single monitor to which the downtime applies. 
If not provided, the downtime applies to all monitors.", - "readOnly": true - }, - "monitor_tags": { - "type": ["array", "null"], - "items": { - "type": "string" - }, - "description": "A comma-separated list of monitor tags.", - "readOnly": true - }, - "mute_first_recovery_notification": { - "type": ["boolean", "null"], - "description": "If the first recovery notification during a downtime should be muted.", - "readOnly": true - }, - "parent_id": { - "type": ["integer", "null"], - "description": "ID of the parent Downtime.", - "readOnly": true - }, - "recurrence": { - "type": ["object", "null"], - "description": "An object defining the recurrence of the downtime.", - "additionalProperties": true - }, - "scope": { - "type": ["array", "null"], - "items": { - "type": "string" - }, - "description": "The scope(s) to which the downtime applies.", - "readOnly": true - }, - "start": { - "type": ["integer", "null"], - "description": "POSIX timestamp to start the downtime. If not provided, the downtime starts the moment it is created.", - "readOnly": true - }, - "timezone": { - "type": ["string", "null"], - "description": "The timezone in which to display the downtime's start and end times in Datadog applications.", - "readOnly": true - }, - "updater_id": { - "type": ["integer", "null"], - "description": "ID of the last user that updated the downtime.", - "readOnly": true - } - }, - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/incident_teams.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/incident_teams.json deleted file mode 100644 index 3411ac94cfb4..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/incident_teams.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "attributes": { - "type": ["object", "null"], - "description": "The incident team's attributes from a response.", - "properties": { - "created": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp of when the incident team was created." - }, - "modified": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp of when the incident team was modified." - }, - "name": { - "type": ["string", "null"], - "description": "Name of the incident team." - } - }, - "additionalProperties": true - }, - "id": { - "type": ["string", "null"], - "description": "The incident team's ID.", - "readOnly": true - }, - "relationships": { - "type": ["object", "null"], - "description": "The incident team's relationships.", - "properties": { - "created_by": { - "type": ["object", "null"], - "description": "Relationship to user.", - "properties": { - "data": { - "type": ["object"], - "description": "Relationship to user object.", - "properties": { - "id": { - "type": ["string"], - "description": "A unique identifier that represents the user." - }, - "type": { - "type": ["string"], - "enum": ["users"], - "description": "Users resource type. Allowed enum values: users" - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - }, - "last_modified_by": { - "type": ["object", "null"], - "description": "Relationship to user.", - "properties": { - "data": { - "type": ["object"], - "description": "Relationship to user object.", - "properties": { - "id": { - "type": ["string"], - "description": "A unique identifier that represents the user." 
- }, - "type": { - "type": ["string"], - "enum": ["users"], - "description": "Users resource type. Allowed enum values: users" - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - }, - "type": { - "type": ["string", "null"], - "enum": ["teams"], - "description": "Incident Team resource type. Allowed enum values: teams", - "readOnly": true - } - }, - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/incidents.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/incidents.json deleted file mode 100644 index 91b29ea4a799..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/incidents.json +++ /dev/null @@ -1,135 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "attributes": { - "type": ["object", "null"], - "description": "The incident's attributes from a response.", - "properties": { - "created": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp when the incident was created." - }, - "customer_impact_duration": { - "type": ["integer", "null"], - "description": "Length of the incident's customer impact in seconds. Equals the difference between customer_impact_start and customer_impact_end." - }, - "customer_impact_end": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp when customers were no longer impacted by the incident." - }, - "customer_impact_scope": { - "type": ["string", "null"], - "description": "A summary of the impact customers experienced during the incident." - }, - "customer_impact_start": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp when customers began being impacted by the incident." - }, - "customer_impacted": { - "type": ["boolean", "null"], - "description": "A flag indicating whether the incident caused customer impact." - }, - "detected": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp when the incident was detected." - }, - "fields": { - "type": ["object", "null"], - "description": "A condensed view of the user-defined fields attached to incidents.", - "additionalProperties": true - }, - "modified": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp when the incident was last modified." - }, - "notification_handles": { - "type": ["array", "null"], - "items": { - "type": "object" - }, - "description": "Notification handles that will be notified of the incident during update." - }, - "public_id": { - "type": ["integer", "null"], - "description": "The monotonically increasing integer ID for the incident." - }, - "resolved": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp when the incident's state was last changed from active or stable to resolved or completed." - }, - "time_to_detect": { - "type": ["integer", "null"], - "description": "The amount of time in seconds to detect the incident. Equals the difference between customer_impact_start and detected." - }, - "time_to_internal_response": { - "type": ["integer", "null"], - "description": "The amount of time in seconds to call incident after detection. Equals the difference of detected and created." - }, - "time_to_repair": { - "type": ["integer", "null"], - "description": "The amount of time in seconds to resolve customer impact after detecting the issue. 
Equals the difference between customer_impact_end and detected." - }, - "time_to_resolve": { - "type": ["integer", "null"], - "description": "The amount of time in seconds to resolve the incident after it was created. Equals the difference between created and resolved." - }, - "title": { - "type": ["string"], - "description": "The title of the incident, which summarizes what happened." - } - }, - "additionalProperties": true - }, - "id": { - "type": ["string"], - "description": "The incident's ID.", - "readOnly": true - }, - "relationships": { - "type": ["object", "null"], - "description": "The incident's relationships from a response.", - "properties": { - "attachments": { - "type": ["object", "null"], - "description": "A relationship reference for attachments.", - "additionalProperties": true - }, - "commander_user": { - "type": ["object", "null"], - "description": "Relationship to user.", - "additionalProperties": true - }, - "created_by_user": { - "type": ["object", "null"], - "description": "Relationship to user.", - "additionalProperties": true - }, - "integrations": { - "type": ["object", "null"], - "description": "A relationship reference for multiple integration metadata objects.", - "additionalProperties": true - }, - "last_modified_by_user": { - "type": ["object", "null"], - "description": "Relationship to user.", - "additionalProperties": true - } - }, - "additionalProperties": true - }, - "type": { - "type": ["string"], - "enum": ["incidents"], - "description": "Incident resource type. Allowed enum values: incidents", - "readOnly": true - } - }, - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/logs.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/logs.json deleted file mode 100644 index 1e1d2bc70818..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/logs.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "attributes": { - "type": ["object", "null"], - "description": "JSON object containing all log attributes and their associated values.", - "properties": { - "attributes": { - "type": ["object", "null"], - "description": "JSON object of attributes from your log.", - "additionalProperties": true - }, - "host": { - "type": ["string", "null"], - "description": "Name of the machine from where the logs are being sent." - }, - "message": { - "type": ["string", "null"], - "description": "The message reserved attribute of your log." - }, - "service": { - "type": ["string", "null"], - "description": "The name of the application or service generating the log events." - }, - "status": { - "type": ["string", "null"], - "description": "Status of the message associated with your log." - }, - "tags": { - "type": ["array", "null"], - "items": { - "type": "string" - }, - "description": "Array of tags associated with your log." - }, - "timestamp": { - "type": ["string", "null"], - "format": "date-time", - "description": "Timestamp of your log." - } - }, - "additionalProperties": true - }, - "id": { - "type": ["string", "null"], - "description": "Unique ID of the Log.", - "readOnly": true - }, - "sync_date": { - "type": ["null", "string"] - }, - "type": { - "type": ["string", "null"], - "enum": ["log"], - "description": "Type of the event. 
Allowed enum values: log", - "readOnly": true - } - }, - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/metrics.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/metrics.json deleted file mode 100644 index c74e6c5a5915..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/metrics.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/synthetic_tests.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/synthetic_tests.json deleted file mode 100644 index a2b7d0e0d91b..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/synthetic_tests.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "config": { - "type": ["object", "null"], - "description": "Configuration object for a Synthetic test.", - "additionalProperties": true - }, - "creator": { - "type": ["object", "null"], - "description": "Object describing the creator of the shared element.", - "properties": { - "email": { - "type": ["string", "null"], - "description": "Email of the creator." - }, - "handle": { - "type": ["string", "null"], - "description": "Handle of the creator." - }, - "name": { - "type": ["string", "null"], - "description": "Name of the creator." - } - }, - "additionalProperties": true - }, - "locations": { - "type": ["array", "null"], - "items": { - "type": "string" - }, - "description": "Array of locations used to run the test.", - "readOnly": true - }, - "message": { - "type": ["string", "null"], - "description": "Notification message associated with the test.", - "readOnly": true - }, - "monitor_id": { - "type": ["integer", "null"], - "description": "The associated monitor ID.", - "readOnly": true - }, - "name": { - "type": ["string", "null"], - "description": "Name of the test.", - "readOnly": true - }, - "options": { - "type": ["object", "null"], - "description": "Object describing the extra options for a Synthetic test.", - "additionalProperties": true - }, - "public_id": { - "type": ["string", "null"], - "description": "The test public ID.", - "readOnly": true - }, - "status": { - "type": ["string", "null"], - "enum": ["live", "paused"], - "description": "Define whether you want to start (live) or pause (paused) a Synthetic test. Allowed enum values: live,paused", - "readOnly": true - }, - "steps": { - "type": ["object", "null"], - "description": "For browser test, the steps of the test.", - "additionalProperties": true - }, - "subtype": { - "type": ["string", "null"], - "enum": [ - "http", - "ssl", - "tcp", - "dns", - "multi", - "icmp", - "udp", - "websocket", - "grpc" - ], - "description": "The subtype of the Synthetic API test, http, ssl, tcp, dns, icmp, udp, websocket, grpc or multi. Allowed enum values: http,ssl,tcp,dns,multi,icmp,udp,websocket,grpc", - "readOnly": true - }, - "tags": { - "type": ["array", "null"], - "items": { - "type": "string" - }, - "description": "Array of tags attached to the test.", - "readOnly": true - }, - "type": { - "type": ["string", "null"], - "enum": ["api", "browser"], - "description": "Type of the Synthetic test, either api or browser. 
Allowed enum values: api,browser", - "readOnly": true - } - }, - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/users.json b/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/users.json deleted file mode 100644 index d9d80590a4f4..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/schemas/users.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "attributes": { - "type": ["object", "null"], - "description": "Attributes of user object returned by the API.", - "properties": { - "created_at": { - "type": ["string", "null"], - "format": "date-time", - "description": "Creation time of the user." - }, - "disabled": { - "type": ["boolean", "null"], - "description": "Whether the user is disabled." - }, - "email": { - "type": ["string", "null"], - "description": "Email of the user." - }, - "handle": { - "type": ["string", "null"], - "description": "Handle of the user." - }, - "icon": { - "type": ["string", "null"], - "description": "URL of the user's icon." - }, - "modified_at": { - "type": ["string", "null"], - "format": "date-time", - "description": "Time that the user was last modified." - }, - "name": { - "type": ["string", "null"], - "description": "Name of the user." - }, - "service_account": { - "type": ["boolean", "null"], - "description": "Whether the user is a service account." - }, - "status": { - "type": ["string", "null"], - "description": "Status of the user." - }, - "title": { - "type": ["string", "null"], - "description": "Title of the user." - }, - "verified": { - "type": ["boolean", "null"], - "description": "Whether the user is verified." - } - }, - "additionalProperties": true - }, - "id": { - "type": ["string", "null"], - "description": "ID of the user.", - "readOnly": true - }, - "relationships": { - "type": ["object", "null"], - "description": "Relationships of the user object returned by the API.", - "readOnly": true - }, - "type": { - "type": ["string", "null"], - "enum": ["users"], - "description": "Users resource type. Allowed enum values: users", - "readOnly": true - } - }, - "additionalProperties": true -} diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/source.py b/airbyte-integrations/connectors/source-datadog/source_datadog/source.py deleted file mode 100644 index 164c2be2ae5a..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/source.py +++ /dev/null @@ -1,64 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
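All of the stream schemas above follow the same draft-07 conventions: nullable fields expressed as `["<type>", "null"]` unions and `additionalProperties: true` so unexpected API fields pass through. One quirk worth noting: where a nullable field also carries an `enum` (for example `["users"]`), a literal null would still fail the enum check, so the null half of the union is effectively unreachable there. A sketch of validating a record against this style, assuming the third-party `jsonschema` package (which this connector does not itself declare):

```python
# Sketch (assumes the third-party `jsonschema` package): validating a
# record against the nullable draft-07 style used by the schemas above.
import jsonschema

users_schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {
        "id": {"type": ["string", "null"]},
        # A null "type" would pass the type union but fail the enum.
        "type": {"type": ["string", "null"], "enum": ["users"]},
    },
    "additionalProperties": True,
}

record = {"id": "abc-123", "type": "users", "extra_field": "kept"}
jsonschema.validate(instance=record, schema=users_schema)  # raises on mismatch
```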
-# - -from datetime import datetime -from typing import Any, List, Mapping, Optional, Tuple - -import requests -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from pydantic.datetime_parse import timedelta -from source_datadog.streams import AuditLogs, Dashboards, Downtimes, Incidents, IncidentTeams, Logs, Metrics, SyntheticTests, Users - - -class SourceDatadog(AbstractSource): - @staticmethod - def _get_authenticator(config: Mapping[str, Any]): - return DatadogAuthenticator(api_key=config["api_key"], application_key=config["application_key"]) - - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: - try: - args = self.connector_config(config) - dashboards_stream = Dashboards(**args) - records = dashboards_stream.read_records(sync_mode=SyncMode.full_refresh) - next(records, None) - return True, None - except Exception as e: - return False, e - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - args = self.connector_config(config) - return [ - AuditLogs(**args), - Dashboards(**args), - Downtimes(**args), - Incidents(**args), - IncidentTeams(**args), - Logs(**args), - Metrics(**args), - SyntheticTests(**args), - Users(**args), - ] - - def connector_config(self, config: Mapping[str, Any]) -> Mapping[str, Any]: - return { - "authenticator": self._get_authenticator(config), - "query": config.get("query", ""), - "max_records_per_request": config.get("max_records_per_request", 5000), - "start_date": config.get("start_date", datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")), - "end_date": config.get("end_date", (datetime.now() + timedelta(seconds=1)).strftime("%Y-%m-%dT%H:%M:%SZ")), - } - - -class DatadogAuthenticator(requests.auth.AuthBase): - def __init__(self, api_key: str, application_key: str): - self.api_key = api_key - self.application_key = application_key - - def __call__(self, r): - r.headers["DD-API-KEY"] = self.api_key - r.headers["DD-APPLICATION-KEY"] = self.application_key - return r diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/spec.yaml b/airbyte-integrations/connectors/source-datadog/source_datadog/spec.yaml deleted file mode 100644 index de0e69d3aa9f..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/spec.yaml +++ /dev/null @@ -1,56 +0,0 @@ -documentationUrl: https://docs.airbyte.com/integrations/sources/datadog -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: Datadog Source Spec - type: object - required: - - api_key - - application_key - additionalProperties: true - properties: - api_key: - title: API Key - description: Datadog API key - type: string - airbyte_secret: true - order: 1 - application_key: - title: Application Key - description: Datadog application key - type: string - airbyte_secret: true - order: 2 - query: - title: Query - description: The search query. This just applies to Incremental syncs. If empty, it'll collect all logs. - type: string - order: 3 - max_records_per_request: - type: integer - title: Max records per requests - default: 5000 - minimum: 1 - maximum: 5000 - description: Maximum number of records to collect per request. - order: 4 - start_date: - title: Start date - pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ - description: >- - UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will - not be replicated. 
This just applies to Incremental syncs. - type: string - examples: - - "2022-10-01T00:00:00Z" - order: 5 - end_date: - title: End date - pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ - description: >- - UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will - not be replicated. An empty value will represent the current datetime for each - execution. This just applies to Incremental syncs. - examples: - - "2022-10-01T00:00:00Z" - type: string - order: 6 diff --git a/airbyte-integrations/connectors/source-datadog/source_datadog/streams.py b/airbyte-integrations/connectors/source-datadog/source_datadog/streams.py deleted file mode 100644 index 9099a47b0fef..000000000000 --- a/airbyte-integrations/connectors/source-datadog/source_datadog/streams.py +++ /dev/null @@ -1,288 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from abc import ABC, abstractmethod -from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Union - -import requests -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams import IncrementalMixin -from airbyte_cdk.sources.streams.http import HttpStream - - -class DatadogStream(HttpStream, ABC): - """ - Datadog API Reference: https://docs.datadoghq.com/api/latest/ - """ - - primary_key: Optional[str] = None - parse_response_root: Optional[str] = None - - def __init__(self, query: str, max_records_per_request: int, start_date: str, end_date: str, **kwargs): - super().__init__(**kwargs) - self.query = query - self.max_records_per_request = max_records_per_request - self.start_date = start_date - self.end_date = end_date - self._cursor_value = None - - @property - def url_base(self) -> str: - return "https://api.datadoghq.com/api" - - def request_headers(self, **kwargs) -> Mapping[str, Any]: - return { - "Accept": "application/json", - "Content-Type": "application/json", - } - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - params: Dict[str, str] = {} - - if next_page_token: - params.update(next_page_token) - - return params - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_json = response.json() - records = response_json if not self.parse_response_root else response_json.get(self.parse_response_root, []) - for record in records: - yield self.transform(record=record, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - return record - - -class V1ApiDatadogStream(DatadogStream, ABC): - @property - def url_base(self) -> str: - return f"{super().url_base}/v1/" - - @property - def http_method(self) -> str: - return "GET" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - -class Dashboards(V1ApiDatadogStream): - """ - https://docs.datadoghq.com/api/latest/dashboards/#get-all-dashboards - """ - - parse_response_root: Optional[str] = "dashboards" - - def path(self, **kwargs) -> str: - return "dashboard" - - -class Downtimes(V1ApiDatadogStream): - """ - https://docs.datadoghq.com/api/latest/downtimes/#get-all-downtimes - """ - - def path(self, **kwargs) -> str: - return "downtime" - - -class SyntheticTests(V1ApiDatadogStream): - """ - https://docs.datadoghq.com/api/latest/synthetics/#get-the-list-of-all-tests - """ - - parse_response_root: 
Optional[str] = "tests" - - def path(self, **kwargs) -> str: - return "synthetics/tests" - - -class V2ApiDatadogStream(DatadogStream, ABC): - @property - def url_base(self) -> str: - return f"{super().url_base}/v2/" - - -class IncrementalSearchableStream(V2ApiDatadogStream, IncrementalMixin, ABC): - primary_key: Optional[str] = "id" - parse_response_root: Optional[str] = "data" - - def __init__(self, query: str, max_records_per_request: int, start_date: str, end_date: str, **kwargs): - super().__init__(query, max_records_per_request, start_date, end_date, **kwargs) - self._cursor_value = "" - - @property - def http_method(self) -> str: - return "POST" - - @property - def state(self) -> Mapping[str, Any]: - if self._cursor_value: - return {self.cursor_field: self._cursor_value} - else: - return {self.cursor_field: self.start_date} - - @state.setter - def state(self, value: Mapping[str, Any]): - self._cursor_value = value[self.cursor_field] - - @property - def cursor_field(self) -> Union[str, List[str]]: - return "sync_date" - - def request_body_json( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> Optional[Mapping]: - cursor = None - if next_page_token: - cursor = next_page_token.get("page", {}).get("cursor", {}) - return self.get_payload(cursor) - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - response_json = response.json() - cursor = response_json.get("meta", {}).get("page", {}).get("after", {}) - if not cursor: - self._cursor_value = self.end_date - else: - return self.get_payload(cursor) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record[self.cursor_field] = self._cursor_value if self._cursor_value else self.end_date - return record - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - if self.start_date >= self.end_date or self.end_date <= self._cursor_value: - return [] - return super().read_records(sync_mode, cursor_field, stream_slice, stream_state) - - def get_payload(self, cursor: Optional[str]) -> Mapping[str, Any]: - payload = { - "filter": {"query": self.query, "from": self._cursor_value if self._cursor_value else self.start_date, "to": self.end_date}, - "page": {"limit": self.max_records_per_request}, - } - if cursor: - payload["page"]["cursor"] = cursor - - return payload - - -class AuditLogs(IncrementalSearchableStream): - """ - https://docs.datadoghq.com/api/latest/audit/#search-audit-logs-events - """ - - def path(self, **kwargs) -> str: - return "audit/events/search" - - -class Logs(IncrementalSearchableStream): - """ - https://docs.datadoghq.com/api/latest/logs/#search-logs - """ - - def path(self, **kwargs) -> str: - return "logs/events/search" - - -class BasedListStream(V2ApiDatadogStream, ABC): - parse_response_root: Optional[str] = "data" - - @property - def http_method(self) -> str: - return "GET" - - -class Metrics(BasedListStream): - """ - https://docs.datadoghq.com/api/latest/metrics/#get-a-list-of-metrics - """ - - def path(self, **kwargs) -> str: - return "metrics?window[seconds]=1209600" # max value allowed (2 weeks) - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - -class PaginatedBasedListStream(BasedListStream, ABC): - 
primary_key: Optional[str] = "id" - - def path( - self, - *, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> str: - offset = None - if next_page_token: - offset = next_page_token.get("offset") - return self.get_url_path(offset) - - @abstractmethod - def get_url_path(self, offset: Optional[str]) -> str: - """ - Returns the relative URL with the corresponding offset - """ - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - response_json = response.json() - next_offset = response_json.get("meta", {}).get("pagination", {}).get("next_offset", -1) - current_offset = response_json.get("meta", {}).get("pagination", {}).get("offset", -1) - next_page_token = None - if next_offset != current_offset: - next_page_token = {"offset": next_offset} - return next_page_token - - -class Incidents(PaginatedBasedListStream): - """ - https://docs.datadoghq.com/api/latest/incidents/#get-a-list-of-incidents - """ - - def get_url_path(self, offset: Optional[str]) -> str: - params = f"&page[offset]={offset}" if offset else "" - return f"incidents?page[size]={self.max_records_per_request}{params}" - - -class IncidentTeams(PaginatedBasedListStream): - """ - https://docs.datadoghq.com/api/latest/incident-teams/#get-a-list-of-all-incident-teams - """ - - def get_url_path(self, offset: Optional[str]) -> str: - params = f"&page[offset]={offset}" if offset else "" - return f"teams?page[size]={self.max_records_per_request}{params}" - - -class Users(PaginatedBasedListStream): - """ - https://docs.datadoghq.com/api/latest/users/#list-all-users - """ - - current_page = 0 - - def get_url_path(self, offset: Optional[int]) -> str: - params = f"&page[number]={offset}" if offset else "" - return f"users?page[size]={self.max_records_per_request}{params}" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - response_json = response.json() - next_page_token = None - if len(response_json.get("data", [])) > 0: - self.current_page += 1 - next_page_token = {"offset": self.current_page} - return next_page_token diff --git a/airbyte-integrations/connectors/source-datadog/unit_tests/__init__.py b/airbyte-integrations/connectors/source-datadog/unit_tests/__init__.py deleted file mode 100644 index 1100c1c58cf5..000000000000 --- a/airbyte-integrations/connectors/source-datadog/unit_tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# diff --git a/airbyte-integrations/connectors/source-datadog/unit_tests/conftest.py b/airbyte-integrations/connectors/source-datadog/unit_tests/conftest.py deleted file mode 100644 index 8a149e2666cd..000000000000 --- a/airbyte-integrations/connectors/source-datadog/unit_tests/conftest.py +++ /dev/null @@ -1,392 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
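To recap the stream hierarchy in streams.py above: the searchable v2 streams (`AuditLogs`, `Logs`) POST a payload whose `from` bound is the saved `sync_date` cursor and page via `meta.page.after`, while the paginated list streams (`Incidents`, `IncidentTeams`) follow `meta.pagination.next_offset` and `Users` simply counts pages. A condensed sketch of one searchable-stream request, reusing the payload and response keys from the code above and the header names from `DatadogAuthenticator` in source.py:

```python
# Condensed sketch of the cursor loop implemented by
# IncrementalSearchableStream above (same payload and response keys;
# header names come from DatadogAuthenticator in source.py).
import requests

def search_logs_once(api_key, app_key, query, start, end, cursor=None, limit=5000):
    payload = {
        "filter": {"query": query, "from": start, "to": end},
        "page": {"limit": limit},
    }
    if cursor:
        payload["page"]["cursor"] = cursor
    resp = requests.post(
        "https://api.datadoghq.com/api/v2/logs/events/search",
        json=payload,
        headers={"DD-API-KEY": api_key, "DD-APPLICATION-KEY": app_key},
    )
    resp.raise_for_status()
    body = resp.json()
    records = body.get("data", [])
    # An absent meta.page.after means the window is exhausted; the stream
    # then advances its sync_date cursor to end_date.
    next_cursor = body.get("meta", {}).get("page", {}).get("after")
    return records, next_cursor
```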
-# - -from pytest import fixture - - -@fixture(name="config") -def config_fixture(): - return { - "api_key": "test_api_key", - "application_key": "test_application_key", - "query": "", - "max_records_per_request": 5000, - "start_date": "2022-10-10T00:00:00Z", - "end_date": "2022-10-10T00:10:00Z", - } - - -@fixture(name="mock_responses") -def mock_responses(): - return { - "Dashboards": { - "dashboards": [ - { - "author_handle": "string", - "created_at": "2019-09-19T10:00:00.000Z", - "description": "string", - "id": "string", - "is_read_only": False, - "layout_type": "ordered", - "modified_at": "2019-09-19T10:00:00.000Z", - "title": "string", - "url": "string", - } - ], - }, - "Downtimes": { - "active": True, - "active_child": { - "active": True, - "canceled": 1412799983, - "creator_id": 123456, - "disabled": False, - "downtime_type": 2, - "end": 1412793983, - "id": 1626, - "message": "Message on the downtime", - "monitor_id": 123456, - "monitor_tags": ["*"], - "mute_first_recovery_notification": False, - "parent_id": 123, - "recurrence": { - "period": 1, - "rrule": "FREQ=MONTHLY;BYSETPOS=3;BYDAY=WE;INTERVAL=1", - "type": "weeks", - "until_date": 1447786293, - "until_occurrences": 2, - "week_days": ["Mon", "Tue"], - }, - "scope": ["env:staging"], - "start": 1412792983, - "timezone": "America/New_York", - "updater_id": 123456, - }, - "canceled": 1412799983, - "creator_id": 123456, - "disabled": False, - "downtime_type": 2, - "end": 1412793983, - "id": 1625, - "message": "Message on the downtime", - "monitor_id": 123456, - "monitor_tags": ["*"], - "mute_first_recovery_notification": False, - "parent_id": 123, - "recurrence": { - "period": 1, - "rrule": "FREQ=MONTHLY;BYSETPOS=3;BYDAY=WE;INTERVAL=1", - "type": "weeks", - "until_date": 1447786293, - "until_occurrences": 2, - "week_days": ["Mon", "Tue"], - }, - "scope": ["env:staging"], - "start": 1412792983, - "timezone": "America/New_York", - "updater_id": 123456, - }, - "SyntheticTests": { - "tests": [ - { - "config": { - "assertions": [{"operator": "contains", "property": "string", "target": 123456, "type": "statusCode"}], - "configVariables": [ - {"example": "string", "id": "string", "name": "VARIABLE_NAME", "pattern": "string", "type": "text"} - ], - "request": { - "allow_insecure": False, - "basicAuth": {"password": "PaSSw0RD!", "type": "web", "username": "my_username"}, - "body": "string", - "certificate": { - "cert": {"content": "string", "filename": "string", "updatedAt": "string"}, - "key": {"content": "string", "filename": "string", "updatedAt": "string"}, - }, - "certificateDomains": [], - "dnsServer": "string", - "dnsServerPort": "integer", - "follow_redirects": False, - "headers": {"": "string"}, - "host": "string", - "message": "string", - "metadata": {"": "string"}, - "method": "GET", - "noSavingResponseBody": False, - "numberOfPackets": "integer", - "port": "integer", - "proxy": {"headers": {"": "string"}, "url": "https://example.com"}, - "query": {}, - "servername": "string", - "service": "string", - "shouldTrackHops": False, - "timeout": "number", - "url": "https://example.com", - }, - "variables": [{"example": "string", "id": "string", "name": "VARIABLE_NAME", "pattern": "string", "type": "text"}], - }, - "creator": {"email": "string", "handle": "string", "name": "string"}, - "locations": ["aws:eu-west-3"], - "message": "string", - "monitor_id": "integer", - "name": "string", - "options": { - "accept_self_signed": False, - "allow_insecure": False, - "checkCertificateRevocation": False, - "ci": {"executionRule": "string"}, - 
"device_ids": ["laptop_large"], - "disableCors": False, - "disableCsp": False, - "follow_redirects": False, - "ignoreServerCertificateError": False, - "initialNavigationTimeout": "integer", - "min_failure_duration": "integer", - "min_location_failed": "integer", - "monitor_name": "string", - "monitor_options": {"renotify_interval": "integer"}, - "monitor_priority": "integer", - "noScreenshot": False, - "restricted_roles": ["xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"], - "retry": {"count": "integer", "interval": "number"}, - "rumSettings": {"applicationId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", "clientTokenId": 12345, "isEnabled": True}, - "tick_every": "integer", - }, - "public_id": "string", - "status": "live", - "steps": [ - { - "allowFailure": False, - "isCritical": False, - "name": "string", - "params": {}, - "timeout": "integer", - "type": "assertElementContent", - } - ], - "subtype": "http", - "tags": [], - "type": "string", - } - ] - }, - "Metrics": {"data": [{"id": "test.metric.latency", "type": "metrics"}]}, - "Incidents": { - "data": [ - { - "attributes": { - "created": "2019-09-19T10:00:00.000Z", - "customer_impact_duration": "integer", - "customer_impact_end": "2019-09-19T10:00:00.000Z", - "customer_impact_scope": "An example customer impact scope", - "customer_impact_start": "2019-09-19T10:00:00.000Z", - "customer_impacted": False, - "detected": "2019-09-19T10:00:00.000Z", - "fields": {"": {}}, - "modified": "2019-09-19T10:00:00.000Z", - "notification_handles": [{"display_name": "Jane Doe", "handle": "@test.user@test.com"}], - "public_id": 1, - "resolved": "2019-09-19T10:00:00.000Z", - "time_to_detect": "integer", - "time_to_internal_response": "integer", - "time_to_repair": "integer", - "time_to_resolve": "integer", - "title": "A test incident title", - }, - "id": "00000000-0000-0000-1234-000000000000", - "relationships": { - "attachments": {"data": [{"id": "00000000-0000-abcd-1000-000000000000", "type": "incident_attachments"}]}, - "commander_user": {"data": {"id": "00000000-0000-0000-0000-000000000000", "type": "users"}}, - "created_by_user": {"data": {"id": "00000000-0000-0000-2345-000000000000", "type": "users"}}, - "integrations": {"data": [{"id": "00000000-abcd-0001-0000-000000000000", "type": "incident_integrations"}]}, - "last_modified_by_user": {"data": {"id": "00000000-0000-0000-2345-000000000000", "type": "users"}}, - }, - "type": "incidents", - } - ], - "included": [ - { - "attributes": { - "created_at": "2019-09-19T10:00:00.000Z", - "disabled": False, - "email": "string", - "handle": "string", - "icon": "string", - "modified_at": "2019-09-19T10:00:00.000Z", - "name": "string", - "service_account": False, - "status": "string", - "title": "string", - "verified": False, - }, - "id": "string", - "relationships": { - "org": {"data": {"id": "00000000-0000-beef-0000-000000000000", "type": "orgs"}}, - "other_orgs": {"data": [{"id": "00000000-0000-beef-0000-000000000000", "type": "orgs"}]}, - "other_users": {"data": [{"id": "00000000-0000-0000-2345-000000000000", "type": "users"}]}, - "roles": {"data": [{"id": "3653d3c6-0c75-11ea-ad28-fb5701eabc7d", "type": "roles"}]}, - }, - "type": "users", - } - ], - "meta": {"pagination": {"next_offset": 1000, "offset": 10, "size": 1000}}, - }, - "IncidentTeams": { - "data": [ - { - "attributes": {"created": "2019-09-19T10:00:00.000Z", "modified": "2019-09-19T10:00:00.000Z", "name": "team name"}, - "id": "00000000-7ea3-0000-000a-000000000000", - "relationships": { - "created_by": {"data": {"id": "00000000-0000-0000-2345-000000000000", 
"type": "users"}}, - "last_modified_by": {"data": {"id": "00000000-0000-0000-2345-000000000000", "type": "users"}}, - }, - "type": "teams", - } - ], - "included": [ - { - "attributes": { - "created_at": "2019-09-19T10:00:00.000Z", - "disabled": False, - "email": "string", - "handle": "string", - "icon": "string", - "modified_at": "2019-09-19T10:00:00.000Z", - "name": "string", - "service_account": False, - "status": "string", - "title": "string", - "verified": False, - }, - "id": "string", - "relationships": { - "org": {"data": {"id": "00000000-0000-beef-0000-000000000000", "type": "orgs"}}, - "other_orgs": {"data": [{"id": "00000000-0000-beef-0000-000000000000", "type": "orgs"}]}, - "other_users": {"data": [{"id": "00000000-0000-0000-2345-000000000000", "type": "users"}]}, - "roles": {"data": [{"id": "3653d3c6-0c75-11ea-ad28-fb5701eabc7d", "type": "roles"}]}, - }, - "type": "users", - } - ], - "meta": {"pagination": {"next_offset": 1000, "offset": 10, "size": 1000}}, - }, - "Users": { - "data": [ - { - "attributes": { - "created_at": "2019-09-19T10:00:00.000Z", - "disabled": False, - "email": "string", - "handle": "string", - "icon": "string", - "modified_at": "2019-09-19T10:00:00.000Z", - "name": "string", - "service_account": False, - "status": "string", - "title": "string", - "verified": False, - }, - "id": "string", - "relationships": { - "org": {"data": {"id": "00000000-0000-beef-0000-000000000000", "type": "orgs"}}, - "other_orgs": {"data": [{"id": "00000000-0000-beef-0000-000000000000", "type": "orgs"}]}, - "other_users": {"data": [{"id": "00000000-0000-0000-2345-000000000000", "type": "users"}]}, - "roles": {"data": [{"id": "3653d3c6-0c75-11ea-ad28-fb5701eabc7d", "type": "roles"}]}, - }, - "type": "users", - } - ], - "included": [ - { - "attributes": { - "created_at": "2019-09-19T10:00:00.000Z", - "description": "string", - "disabled": False, - "modified_at": "2019-09-19T10:00:00.000Z", - "name": "string", - "public_id": "string", - "sharing": "string", - "url": "string", - }, - "id": "string", - "type": "orgs", - } - ], - "meta": {"page": {"total_count": "integer", "total_filtered_count": "integer"}}, - }, - "Logs": { - "data": [ - { - "attributes": { - "attributes": {"customAttribute": 123, "duration": 2345}, - "host": "i-0123", - "message": "Host connected to remote", - "service": "agent", - "status": "INFO", - "tags": ["team:A"], - "timestamp": "2019-01-02T09:42:36.320Z", - }, - "id": "AAAAAWgN8Xwgr1vKDQAAAABBV2dOOFh3ZzZobm1mWXJFYTR0OA", - "type": "log", - } - ], - "links": { - "next": "https://app.datadoghq.com/api/v2/logs/event?filter[query]=foo\u0026page[cursor]=eyJzdGFydEF0IjoiQVFBQUFYS2tMS3pPbm40NGV3QUFBQUJCV0V0clRFdDZVbG8zY3pCRmNsbHJiVmxDWlEifQ==" - }, - "meta": { - "elapsed": 132, - "page": {"after": "eyJzdGFydEF0IjoiQVFBQUFYS2tMS3pPbm40NGV3QUFBQUJCV0V0clRFdDZVbG8zY3pCRmNsbHJiVmxDWlEifQ=="}, - "request_id": "MWlFUjVaWGZTTTZPYzM0VXp1OXU2d3xLSVpEMjZKQ0VKUTI0dEYtM3RSOFVR", - "status": "done", - "warnings": [ - { - "code": "unknown_index", - "detail": "indexes: foo, bar", - "title": "One or several indexes are missing or invalid, results hold data from the other indexes", - } - ], - }, - }, - "AuditLogs": { - "data": [ - { - "attributes": { - "attributes": {"customAttribute": 123, "duration": 2345}, - "service": "web-app", - "tags": ["team:A"], - "timestamp": "2019-01-02T09:42:36.320Z", - }, - "id": "AAAAAWgN8Xwgr1vKDQAAAABBV2dOOFh3ZzZobm1mWXJFYTR0OA", - "type": "audit", - } - ], - "links": { - "next": 
"https://app.datadoghq.com/api/v2/audit/event?filter[query]=foo\u0026page[cursor]=eyJzdGFydEF0IjoiQVFBQUFYS2tMS3pPbm40NGV3QUFBQUJCV0V0clRFdDZVbG8zY3pCRmNsbHJiVmxDWlEifQ==" - }, - "meta": { - "elapsed": 132, - "page": {"after": "eyJzdGFydEF0IjoiQVFBQUFYS2tMS3pPbm40NGV3QUFBQUJCV0V0clRFdDZVbG8zY3pCRmNsbHJiVmxDWlEifQ=="}, - "request_id": "MWlFUjVaWGZTTTZPYzM0VXp1OXU2d3xLSVpEMjZKQ0VKUTI0dEYtM3RSOFVR", - "status": "done", - "warnings": [ - { - "code": "unknown_index", - "detail": "indexes: foo, bar", - "title": "One or several indexes are missing or invalid, results hold data from the other indexes", - } - ], - }, - }, - } - - -@fixture(name="mock_stream") -def mock_stream_fixture(requests_mock): - def _mock_stream(path, response=None): - if response is None: - response = {} - - url = f"https://api.datadoghq.com/api/v1/{path}" - requests_mock.get(url, json=response) - - return _mock_stream diff --git a/airbyte-integrations/connectors/source-datadog/unit_tests/test_source.py b/airbyte-integrations/connectors/source-datadog/unit_tests/test_source.py deleted file mode 100644 index c2d4067f14e4..000000000000 --- a/airbyte-integrations/connectors/source-datadog/unit_tests/test_source.py +++ /dev/null @@ -1,58 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from unittest.mock import MagicMock - -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models import ConnectorSpecification -from source_datadog.source import SourceDatadog - -logger = AirbyteLogger() - - -def test_check_connection_ok(config, mock_stream, mock_responses): - mock_stream("dashboard", response=mock_responses.get("Dashboards")) - ok, error_msg = SourceDatadog().check_connection(logger, config=config) - - assert ok - assert not error_msg - - -def test_check_connection_exception(config, mock_stream, mock_responses): - mock_stream("invalid_path", response=mock_responses.get("Dashboards")) - ok, error_msg = SourceDatadog().check_connection(logger, config=config) - - assert not ok - assert error_msg - - -def test_check_connection_empty_config(config): - config = {} - - ok, error_msg = SourceDatadog().check_connection(logger, config=config) - - assert not ok - assert error_msg - - -def test_check_connection_invalid_config(config): - config.pop("api_key") - - ok, error_msg = SourceDatadog().check_connection(logger, config=config) - - assert not ok - assert error_msg - - -def test_streams(config): - streams = SourceDatadog().streams(config) - - assert len(streams) == 9 - - -def test_spec(): - logger_mock = MagicMock() - spec = SourceDatadog().spec(logger_mock) - - assert isinstance(spec, ConnectorSpecification) diff --git a/airbyte-integrations/connectors/source-datadog/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-datadog/unit_tests/test_streams.py deleted file mode 100644 index c20e60872421..000000000000 --- a/airbyte-integrations/connectors/source-datadog/unit_tests/test_streams.py +++ /dev/null @@ -1,100 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - -import json -from unittest.mock import MagicMock, patch - -import pytest -import requests -import requests_mock as req_mock -from airbyte_cdk.models import SyncMode -from source_datadog.source import SourceDatadog -from source_datadog.streams import ( - AuditLogs, - Dashboards, - DatadogStream, - Downtimes, - Incidents, - IncidentTeams, - Logs, - Metrics, - SyntheticTests, - Users, -) - - -@pytest.mark.parametrize( - "stream", - [AuditLogs, Dashboards, Downtimes, Incidents, IncidentTeams, Logs, Metrics, SyntheticTests, Users], -) -def test_task_stream(requests_mock, stream, config, mock_responses): - requests_mock.get(req_mock.ANY, json=mock_responses.get(stream.__name__)) - requests_mock.post(req_mock.ANY, json=mock_responses.get(stream.__name__)) - args = SourceDatadog().connector_config(config) - instance = stream(**args) - - stream_slice = instance.stream_slices(sync_mode=SyncMode.full_refresh) - record = next(instance.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice)) - - assert record - - -@patch.multiple(DatadogStream, __abstractmethods__=set()) -def test_next_page_token(config): - stream = DatadogStream( - query=config["query"], - max_records_per_request=config["max_records_per_request"], - start_date=config["start_date"], - end_date=config["end_date"], - ) - inputs = {"response": MagicMock()} - assert stream.next_page_token(**inputs) is None - - -@pytest.mark.parametrize( - "stream", - [AuditLogs, Dashboards, Downtimes, Incidents, IncidentTeams, Logs, Metrics, SyntheticTests, Users], -) -def test_next_page_token_empty_response(stream, config): - expected_token = None - args = SourceDatadog().connector_config(config) - instance = stream(**args) - response = requests.Response() - response._content = json.dumps({}).encode("utf-8") - assert instance.next_page_token(response=response) == expected_token - - -@pytest.mark.parametrize( - "stream", - [AuditLogs, Logs], -) -def test_next_page_token_inc(stream, config): - args = SourceDatadog().connector_config(config) - instance = stream(**args) - response = requests.Response() - body_content = {"meta": {"page": {"after": "test_cursor"}}} - response._content = json.dumps(body_content).encode("utf-8") - result = instance.next_page_token(response=response) - assert result.get("page").get("cursor") == "test_cursor" - - -@pytest.mark.parametrize( - "stream", - [Incidents, IncidentTeams], -) -def test_next_page_token_paginated(stream, config): - args = SourceDatadog().connector_config(config) - instance = stream(**args) - response = requests.Response() - body_content = { - "meta": { - "pagination": { - "offset": 998, - "next_offset": 999, - } - } - } - response._content = json.dumps(body_content).encode("utf-8") - result = instance.next_page_token(response=response) - assert result.get("offset") == 999 diff --git a/airbyte-integrations/connectors/source-greenhouse/Dockerfile b/airbyte-integrations/connectors/source-greenhouse/Dockerfile index dedbaeca1b11..72b8f80ec06d 100644 --- a/airbyte-integrations/connectors/source-greenhouse/Dockerfile +++ b/airbyte-integrations/connectors/source-greenhouse/Dockerfile @@ -4,10 +4,14 @@ FROM python:3.9-slim RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* WORKDIR /airbyte/integration_code -COPY setup.py ./ + +ENV AIRBYTE_ROOT "/airbyte/integration_code" +COPY ./airbyte-cdk/ ./airbyte-cdk + +COPY ./airbyte-integrations/connectors/source-greenhouse/source_greenhouse ./source_greenhouse +COPY ./airbyte-integrations/connectors/source-greenhouse/main.py . 
+COPY ./airbyte-integrations/connectors/source-greenhouse/setup.py . RUN pip install . -COPY source_greenhouse ./source_greenhouse -COPY main.py ./ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] diff --git a/airbyte-integrations/connectors/source-greenhouse/build.gradle b/airbyte-integrations/connectors/source-greenhouse/build.gradle index 330da24791ab..0ae6899c0018 100644 --- a/airbyte-integrations/connectors/source-greenhouse/build.gradle +++ b/airbyte-integrations/connectors/source-greenhouse/build.gradle @@ -8,6 +8,10 @@ airbytePython { moduleDirectory 'source_greenhouse' } +airbyteDocker { + runFromPath "." +} + dependencies { implementation files(project(':airbyte-integrations:bases:source-acceptance-test').airbyteDocker.outputs) } diff --git a/airbyte-integrations/connectors/source-greenhouse/setup.py b/airbyte-integrations/connectors/source-greenhouse/setup.py index ed5e097474ea..667c14fcc196 100644 --- a/airbyte-integrations/connectors/source-greenhouse/setup.py +++ b/airbyte-integrations/connectors/source-greenhouse/setup.py @@ -3,8 +3,13 @@ # +import os + from setuptools import find_packages, setup +AIRBYTE_ROOT = os.environ.get("AIRBYTE_ROOT") or "/".join(os.getcwd().split("/")[:-3]) +PATH_TO_CDK = f"{AIRBYTE_ROOT}/airbyte-cdk/python#egg=airbyte_cdk" + TEST_REQUIREMENTS = [ "pytest~=6.1", "pytest-mock~=3.6", @@ -16,7 +21,7 @@ author="Airbyte", author_email="contact@airbyte.io", packages=find_packages(), - install_requires=["airbyte-cdk~=0.1", "dataclasses-jsonschema==2.15.1"], + install_requires=[f"airbyte-cdk @ file://{PATH_TO_CDK}#egg=airbyte_cdk", "dataclasses-jsonschema==2.15.1"], package_data={"": ["*.json", "*.yaml", "schemas/*.json"]}, extras_require={ "tests": TEST_REQUIREMENTS, diff --git a/airbyte-integrations/connectors/source-gridly/.dockerignore b/airbyte-integrations/connectors/source-gridly/.dockerignore deleted file mode 100644 index f86364dadaf3..000000000000 --- a/airbyte-integrations/connectors/source-gridly/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_gridly -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-gridly/Dockerfile b/airbyte-integrations/connectors/source-gridly/Dockerfile deleted file mode 100644 index f8f0fba5a280..000000000000 --- a/airbyte-integrations/connectors/source-gridly/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.13-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. 
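The greenhouse setup.py change above swaps the published `airbyte-cdk~=0.1` requirement for a PEP 508 direct reference to the monorepo's local CDK checkout. One apparent slip: `PATH_TO_CDK` already ends in `#egg=airbyte_cdk`, and the f-string in `install_requires` appends the fragment a second time, yielding `...#egg=airbyte_cdk#egg=airbyte_cdk`. A sketch composing the reference once:

```python
# Sketch of the direct-reference requirement built in the greenhouse
# setup.py above. PATH_TO_CDK already carries "#egg=airbyte_cdk", so the
# f-string in install_requires appears to emit the fragment twice;
# composing it once avoids the duplicate.
import os

AIRBYTE_ROOT = os.environ.get("AIRBYTE_ROOT") or "/".join(os.getcwd().split("/")[:-3])
CDK_PATH = f"{AIRBYTE_ROOT}/airbyte-cdk/python"

requirement = f"airbyte-cdk @ file://{CDK_PATH}#egg=airbyte_cdk"
print(requirement)  # e.g. airbyte-cdk @ file:///airbyte/integration_code/airbyte-cdk/python#egg=airbyte_cdk
```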
-RUN apk --no-cache add bash
-
-# copy payload code only
-COPY main.py ./
-COPY source_gridly ./source_gridly
-
-ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
-ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
-
-LABEL io.airbyte.version=0.1.0
-LABEL io.airbyte.name=airbyte/source-gridly
diff --git a/airbyte-integrations/connectors/source-gridly/README.md b/airbyte-integrations/connectors/source-gridly/README.md
deleted file mode 100644
index 375f788f5b32..000000000000
--- a/airbyte-integrations/connectors/source-gridly/README.md
+++ /dev/null
@@ -1,132 +0,0 @@
-# Gridly Source
-
-This is the repository for the Gridly source connector, written in Python.
-For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/gridly).
-
-## Local development
-
-### Prerequisites
-**To iterate on this connector, make sure to complete this prerequisites section.**
-
-#### Minimum Python version required `= 3.9.0`
-
-#### Build & Activate Virtual Environment and install dependencies
-From this connector directory, create a virtual environment:
-```
-python -m venv .venv
-```
-
-This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
-development environment of choice. To activate it from the terminal, run:
-```
-source .venv/bin/activate
-pip install -r requirements.txt
-pip install '.[tests]'
-```
-If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
-
-Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
-used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
-If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
-should work as you expect.
-
-#### Building via Gradle
-You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow.
-
-To build using Gradle, from the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:source-gridly:build
-```
-
-#### Create credentials
-**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/gridly)
-to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_gridly/spec.yaml` file.
-Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
-See `integration_tests/sample_config.json` for a sample config file.
-
-**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source gridly test creds`
-and place them into `secrets/config.json`.
-
-### Locally running the connector
-```
-python main.py spec
-python main.py check --config secrets/config.json
-python main.py discover --config secrets/config.json
-python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
-```
-
-### Locally running the connector docker image
-
-#### Build
-First, make sure you build the latest Docker image:
-```
-docker build . -t airbyte/source-gridly:dev
-```
-
-You can also build the connector image via Gradle:
-```
-./gradlew :airbyte-integrations:connectors:source-gridly:airbyteDocker
-```
-When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
-the Dockerfile.
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/source-gridly:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-gridly:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-gridly:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-gridly:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-## Testing
-Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
-First install test dependencies into your virtual environment:
-```
-pip install .[tests]
-```
-### Unit Tests
-To run unit tests locally, from the connector directory run:
-```
-python -m pytest unit_tests
-```
-
-### Integration Tests
-There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector).
-#### Custom Integration tests
-Place custom tests inside the `integration_tests/` folder, then, from the connector root, run
-```
-python -m pytest integration_tests
-```
-#### Acceptance Tests
-Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
-If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
-To run your integration tests together with the acceptance tests, from the connector root, run
-```
-python -m pytest integration_tests -p integration_tests.acceptance
-```
-To run your integration tests with Docker, use the Gradle commands in the next section.
-
-### Using gradle to run tests
-All commands should be run from the Airbyte project root.
-To run unit tests:
-```
-./gradlew :airbyte-integrations:connectors:source-gridly:unitTest
-```
-To run acceptance and custom integration tests:
-```
-./gradlew :airbyte-integrations:connectors:source-gridly:integrationTest
-```
-
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies into two groups:
-* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
-* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing unit and integration tests.
-1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
-1. Create a Pull Request.
-1. Pat yourself on the back for being an awesome contributor.
-1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/source-gridly/acceptance-test-config.yml b/airbyte-integrations/connectors/source-gridly/acceptance-test-config.yml
deleted file mode 100644
index db3e7174fe01..000000000000
--- a/airbyte-integrations/connectors/source-gridly/acceptance-test-config.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
-# for more information about how to configure these tests
-connector_image: airbyte/source-gridly:dev
-tests:
-  spec:
-    - spec_path: "source_gridly/spec.yaml"
-  connection:
-    - config_path: "secrets/config.json"
-      status: "succeed"
-    - config_path: "integration_tests/invalid_config.json"
-      status: "failed"
-  discovery:
-    - config_path: "secrets/config.json"
-  basic_read:
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/configured_catalog.json"
-      empty_streams: []
-  full_refresh:
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-gridly/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-gridly/acceptance-test-docker.sh
deleted file mode 100644
index c51577d10690..000000000000
--- a/airbyte-integrations/connectors/source-gridly/acceptance-test-docker.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env sh
-
-# Build latest connector image
-docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-)
-
-# Pull latest acctest image
-docker pull airbyte/source-acceptance-test:latest
-
-# Run
-docker run --rm -it \
-    -v /var/run/docker.sock:/var/run/docker.sock \
-    -v /tmp:/tmp \
-    -v $(pwd):/test_input \
-    airbyte/source-acceptance-test \
-    --acceptance-test-config /test_input
-
diff --git a/airbyte-integrations/connectors/source-gridly/build.gradle b/airbyte-integrations/connectors/source-gridly/build.gradle
deleted file mode 100644
index fa61deefe078..000000000000
--- a/airbyte-integrations/connectors/source-gridly/build.gradle
+++ /dev/null
@@ -1,9 +0,0 @@
-plugins {
-    id 'airbyte-python'
-    id 'airbyte-docker'
-    id 'airbyte-source-acceptance-test'
-}
-
-airbytePython {
-    moduleDirectory 'source_gridly'
-}
diff --git a/airbyte-integrations/connectors/source-gridly/integration_tests/__init__.py b/airbyte-integrations/connectors/source-gridly/integration_tests/__init__.py
deleted file mode 100644
index 1100c1c58cf5..000000000000
--- a/airbyte-integrations/connectors/source-gridly/integration_tests/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-# diff --git a/airbyte-integrations/connectors/source-gridly/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-gridly/integration_tests/abnormal_state.json deleted file mode 100644 index 52b0f2c2118f..000000000000 --- a/airbyte-integrations/connectors/source-gridly/integration_tests/abnormal_state.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "todo-stream-name": { - "todo-field-name": "todo-abnormal-value" - } -} diff --git a/airbyte-integrations/connectors/source-gridly/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-gridly/integration_tests/acceptance.py deleted file mode 100644 index 1302b2f57e10..000000000000 --- a/airbyte-integrations/connectors/source-gridly/integration_tests/acceptance.py +++ /dev/null @@ -1,16 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -import pytest - -pytest_plugins = ("source_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - # TODO: setup test dependencies if needed. otherwise remove the TODO comments - yield - # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-gridly/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-gridly/integration_tests/configured_catalog.json deleted file mode 100644 index 4ddbb011b027..000000000000 --- a/airbyte-integrations/connectors/source-gridly/integration_tests/configured_catalog.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "Default view", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/airbyte-integrations/connectors/source-gridly/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-gridly/integration_tests/invalid_config.json deleted file mode 100644 index f3732995784f..000000000000 --- a/airbyte-integrations/connectors/source-gridly/integration_tests/invalid_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "todo-wrong-field": "this should be an incomplete config file, used in standard tests" -} diff --git a/airbyte-integrations/connectors/source-gridly/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-gridly/integration_tests/sample_state.json deleted file mode 100644 index 3587e579822d..000000000000 --- a/airbyte-integrations/connectors/source-gridly/integration_tests/sample_state.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "todo-stream-name": { - "todo-field-name": "value" - } -} diff --git a/airbyte-integrations/connectors/source-gridly/main.py b/airbyte-integrations/connectors/source-gridly/main.py deleted file mode 100644 index 00d2562d8b72..000000000000 --- a/airbyte-integrations/connectors/source-gridly/main.py +++ /dev/null @@ -1,13 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_gridly import SourceGridly - -if __name__ == "__main__": - source = SourceGridly() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-gridly/requirements.txt b/airbyte-integrations/connectors/source-gridly/requirements.txt deleted file mode 100644 index 0411042aa091..000000000000 --- a/airbyte-integrations/connectors/source-gridly/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ --e ../../bases/source-acceptance-test --e . 
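For context on the requirements file just removed: the two `-e` entries are how a connector pulls monorepo dependencies during local development — `pip` resolves each editable path relative to the connector directory and then falls through to that package's `setup.py` (the connector's own `setup.py` is the next file in this diff). A minimal sketch of the equivalent manual installs, assuming the standard connector layout:

```bash
# Run from airbyte-integrations/connectors/source-gridly (illustrative only):
pip install -e ../../bases/source-acceptance-test  # monorepo acceptance-test harness
pip install -e .                                   # editable install; runs this connector's setup.py
```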
diff --git a/airbyte-integrations/connectors/source-gridly/setup.py b/airbyte-integrations/connectors/source-gridly/setup.py
deleted file mode 100644
index 60ae90355d17..000000000000
--- a/airbyte-integrations/connectors/source-gridly/setup.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-
-from setuptools import find_packages, setup
-
-MAIN_REQUIREMENTS = [
-    "airbyte-cdk",
-]
-
-TEST_REQUIREMENTS = [
-    "pytest~=6.1",
-    "pytest-mock~=3.6.1",
-    "source-acceptance-test",
-]
-
-setup(
-    name="source_gridly",
-    description="Source implementation for Gridly.",
-    author="Airbyte",
-    author_email="contact@airbyte.io",
-    packages=find_packages(),
-    install_requires=MAIN_REQUIREMENTS,
-    package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]},
-    extras_require={
-        "tests": TEST_REQUIREMENTS,
-    },
-)
diff --git a/airbyte-integrations/connectors/source-gridly/source_gridly/__init__.py b/airbyte-integrations/connectors/source-gridly/source_gridly/__init__.py
deleted file mode 100644
index 7eedbd6d1c51..000000000000
--- a/airbyte-integrations/connectors/source-gridly/source_gridly/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-
-from .source import SourceGridly
-
-__all__ = ["SourceGridly"]
diff --git a/airbyte-integrations/connectors/source-gridly/source_gridly/helpers.py b/airbyte-integrations/connectors/source-gridly/source_gridly/helpers.py
deleted file mode 100644
index fc3de2bc9644..000000000000
--- a/airbyte-integrations/connectors/source-gridly/source_gridly/helpers.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-from typing import Any, Dict
-
-import requests
-from airbyte_cdk.models import AirbyteStream
-from airbyte_cdk.models.airbyte_protocol import DestinationSyncMode, SyncMode
-from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator
-
-
-class Helpers(object):
-    base_url = "https://api.gridly.com/v1/"
-
-    @staticmethod
-    def view_detail_url(view_id: str) -> str:
-        return Helpers.base_url + f"views/{view_id}"
-
-    @staticmethod
-    def view_list_url(grid_id: str) -> str:
-        return Helpers.base_url + f"views?gridId={grid_id}"
-
-    @staticmethod
-    def grid_detail_url(grid_id: str) -> str:
-        return Helpers.base_url + f"grids/{grid_id}"
-
-    @staticmethod
-    def get_views(auth: TokenAuthenticator, grid_id: str) -> Dict[str, Any]:
-        url = Helpers.view_list_url(grid_id)
-        try:
-            response = requests.get(url, headers=auth.get_auth_header())
-            response.raise_for_status()
-        except requests.exceptions.HTTPError as e:
-            if e.response.status_code == 401:
-                raise Exception("Invalid API Key")
-            elif e.response.status_code == 404:
-                raise Exception(f"Grid id '{grid_id}' not found")
-            else:
-                raise Exception(f"Error listing views of grid '{grid_id}'")
-
-        return response.json()
-
-    @staticmethod
-    def get_grid(auth: TokenAuthenticator, grid_id: str) -> Dict[str, Any]:
-        url = Helpers.grid_detail_url(grid_id)
-        try:
-            response = requests.get(url, headers=auth.get_auth_header())
-            response.raise_for_status()
-        except requests.exceptions.HTTPError as e:
-            if e.response.status_code == 401:
-                raise Exception("Invalid API Key")
-            elif e.response.status_code == 404:
-                raise Exception(f"Grid '{grid_id}' not found")
-            else:
-                raise Exception(f"Error getting grid {grid_id}: {e}")
-        return response.json()
-
-    @staticmethod
-    def get_view(auth: TokenAuthenticator, view_id: str) -> Dict[str, Any]:
- url = Helpers.view_detail_url(view_id) - try: - response = requests.get(url, headers=auth.get_auth_header()) - response.raise_for_status() - except requests.exceptions.HTTPError as e: - if e.response.status_code == 401: - raise Exception("Invalid API Key") - elif e.response.status_code == 404: - raise Exception(f"View '{view_id}' not found") - else: - raise Exception(f"Error getting view {view_id}: {e}") - return response.json() - - @staticmethod - def to_airbyte_data_type(column_type: str) -> str: - if column_type == "number": - return "number" - elif column_type == "boolean": - return "boolean" - else: - return "string" - - @staticmethod - def get_json_schema(view: Dict[str, Any]) -> Dict[str, str]: - columns = view.get("columns", {}) - properties = {} - - for column in columns: - column_id = column.get("id") - column_type = column.get("type", "singleLine") - properties[column_id] = {"type": ["null", Helpers.to_airbyte_data_type(column_type)]} - - json_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": properties, - } - return json_schema - - @staticmethod - def get_airbyte_stream(view: Dict[str, Any]) -> AirbyteStream: - view_name = view.get("name") - columns = view.get("columns", {}) - properties = {} - - for column in columns: - column_id = column.get("id") - column_type = column.get("type", "singleLine") - properties[column_id] = {"type": ["null", Helpers.to_airbyte_data_type(column_type)]} - - json_schema = Helpers.get_json_schema(view) - - return AirbyteStream( - name=view_name, - json_schema=json_schema, - supported_sync_modes=[SyncMode.full_refresh], - supported_destination_sync_modes=[DestinationSyncMode.overwrite, DestinationSyncMode.append_dedup], - ) - - @staticmethod - def transform_record(record: Dict[str, Any], schema: Dict[str, Any]) -> Dict[str, Any]: - schema_properties = schema.get("properties") - columns = [k for k, v in schema_properties.items()] - - cells = record.get("cells") - - transformed_record = {} - if "_recordId" in columns: - transformed_record["_recordId"] = record.get("id") - if "_path" in columns: - transformed_record["_path"] = record.get("path", "") - - for cell in cells: - transformed_record[cell.get("columnId")] = cell.get("value") - - return transformed_record diff --git a/airbyte-integrations/connectors/source-gridly/source_gridly/source.py b/airbyte-integrations/connectors/source-gridly/source_gridly/source.py deleted file mode 100644 index f96d8dcb7e22..000000000000 --- a/airbyte-integrations/connectors/source-gridly/source_gridly/source.py +++ /dev/null @@ -1,125 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-#
-
-
-import logging
-import math
-from abc import ABC
-from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Tuple
-
-import requests
-from airbyte_cdk.models import AirbyteCatalog
-from airbyte_cdk.sources import AbstractSource
-from airbyte_cdk.sources.streams import Stream
-from airbyte_cdk.sources.streams.http import HttpStream
-from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator
-from source_gridly.helpers import Helpers
-
-
-# Basic full refresh stream
-class GridlyStream(HttpStream, ABC):
-    url_base = Helpers.base_url
-    primary_key = "id"
-    current_page = 1
-    limit = 100
-
-    def __init__(self, view_id: str, view_name: str, schema: Dict[str, Any], **kwargs):
-        super().__init__(**kwargs)
-        self.view_id = view_id
-        self.view_name = view_name
-        self.schema = schema
-
-    @property
-    def name(self):
-        return self.view_name
-
-    def get_json_schema(self) -> Mapping[str, Any]:
-        return self.schema
-
-    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
-        total_count = int(response.headers.get("x-total-count", 0))
-        total_page = math.ceil(total_count / self.limit)
-
-        self.logger.info("Total page: " + str(total_page))
-
-        if self.current_page >= total_page:
-            self.logger.info("No more pages to load " + str(self.current_page))
-            return None
-
-        page_token = {"offset": self.current_page * self.limit, "limit": self.limit}
-        self.current_page += 1
-
-        return page_token
-
-    def request_params(
-        self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
-    ) -> MutableMapping[str, Any]:
-        if next_page_token is None:
-            return {}
-
-        offset = next_page_token.get("offset")
-        limit = next_page_token.get("limit")
-
-        page = '{"offset":' + str(offset) + ',"limit":' + str(limit) + "}"
-
-        self.logger.info("Fetching page: " + page)
-
-        return {"page": page}
-
-    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
-        records = response.json()
-        if isinstance(records, list):
-            for record in records:
-                yield Helpers.transform_record(record, self.schema)
-        else:
-            raise Exception(f"Unsupported type of response data for stream {self.name}")
-
-    def path(
-        self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
-    ) -> str:
-        return f"views/{self.view_id}/records"
-
-
-# Source
-class SourceGridly(AbstractSource):
-    def check_connection(self, logger, config) -> Tuple[bool, Any]:
-        api_key = config.get("api_key")
-        grid_id = config.get("grid_id")
-        auth = TokenAuthenticator(auth_method="ApiKey", token=api_key)
-
-        logger.info(f"Checking connection on grid {grid_id}")
-        Helpers.get_grid(auth=auth, grid_id=grid_id)
-
-        return True, None
-
-    def discover(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteCatalog:
-        api_key = config.get("api_key")
-        grid_id = config.get("grid_id")
-        auth = TokenAuthenticator(auth_method="ApiKey", token=api_key)
-
-        logger.info(f"Running discovery on grid {grid_id}")
-        views = Helpers.get_views(auth=auth, grid_id=grid_id)
-
-        streams = []
-        for view in views:
-            stream = Helpers.get_airbyte_stream(view)
-            streams.append(stream)
-
-        return AirbyteCatalog(streams=streams)
-
-    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
-        api_key = config.get("api_key")
-        grid_id = config.get("grid_id")
-        auth = TokenAuthenticator(auth_method="ApiKey", token=api_key)
-        views = Helpers.get_views(auth=auth, grid_id=grid_id)
-
-        streams = []
-        for view in views:
-            view_id = view.get("id")
-            view_name = view.get("name")
-            schema = Helpers.get_json_schema(view)
-            stream = GridlyStream(view_id=view_id, view_name=view_name, schema=schema, authenticator=auth)
-            streams.append(stream)
-
-        return streams
diff --git a/airbyte-integrations/connectors/source-gridly/source_gridly/spec.yaml b/airbyte-integrations/connectors/source-gridly/source_gridly/spec.yaml
deleted file mode 100644
index f03e53aca457..000000000000
--- a/airbyte-integrations/connectors/source-gridly/source_gridly/spec.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
-documentationUrl: https://docs.airbyte.com/integrations/sources/gridly
-connectionSpecification:
-  $schema: http://json-schema.org/draft-07/schema#
-  title: Gridly Spec
-  type: object
-  required:
-    - api_key
-  properties:
-    api_key:
-      type: string
-      title: API Key
-      airbyte_secret: true
-    grid_id:
-      type: string
-      title: Grid ID
-      description: The ID of a grid, or the ID of a branch
diff --git a/airbyte-integrations/connectors/source-gridly/unit_tests/__init__.py b/airbyte-integrations/connectors/source-gridly/unit_tests/__init__.py
deleted file mode 100644
index 1100c1c58cf5..000000000000
--- a/airbyte-integrations/connectors/source-gridly/unit_tests/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
diff --git a/airbyte-integrations/connectors/source-gridly/unit_tests/test_helpers.py b/airbyte-integrations/connectors/source-gridly/unit_tests/test_helpers.py
deleted file mode 100644
index c6c7b577dae0..000000000000
--- a/airbyte-integrations/connectors/source-gridly/unit_tests/test_helpers.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-import pytest
-from source_gridly.helpers import Helpers
-
-
-@pytest.fixture
-def gridly_column_type():
-    return "singleLine"
-
-
-@pytest.fixture
-def expected_data_type():
-    return "string"
-
-
-@pytest.fixture
-def view_response():
-    return {
-        "id": "view1",
-        "name": "Default view",
-        "columns": [{"id": "_recordId"}, {"id": "column1", "type": "singleLine"}, {"id": "column2", "type": "number"}],
-    }
-
-
-@pytest.fixture
-def record_response():
-    return [
-        {"id": "record1", "cells": [{"columnId": "column1", "value": "Value 1"}, {"columnId": "column2", "value": 1}]},
-        {"id": "record2", "cells": [{"columnId": "column1", "value": "Value 2"}, {"columnId": "column2", "value": 2}]},
-    ]
-
-
-@pytest.fixture
-def expected_json_schema():
-    return {
-        "$schema": "http://json-schema.org/draft-07/schema#",
-        "properties": {
-            "_recordId": {"type": ["null", "string"]},
-            "column1": {"type": ["null", "string"]},
-            "column2": {"type": ["null", "number"]},
-        },
-        "type": "object",
-    }
-
-
-@pytest.fixture
-def expected_transformed_record():
-    return {"_recordId": "record1", "column1": "Value 1", "column2": 1}
-
-
-def test_to_airbyte_data_type(gridly_column_type, expected_data_type):
-    assert expected_data_type == Helpers.to_airbyte_data_type(gridly_column_type)
-
-
-def test_get_json_schema(view_response, expected_json_schema):
-    json_schema = Helpers.get_json_schema(view_response)
-    assert json_schema == expected_json_schema
-
-
-def test_get_airbyte_stream(view_response, expected_json_schema):
-    stream = Helpers.get_airbyte_stream(view_response)
-    assert stream
-    assert stream.name == view_response.get("name")
-    assert stream.json_schema == expected_json_schema
-
-
-def test_transform_record(view_response, record_response, expected_transformed_record):
-    json_schema = Helpers.get_json_schema(view_response)
-    record1 = record_response[0]
-    transformed_record = Helpers.transform_record(record1, json_schema)
-    assert expected_transformed_record == transformed_record
diff --git a/airbyte-integrations/connectors/source-gridly/unit_tests/test_source.py b/airbyte-integrations/connectors/source-gridly/unit_tests/test_source.py
deleted file mode 100644
index 30d09ffcfe94..000000000000
--- a/airbyte-integrations/connectors/source-gridly/unit_tests/test_source.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
-#
-
-from unittest.mock import MagicMock
-
-from source_gridly.source import SourceGridly
-
-CONFIG = {"api_key": "IbuIBdkFjrJps6", "grid_id": "4539o52kmdjmzwp"}
-
-
-def test_check_connection(mocker):
-    source = SourceGridly()
-    logger_mock, config_mock = MagicMock(), CONFIG
-    assert source.check_connection(logger_mock, config_mock) == (True, None)
-
-
-def test_streams(mocker):
-    source = SourceGridly()
-    config_mock = CONFIG
-    streams = source.streams(config_mock)
-    expected_streams_number = 2
-    assert len(streams) == expected_streams_number
diff --git a/airbyte-integrations/connectors/source-sendgrid/Dockerfile b/airbyte-integrations/connectors/source-sendgrid/Dockerfile
index 9fb8c810bb2f..29e1da67d5d2 100644
--- a/airbyte-integrations/connectors/source-sendgrid/Dockerfile
+++ b/airbyte-integrations/connectors/source-sendgrid/Dockerfile
@@ -4,9 +4,12 @@ FROM python:3.9-slim
 RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/*
 
 WORKDIR /airbyte/integration_code
-COPY source_sendgrid ./source_sendgrid
-COPY main.py ./
-COPY setup.py ./
+ENV AIRBYTE_ROOT "/airbyte/integration_code"
+COPY ./airbyte-cdk/ ./airbyte-cdk
+
+COPY ./airbyte-integrations/connectors/source-sendgrid/source_sendgrid ./source_sendgrid
+COPY ./airbyte-integrations/connectors/source-sendgrid/main.py .
+COPY ./airbyte-integrations/connectors/source-sendgrid/setup.py .
 RUN pip install .
 
 ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
diff --git a/airbyte-integrations/connectors/source-sendgrid/build.gradle b/airbyte-integrations/connectors/source-sendgrid/build.gradle
index fb82926f18a5..327ac56d7456 100644
--- a/airbyte-integrations/connectors/source-sendgrid/build.gradle
+++ b/airbyte-integrations/connectors/source-sendgrid/build.gradle
@@ -8,6 +8,10 @@ airbytePython {
     moduleDirectory 'source_sendgrid'
 }
 
+airbyteDocker {
+    runFromPath "."
+}
+
 dependencies {
     implementation files(project(':airbyte-integrations:bases:source-acceptance-test').airbyteDocker.outputs)
 }
diff --git a/airbyte-integrations/connectors/source-sendgrid/setup.py b/airbyte-integrations/connectors/source-sendgrid/setup.py
index 2a287c1387a6..a3ba456ad3ff 100644
--- a/airbyte-integrations/connectors/source-sendgrid/setup.py
+++ b/airbyte-integrations/connectors/source-sendgrid/setup.py
@@ -2,11 +2,15 @@
 # Copyright (c) 2022 Airbyte, Inc., all rights reserved.
 #
 
+import os
+
 from setuptools import find_packages, setup
 
+AIRBYTE_ROOT = os.environ.get("AIRBYTE_ROOT") or "/".join(os.getcwd().split("/")[:-3])
+PATH_TO_CDK = f"{AIRBYTE_ROOT}/airbyte-cdk/python"
+
 MAIN_REQUIREMENTS = [
-    "airbyte-cdk",
+    f"airbyte-cdk @ file://{PATH_TO_CDK}#egg=airbyte_cdk",
     "backoff",
     "requests",
 ]
diff --git a/buildSrc/src/main/groovy/airbyte-docker.gradle b/buildSrc/src/main/groovy/airbyte-docker.gradle
index f89b20896e04..f33a1ee2c68a 100644
--- a/buildSrc/src/main/groovy/airbyte-docker.gradle
+++ b/buildSrc/src/main/groovy/airbyte-docker.gradle
@@ -2,20 +2,16 @@ import org.gradle.api.DefaultTask
 import org.gradle.api.Plugin
 import org.gradle.api.Project
 import org.gradle.api.file.FileCollection
-import org.gradle.api.tasks.CacheableTask
-import org.gradle.api.tasks.Input
-import org.gradle.api.tasks.InputDirectory
-import org.gradle.api.tasks.InputFiles
-import org.gradle.api.tasks.Internal
-import org.gradle.api.tasks.OutputFile
-import org.gradle.api.tasks.PathSensitive
-import org.gradle.api.tasks.PathSensitivity
-import org.gradle.api.tasks.TaskAction
+import org.gradle.api.tasks.*
 import org.slf4j.Logger
 
 import java.nio.file.Path
-import java.security.MessageDigest
 import java.nio.file.Paths
+import java.security.MessageDigest
+
+class AirbyteDockerConfiguration {
+    String runFrom
+}
 
 @CacheableTask
 abstract class AirbyteDockerTask extends DefaultTask {
@@ -36,6 +32,9 @@ abstract class AirbyteDockerTask extends DefaultTask {
     @Input
     String dockerfileName
 
+    @Input
+    String runFromPath = ""
+
     @Input
     boolean followSymlinks = false
 
@@ -51,7 +50,7 @@ abstract class AirbyteDockerTask extends DefaultTask {
         def buildPlatform = System.getenv('DOCKER_BUILD_PLATFORM') ?: isArm64 ? 'linux/arm64' : 'amd64'
 
         project.exec {
-            commandLine scriptPath, rootDir.absolutePath, projectDir.absolutePath, dockerfileName, tag, idFileOutput.absolutePath, followSymlinks, buildPlatform
+            commandLine scriptPath, rootDir.absolutePath, projectDir.absolutePath, dockerfileName, tag, idFileOutput.absolutePath, followSymlinks, buildPlatform, runFromPath
         }
     }
 }
diff --git a/tools/bin/build_image.sh b/tools/bin/build_image.sh
index 9684d07d6116..1d29cd19b793 100755
--- a/tools/bin/build_image.sh
+++ b/tools/bin/build_image.sh
@@ -8,6 +8,9 @@ DOCKERFILE="$3"
 TAGGED_IMAGE="$4"
 ID_FILE="$5"
 FOLLOW_SYMLINKS="$6"
+BUILD_ARCH="$7"
+BUILD_FROM="$8"
+echo "build_from: $BUILD_FROM"
 DOCKER_BUILD_ARCH="${DOCKER_BUILD_ARCH:-amd64}"
 # https://docs.docker.com/develop/develop-images/build_enhancements/
 export DOCKER_BUILDKIT=1
@@ -16,7 +19,16 @@ cd "$ROOT_DIR"
 . tools/lib/lib.sh
 assert_root
 
-cd "$PROJECT_DIR"
+FULL_PATH_TO_DOCKERFILE="${PROJECT_DIR}/${DOCKERFILE}"
+
+if [ -n "$BUILD_FROM" ]; then
+  cd "$BUILD_FROM"
+  echo "cd'd into projectdir=${BUILD_FROM}"
+else
+  cd "$PROJECT_DIR"
+  echo "cd'd into projectdir=${PROJECT_DIR}"
+fi
+
 
 function validate_dockerignore() {
   excludes_all=$(grep -w '^\*$' .dockerignore)
@@ -27,7 +39,7 @@ }
 args=(
-  -f "$DOCKERFILE"
+  -f "${FULL_PATH_TO_DOCKERFILE}"
   -t "$TAGGED_IMAGE"
   --iidfile "$ID_FILE"
 )
diff --git a/tools/bin/ci_integration_test.sh b/tools/bin/ci_integration_test.sh
index 8fb208fdf83a..0d2b0ed90ab0 100755
--- a/tools/bin/ci_integration_test.sh
+++ b/tools/bin/ci_integration_test.sh
@@ -135,9 +135,6 @@ write_logs() {
 echo "# $connector" >> $GITHUB_STEP_SUMMARY
 echo "" >> $GITHUB_STEP_SUMMARY
 
-# Cut the $GITHUB_STEP_SUMMARY with head if its larger than 1MB
-echo "$GITHUB_STEP_SUMMARY" | head -c 1048576 >> $GITHUB_STEP_SUMMARY
-
 # Copy command output to extract gradle scan link.
 run | tee build.out
 # return status of "run" command, not "tee"
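Taken together, the `runFromPath "."` setting added to the connector `build.gradle` files, the extra argument appended to `commandLine` in `airbyte-docker.gradle`, and the `BUILD_FROM`/`FULL_PATH_TO_DOCKERFILE` handling in `build_image.sh` move the Docker build context from the connector directory to the repository root, which is what lets the updated Dockerfiles `COPY ./airbyte-cdk/ ./airbyte-cdk`. A minimal sketch of the invocation the script now assembles — the values below are illustrative, not taken from this patch:

```bash
# Hypothetical local run of tools/bin/build_image.sh for one connector:
ROOT_DIR="$HOME/airbyte"
PROJECT_DIR="$ROOT_DIR/airbyte-integrations/connectors/source-sendgrid"
BUILD_FROM="."                          # runFromPath from build.gradle

cd "$ROOT_DIR"
cd "${BUILD_FROM:-$PROJECT_DIR}"        # repo root when runFromPath is set
docker build \
  -f "$PROJECT_DIR/Dockerfile" \
  -t airbyte/source-sendgrid:dev \
  .                                     # build context is now the repo root
```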