diff --git a/.env b/.env deleted file mode 100644 index 7279aca5d7fc..000000000000 --- a/.env +++ /dev/null @@ -1,117 +0,0 @@ -# This file only contains Docker relevant variables. -# -# Variables with defaults have been omitted to avoid duplication of defaults. -# The only exception to the non-default rule are env vars related to scaling. -# -# See https://github.com/airbytehq/airbyte/blob/master/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java -# for the latest environment variables. -# -# # Contributors - please organise this env file according to the above linked file. - - -### SHARED ### -VERSION=0.40.32 - -# When using the airbyte-db via default docker image -CONFIG_ROOT=/data -DATA_DOCKER_MOUNT=airbyte_data -DB_DOCKER_MOUNT=airbyte_db - -# Workspace storage for running jobs (logs, etc) -WORKSPACE_ROOT=/tmp/workspace -WORKSPACE_DOCKER_MOUNT=airbyte_workspace - -# Local mount to access local files from filesystem -# todo (cgardens) - when we are mount raw directories instead of named volumes, *_DOCKER_MOUNT must -# be the same as *_ROOT. -# Issue: https://github.com/airbytehq/airbyte/issues/578 -LOCAL_ROOT=/tmp/airbyte_local -LOCAL_DOCKER_MOUNT=/tmp/airbyte_local -# todo (cgardens) - hack to handle behavior change in docker compose. *_PARENT directories MUST -# already exist on the host filesystem and MUST be parents of *_ROOT. -# Issue: https://github.com/airbytehq/airbyte/issues/577 -HACK_LOCAL_ROOT_PARENT=/tmp - -# Proxy Configuration -# Set to empty values, e.g. 
"" to disable basic auth -BASIC_AUTH_USERNAME=airbyte -BASIC_AUTH_PASSWORD=password -BASIC_AUTH_PROXY_TIMEOUT=600 - -### DATABASE ### -# Airbyte Internal Job Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db -DATABASE_USER=docker -DATABASE_PASSWORD=docker -DATABASE_HOST=db -DATABASE_PORT=5432 -DATABASE_DB=airbyte -# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT}/${DATABASE_DB} (do not include the username or password here) -DATABASE_URL=jdbc:postgresql://db:5432/airbyte -JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.40.26.001 - -# Airbyte Internal Config Database, defaults to Job Database if empty. Explicitly left empty to mute docker compose warnings. -CONFIG_DATABASE_USER= -CONFIG_DATABASE_PASSWORD= -CONFIG_DATABASE_URL= -CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.40.23.002 - -### AIRBYTE SERVICES ### -TEMPORAL_HOST=airbyte-temporal:7233 -INTERNAL_API_HOST=airbyte-server:8001 -CONNECTOR_BUILDER_API_HOST=airbyte-connector-builder-server:80 -WEBAPP_URL=http://localhost:8000/ -# Although not present as an env var, required for webapp configuration. -API_URL=/api/v1/ -CONNECTOR_BUILDER_API_URL=/connector-builder-api - -### JOBS ### -# Relevant to scaling. -SYNC_JOB_MAX_ATTEMPTS=3 -SYNC_JOB_MAX_TIMEOUT_DAYS=3 -JOB_MAIN_CONTAINER_CPU_REQUEST= -JOB_MAIN_CONTAINER_CPU_LIMIT= -JOB_MAIN_CONTAINER_MEMORY_REQUEST= -JOB_MAIN_CONTAINER_MEMORY_LIMIT= - -NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_LIMIT= -NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_REQUEST= -NORMALIZATION_JOB_MAIN_CONTAINER_CPU_LIMIT= -NORMALIZATION_JOB_MAIN_CONTAINER_CPU_REQUEST= - -### LOGGING/MONITORING/TRACKING ### -TRACKING_STRATEGY=segment -JOB_ERROR_REPORTING_STRATEGY=logging -# Although not present as an env var, expected by Log4J configuration. -LOG_LEVEL=INFO - - -### APPLICATIONS ### -# Worker # -WORKERS_MICRONAUT_ENVIRONMENTS=control-plane -# Cron # -CRON_MICRONAUT_ENVIRONMENTS=control-plane -# Relevant to scaling. 
-MAX_SYNC_WORKERS=5 -MAX_SPEC_WORKERS=5 -MAX_CHECK_WORKERS=5 -MAX_DISCOVER_WORKERS=5 -MAX_NOTIFY_WORKERS=5 -SHOULD_RUN_NOTIFY_WORKFLOWS=false -# Temporal Activity configuration -ACTIVITY_MAX_ATTEMPT= -ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS= -ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS= -WORKFLOW_FAILURE_RESTART_DELAY_SECONDS= - -### FEATURE FLAGS ### -AUTO_DISABLE_FAILING_CONNECTIONS=false -FORCE_MIGRATE_SECRET_STORE=false - -### MONITORING FLAGS ### -# Accepted values are datadog and otel (open telemetry) -METRIC_CLIENT= -# Useful only when metric client is set to be otel. Must start with http:// or https://. -OTEL_COLLECTOR_ENDPOINT="http://host.docker.internal:4317" - -USE_STREAM_CAPABLE_STATE=true -AUTO_DETECT_SCHEMA=true diff --git a/.env.dev b/.env.dev deleted file mode 100644 index 19480e7072f9..000000000000 --- a/.env.dev +++ /dev/null @@ -1,38 +0,0 @@ -# For internal Airbyte dev use. - -VERSION=dev -DATABASE_USER=docker -DATABASE_PASSWORD=docker -DATABASE_DB=airbyte -DATABASE_URL=jdbc:postgresql://db:5432/airbyte -CONFIG_ROOT=/data -WORKSPACE_ROOT=/tmp/workspace -DATA_DOCKER_MOUNT=airbyte_data_dev -DB_DOCKER_MOUNT=airbyte_db_dev -WORKSPACE_DOCKER_MOUNT=airbyte_workspace_dev -# todo (cgardens) - when we are mount raw directories instead of named volumes, *_DOCKER_MOUNT must -# be the same as *_ROOT. -# Issue: https://github.com/airbytehq/airbyte/issues/578 -LOCAL_ROOT=/tmp/airbyte_local_dev -LOCAL_DOCKER_MOUNT=/tmp/airbyte_local_dev -TRACKING_STRATEGY=logging -# todo (cgardens) - hack to handle behavior change in docker compose. *_PARENT directories MUST -# already exist on the host filesystem and MUST be parents of *_ROOT. 
-# Issue: https://github.com/airbytehq/airbyte/issues/577 -HACK_LOCAL_ROOT_PARENT=/tmp -WEBAPP_URL=http://localhost:8000/ -API_URL=/api/v1/ -INTERNAL_API_HOST=airbyte-server:8001 -CONNECTOR_BUILDER_API_HOST=airbyte-connector-builder-server:80 -SYNC_JOB_MAX_ATTEMPTS=3 -SYNC_JOB_MAX_TIMEOUT_DAYS=3 -WORKERS_MICRONAUT_ENVIRONMENTS=control-plane -CRON_MICRONAUT_ENVIRONMENTS=control-plane -AUTO_DETECT_SCHEMA=true - -# Sentry -SENTRY_DSN="" - -# Migration Configuration -CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.35.15.001 -JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.29.15.001 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index b39fbe5dc752..4a92f4f7541b 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,11 +3,6 @@ /airbyte-integrations/connector-templates/ @airbytehq/connector-extensibility /airbyte-integrations/bases/connector-acceptance-tests/ @airbytehq/connector-operations -# Oauth -/airbyte-oauth/ @airbytehq/connector-operations -/airbyte-server/src/main/java/io/airbyte/server/handlers/OAuthHandler.java @airbytehq/connector-operations -/airbyte-server/src/test/java/io/airbyte/server/handlers/OAuthHandlerTest.java @airbytehq/connector-operations - # Protocol related items /docs/understanding-airbyte/airbyte-protocol.md @airbytehq/protocol-reviewers diff --git a/.github/actions/build-and-push-branch/action.yml b/.github/actions/build-and-push-branch/action.yml deleted file mode 100644 index f26e9495543a..000000000000 --- a/.github/actions/build-and-push-branch/action.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: "Build OSS Branch and Push Minimum Required OSS Images" -description: "Build jars and docker images tagged for a particular branch. Primarily used for running OSS branch code in Cloud." 
-inputs: - branch_version_tag: - description: 'Used to tag jars and docker images with a branch-specific version (should use the form "dev-" to pass AirbyteVersion validation)' - required: false - dockerhub_username: - description: "Used to log in to dockerhub for pushing images" - required: true - dockerhub_token: - description: "Used to log in to dockerhub for pushing images" - required: true -runs: - using: "composite" - steps: - - name: Build - id: build - uses: ./.github/actions/build-branch - with: - branch_version_tag: ${{ inputs.branch_version_tag }} - - - name: Login to Docker (on Master) - uses: docker/login-action@v2 - with: - username: ${{ inputs.dockerhub_username }} - password: ${{ inputs.dockerhub_token }} - - - name: Push Docker Images - run: | - GIT_REVISION=$(git rev-parse HEAD) - [ [ -z "$GIT_REVISION" ] ] && echo "Couldn't get the git revision..." && exit 1 - VERSION=${{ steps.build.outputs.branch_version_tag }} GIT_REVISION=$GIT_REVISION docker compose -f docker-compose-cloud.buildx.yaml push - shell: bash diff --git a/.github/actions/build-branch/action.yml b/.github/actions/build-branch/action.yml deleted file mode 100644 index 1f27e6c478b1..000000000000 --- a/.github/actions/build-branch/action.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: "Build OSS Branch" -description: "Build jars and docker images tagged for a particular branch. Primarily used for running OSS branch code in Cloud." -inputs: - branch_version_tag: - description: 'Used to tag jars and docker images with a branch-specific version (should use the form "dev-" to pass AirbyteVersion validation)' - required: false - build_docker_images: - description: 'Build docker images' - default: 'true' - required: false -outputs: - branch_version_tag: - description: "Tag used for jars and docker images. 
Either user specified or auto generated as `dev-`" - value: ${{ steps.parse-input.outputs.branch_version_tag }} -runs: - using: "composite" - steps: - - name: "Parse Input" - id: parse-input - shell: bash - run: |- - # if the *branch_version_tag* input param is not specified, then generate it as 'dev-` - # - [[ "${{ inputs.branch_version_tag }}" != '' ]] && echo "branch_version_tag=${{ inputs.branch_version_tag }}" >> $GITHUB_OUTPUT \ - || { short_hash=$(git rev-parse --short=10 HEAD); echo "branch_version_tag=dev-$short_hash" >> $GITHUB_OUTPUT ; } - - - name: Prepare Runner for Building - uses: ./.github/actions/runner-prepare-for-build - - - name: Build with Docker Images - if: inputs.build_docker_images == 'true' - run: VERSION=${{ steps.parse-input.outputs.branch_version_tag }} SUB_BUILD=PLATFORM ./gradlew build --scan - shell: bash - - - name: Build without Docker Images - if: inputs.build_docker_images != 'true' - run: VERSION=${{ steps.parse-input.outputs.branch_version_tag }} SUB_BUILD=PLATFORM ./gradlew test --scan - shell: bash - - - name: Publish to Maven Local - run: VERSION=${{ steps.parse-input.outputs.branch_version_tag }} SUB_BUILD=PLATFORM ./gradlew publishToMavenLocal - shell: bash diff --git a/.github/workflows/build-report.yml b/.github/workflows/build-report.yml index 4cdd45eaaabd..9f662ddd5854 100644 --- a/.github/workflows/build-report.yml +++ b/.github/workflows/build-report.yml @@ -15,7 +15,6 @@ jobs: timeout-minutes: 5 runs-on: ubuntu-latest if: github.ref == 'refs/heads/master' - environment: more-secrets steps: - name: Checkout Airbyte uses: actions/checkout@v3 diff --git a/.github/workflows/create-oss-pr-snapshot.yml b/.github/workflows/create-oss-pr-snapshot.yml index a28e27c78f18..9ef2eb1a65e8 100644 --- a/.github/workflows/create-oss-pr-snapshot.yml +++ b/.github/workflows/create-oss-pr-snapshot.yml @@ -41,7 +41,7 @@ jobs: static-args: | repository=${{ github.repository }} branch=${{ needs.check.outputs.pull_request_branch }} - 
pr_number=${{ github.event.issue_comment.issue.number }} + pr_number=${{ github.event.issue.number }} commands: | create-platform-pr - name: Publish comment with error message @@ -50,4 +50,4 @@ jobs: with: comment-id: ${{ github.event.comment.id }} body: | - > Error: ${{ steps.slash-command-dispatch.outputs.error-message }} \ No newline at end of file + > Error: ${{ steps.slash-command-dispatch.outputs.error-message }} diff --git a/.github/workflows/doc-link-check.yml b/.github/workflows/doc-link-check.yml index 0d3626150cfc..8e4980b5dc4d 100644 --- a/.github/workflows/doc-link-check.yml +++ b/.github/workflows/doc-link-check.yml @@ -12,7 +12,6 @@ jobs: markdown-link-check: timeout-minutes: 50 runs-on: ubuntu-latest - environment: more-secrets steps: - uses: actions/checkout@master # check all files on master diff --git a/.github/workflows/fe-validate-links.yml b/.github/workflows/fe-validate-links.yml deleted file mode 100644 index 77254e824ef3..000000000000 --- a/.github/workflows/fe-validate-links.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: Check for broken links in FE - -on: - workflow_dispatch: - schedule: - - cron: "0 14 * * *" - -jobs: - validate-frontend-links: - name: "Validate frontend links" - runs-on: ubuntu-latest - timeout-minutes: 15 - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - - - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "17" - - - name: Set up CI Gradle Properties - run: | - mkdir -p ~/.gradle/ - cat > ~/.gradle/gradle.properties <- - {\"channel\":\"C03088BTMFC\", \"blocks\":[ - {\"type\":\"section\",\"text\":{\"type\":\"mrkdwn\",\"text\":\":alarm: The periodic link validation failed!\n\n\"}}, - {\"type\":\"section\",\"text\":{\"type\":\"mrkdwn\",\"text\":\"See details on \n\"}}]} diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index a1c63f5f4b62..31b07166dc6c 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -1,4 +1,4 @@ -name: Airbyte CI 
+name: Airbyte Connectors & Octavia CI env: S3_BUILD_CACHE_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} @@ -80,12 +80,6 @@ jobs: - 'airbyte-commons-worker/**' db: - 'airbyte-db/**' - frontend: - - 'airbyte-api/src/main/openapi/config.yaml' - - 'airbyte-connector-builder-server/CDK_VERSION' - - 'airbyte-connector-builder-server/src/main/openapi/openapi.yaml' - - 'airbyte-webapp/**' - - 'airbyte-webapp-e2e-tests/**' # Uncomment to debug. # changes-output: @@ -155,13 +149,6 @@ jobs: attempt_limit: 3 attempt_delay: 5000 # in ms - - name: Build Platform Docker Images - uses: Wandalen/wretry.action@master - with: - command: SUB_BUILD=PLATFORM ./gradlew --no-daemon assemble --scan - attempt_limit: 3 - attempt_delay: 5000 # in ms - - name: Run integration tests uses: Wandalen/wretry.action@master with: @@ -350,728 +337,12 @@ jobs: label: ${{ needs.start-connectors-base-build-runner.outputs.label }} ec2-instance-id: ${{ needs.start-connectors-base-build-runner.outputs.ec2-instance-id }} - ## Frontend Test - # In case of self-hosted EC2 errors, remove this block. - start-frontend-runner: - name: "Frontend: Start EC2 Runner" - needs: - - changes - # Because scheduled builds on master require us to skip the changes job. Use always() to force this to run on master. 
- if: | - needs.changes.outputs.frontend == 'true' || needs.changes.outputs.build == 'true' || github.ref == 'refs/heads/master' - || (always() && needs.changes.outputs.backend == 'true') - timeout-minutes: 10 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - frontend-build: - name: "Frontend: Build" - needs: - - start-frontend-runner - runs-on: ${{ needs.start-frontend-runner.outputs.label }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - # We need to fetch at least one more commmit for the Chromatic action not to fail - # but since we don't do screenshot comparison we don't need to fetch the full history. 
- with: - fetch-depth: 2 - - - name: Cache Build Artifacts - uses: ./.github/actions/cache-build-artifacts - with: - cache-key: ${{ secrets.CACHE_VERSION }} - cache-python: "false" - - - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "17" - - - uses: actions/setup-python@v4 - with: - python-version: "3.9" - - - name: Set up CI Gradle Properties - run: | - mkdir -p ~/.gradle/ - cat > ~/.gradle/gradle.properties < ~/.gradle/gradle.properties < ~/.gradle/gradle.properties < ~/.gradle/gradle.properties <> $GITHUB_OUTPUT - - release-chart: - name: Chart release - runs-on: ubuntu-22.04 - needs: ["generate-semantic-version"] - permissions: write-all - steps: - - uses: actions/checkout@v3 - with: - path: "airbyte" - fetch-depth: 0 - - - uses: actions/checkout@v3 - with: - repository: "airbytehq/helm-charts" - token: ${{ secrets.GH_PAT_MAINTENANCE_OSS }} - path: "airbyte-oss" - - - name: Replace semantic version in main chart for deps - shell: bash - working-directory: ./airbyte/charts - run: | - sed -i -E "s/ version: [[:digit:]].[[:digit:]].[[:digit:]]/ version: ${{ needs.generate-semantic-version.outputs.next-version }}/g" airbyte/Chart.yaml - sed -i -E 's/version: [0-9]+\.[0-9]+\.[0-9]+/version: ${{ needs.generate-semantic-version.outputs.next-version }}/' airbyte/Chart.yaml - - - name: "Helm package" - shell: bash - run: | - declare -a StringArray=("airbyte-bootloader" "airbyte-server" "airbyte-temporal" "airbyte-webapp" "airbyte-pod-sweeper" "airbyte-worker" "airbyte-metrics" "airbyte-cron" "airbyte-connector-builder-server") - for val in ${StringArray[@]}; do - cd ./airbyte/charts/${val} && helm dep update && cd $GITHUB_WORKSPACE - sed -i -E 's/version: \"[0-9]+\.[0-9]+\.[0-9]+\"/version: \"${{ needs.generate-semantic-version.outputs.next-version }}\"/' ./airbyte/charts/${val}/Chart.yaml - helm package ./airbyte/charts/${val} -d airbyte-oss --version ${{ needs.generate-semantic-version.outputs.next-version }} - done - helm repo index 
airbyte-oss/ - - - name: Commit and push changes - uses: EndBug/add-and-commit@v9 - with: - message: "Bump release to ${{ needs.generate-semantic-version.outputs.next-version }}" - add: "." - cwd: "./airbyte-oss/" - - - name: "Helm package main chart" - shell: bash - run: | - echo "Waiting for published charts to be synced in helm-charts repo" - sleep 300 - declare -a StringArray=("airbyte") - for val in ${StringArray[@]}; do - cd ./airbyte/charts/${val} && cat Chart.yaml && helm dep update && cd $GITHUB_WORKSPACE - helm package ./airbyte/charts/${val} -d airbyte-oss --version ${{ needs.generate-semantic-version.outputs.next-version }} - done - helm repo index airbyte-oss/ - - - name: Commit and push changes - uses: EndBug/add-and-commit@v9 - with: - message: "Bump release to ${{ needs.generate-semantic-version.outputs.next-version }}" - add: "." - cwd: "./airbyte-oss/" - - - name: "Generate changelog" - shell: bash - id: changelog - run: | - cd ./airbyte/ - changelog=$(PAGER=cat git log $(git describe --tags --match "*-helm" $(git rev-list --tags --max-count=1))..HEAD --oneline --decorate=no) - echo "changelog<> $GITHUB_ENV - echo "$changelog" >> $GITHUB_ENV - echo "EOF" >> $GITHUB_ENV - - - name: Create Pull Request - uses: peter-evans/create-pull-request@v4 - with: - path: ./airbyte/ - branch: update-helm-chart-version-ref - branch-suffix: random - title: Bump helm chart version reference to ${{ needs.generate-semantic-version.outputs.next-version }} - body: | - ## What - Bump version reference in all Chart.yaml files to ${{ needs.generate-semantic-version.outputs.next-version }} - CHANGELOG: - ${{ env.changelog }} - commit-message: Bump helm chart version reference to ${{ needs.generate-semantic-version.outputs.next-version }} - delete-branch: true - - - name: Create tag - shell: bash - run: | - cd ./airbyte/ - git tag ${{ needs.generate-semantic-version.outputs.tag }} - git push origin ${{ needs.generate-semantic-version.outputs.tag }} diff --git 
a/.github/workflows/publish-oss-for-cloud.yml b/.github/workflows/publish-oss-for-cloud.yml deleted file mode 100644 index fda551da9f3e..000000000000 --- a/.github/workflows/publish-oss-for-cloud.yml +++ /dev/null @@ -1,190 +0,0 @@ -name: Publish OSS Artifacts for Cloud - -env: - # enable gradle remote build cache - S3_BUILD_CACHE_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - S3_BUILD_CACHE_SECRET_KEY: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - -on: - workflow_dispatch: - inputs: - oss_ref: - description: "Publish artifacts for the following git ref (if unspecified, uses the latest commit for the current branch):" - required: false -jobs: - start-runner: - name: "Start Runner on AWS" - timeout-minutes: 10 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - - generate-tags: - name: "Generate Dev and Master Tags" - runs-on: ubuntu-latest - outputs: - dev_tag: ${{ steps.set-outputs.outputs.dev_tag }} - master_tag: ${{ steps.set-outputs.outputs.master_tag }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - ref: ${{ github.event.inputs.oss_ref || github.ref }} - - name: Generate Outputs - id: set-outputs - shell: bash - run: |- - set -x - - commit_sha=$(git rev-parse --short=10 HEAD) - - # set dev_tag - # AirbyteVersion.java allows versions that have a prefix of 'dev' - echo "dev_tag=dev-${commit_sha}" >> 
$GITHUB_OUTPUT - - # If this commit is on the master branch, also set master_tag - if $(git merge-base --is-ancestor "${commit_sha}" master); then - echo "master_tag=${commit_sha}" >> $GITHUB_OUTPUT - fi - cat $GITHUB_OUTPUT || true # for the sake of investigation - - oss-branch-build: - concurrency: - # only allow one workflow run at a time for a given SHA - # to prevent multiple runs from pushing artifacts for the same SHA at the same time - # note: using inputs in the group expression only works when specifying concurrency at the job level - group: ${{ github.workflow }}-${{ inputs.oss_ref || github.sha }} - cancel-in-progress: false - name: "Gradle Build and Publish" - needs: - - start-runner - - generate-tags - runs-on: ${{ needs.start-runner.outputs.label }} - environment: more-secrets - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - ref: ${{ github.event.inputs.oss_ref || github.ref }} - - - name: Build Branch - uses: ./.github/actions/build-branch - with: - branch_version_tag: ${{ needs.generate-tags.outputs.dev_tag }} - build_docker_images: 'false' - - - name: Publish Dev Jars - env: - CLOUDREPO_USER: ${{ secrets.CLOUDREPO_USER }} - CLOUDREPO_PASSWORD: ${{ secrets.CLOUDREPO_PASSWORD }} - run: VERSION=${{ needs.generate-tags.outputs.dev_tag }} SUB_BUILD=PLATFORM ./gradlew publish - shell: bash - - - name: Publish Master Jars - if: needs.generate-tags.outputs.master_tag != '' - env: - CLOUDREPO_USER: ${{ secrets.CLOUDREPO_USER }} - CLOUDREPO_PASSWORD: ${{ secrets.CLOUDREPO_PASSWORD }} - run: VERSION=${{ needs.generate-tags.outputs.master_tag }} SUB_BUILD=PLATFORM ./gradlew publish - shell: bash - - docker-push: - concurrency: - # only allow one workflow run at a time for a given SHA - # to prevent multiple runs from pushing artifacts for the same SHA at the same time - # note: using inputs in the group expression only works when specifying concurrency at the job level - group: ${{ github.workflow }}-${{ inputs.oss_ref || github.sha 
}} - cancel-in-progress: false - name: "Push Docker Images" - needs: - - start-runner - - generate-tags - - oss-branch-build - runs-on: ${{ needs.start-runner.outputs.label }} - steps: - - name: Login to Docker (on Master) - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKER_HUB_USERNAME }} - password: ${{ secrets.DOCKER_HUB_PASSWORD }} - - - name: Prepare Docker buildx - run: | - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - docker buildx create --name oss-buildx --driver docker-container --use - shell: bash - - - name: Set Git Revision - run: | - GIT_REVISION=$(git rev-parse HEAD) - [ [ -z "$GIT_REVISION" ] ] && echo "Couldn't get the git revision..." && exit 1 - echo "GIT_REVISION=${GIT_REVISION}" >> $GITHUB_ENV - shell: bash - - - name: Prepare Runner for Building - uses: ./.github/actions/runner-prepare-for-build - - # Put tars/artifacts in the correct build directories so docker buildx can find the artifacts it needs when building docker images - - name: Prepare Docker context - run: VERSION=${{ needs.generate-tags.outputs.dev_tag }} SUB_BUILD=PLATFORM ./gradlew copyGeneratedTar airbyte-db:db-lib:copyInitSql - shell: bash - - # Build docker images using docker buildx (for multi platform) - - name: Push Docker Images - env: - VERSION: ${{ needs.generate-tags.outputs.dev_tag }} - ALT_TAG: ${{ needs.generate-tags.outputs.master_tag }} - run: GIT_REVISION=$GIT_REVISION docker buildx bake -f docker-compose-cloud.buildx.yaml --push - shell: bash - - - name: Cleanup Docker buildx - run: docker buildx rm oss-buildx - shell: bash - - stop-runner: - name: "Stop Build EC2 Runner" - timeout-minutes: 10 - needs: - - start-runner # required to get output from the start-runner job - - docker-push # wait until all publish steps are done - runs-on: ubuntu-latest - # Always is required to stop the runner even if the previous job has errors. However always() runs even if the previous step is skipped. 
- # Thus, we check for skipped here. - if: ${{ always() && needs.start-runner.result != 'skipped'}} - steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-2 - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Stop EC2 runner - uses: airbytehq/ec2-github-runner@base64v1.1.0 - with: - mode: stop - github-token: ${{ env.PAT }} - label: ${{ needs.start-runner.outputs.label }} - ec2-instance-id: ${{ needs.start-runner.outputs.ec2-instance-id }} diff --git a/.github/workflows/release-airbyte-os.yml b/.github/workflows/release-airbyte-os.yml index 461b87cb41cc..0d1d33cd0f88 100644 --- a/.github/workflows/release-airbyte-os.yml +++ b/.github/workflows/release-airbyte-os.yml @@ -1,6 +1,10 @@ name: Release Open Source Airbyte concurrency: release-airbyte + +# TODO: If we continue to use this action from here it needs to be updated to point to airbyte-platform +# where appropriate + on: workflow_dispatch: inputs: @@ -37,7 +41,6 @@ jobs: # In case of self-hosted EC2 errors, removed the `needs` line and switch back to running on ubuntu-latest. 
needs: start-release-airbyte-runner # required to start the main job when the runner is ready runs-on: ${{ needs.start-release-airbyte-runner.outputs.label }} # run the job on the newly created runner - environment: more-secrets steps: - name: Checkout uses: actions/checkout@v3 @@ -71,7 +74,6 @@ jobs: # - The self hosted runner used in releaseAirbyte does not have the docker buildx command to build multi-arch images releaseOctavia: runs-on: ubuntu-latest - environment: more-secrets steps: - name: Checkout uses: actions/checkout@v3 @@ -98,7 +100,6 @@ jobs: - releaseAirbyte - releaseOctavia runs-on: ubuntu-latest - environment: more-secrets steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.github/workflows/run-performance-test.yml b/.github/workflows/run-performance-test.yml deleted file mode 100644 index 929df1102cca..000000000000 --- a/.github/workflows/run-performance-test.yml +++ /dev/null @@ -1,56 +0,0 @@ -name: Run Performance Test -on: - workflow_dispatch: - inputs: - repo: - description: "Repo to check out code from. Defaults to the main airbyte repo. Set this when building connectors from forked repos." - required: false - default: "airbytehq/airbyte" - gitref: - description: "The git ref to check out from the specified repository." 
- required: false - default: master - test-name: - description: "Test to run classname" - required: true - -jobs: - single-test-runner: - timeout-minutes: 300 - needs: start-platform-build-runner # required to start the main job when the runner is ready - runs-on: ${{ needs.start-platform-build-runner.outputs.label }} # run the job on the newly created runner - environment: more-secrets - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - - - name: Npm Caching - uses: actions/cache@v3 - with: - path: | - ~/.npm - key: ${{ secrets.CACHE_VERSION }}-npm-${{ runner.os }}-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ secrets.CACHE_VERSION }}-npm-${{ runner.os }}- - - # this intentionally does not use restore-keys so we don't mess with gradle caching - - name: Gradle Caching - uses: actions/cache@v3 - with: - path: | - ~/.gradle/caches - ~/.gradle/wrapper - **/.venv - key: ${{ secrets.CACHE_VERSION }}-${{ runner.os }}-${{ hashFiles('**/*.gradle*') }}-${{ hashFiles('**/package-lock.json') }} - - - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "14" - - - name: Build - id: run-specific-test - run: ./gradlew allTest --tests *${{ github.event.inputs.test-name }} diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml index 11f2214d9b83..0bcf8f49ee3f 100644 --- a/.github/workflows/test-command.yml +++ b/.github/workflows/test-command.yml @@ -58,7 +58,6 @@ jobs: timeout-minutes: 240 needs: start-test-runner runs-on: ${{ needs.start-test-runner.outputs.label }} - environment: more-secrets steps: - name: Link comment to workflow run if: github.event.inputs.comment-id diff --git a/.github/workflows/test-performance-command.yml b/.github/workflows/test-performance-command.yml index b8c1a040d370..703be55140e1 100644 --- a/.github/workflows/test-performance-command.yml +++ 
b/.github/workflows/test-performance-command.yml @@ -1,6 +1,6 @@ # runs ./tools/bin/ci_performance_test.sh # which is more or less ./gradlew performanceTest limited to connectors with changes -name: Run Performance Test +name: Run Connectors Performance Test on: workflow_dispatch: inputs: @@ -57,7 +57,6 @@ jobs: timeout-minutes: 240 needs: start-test-runner runs-on: ${{ needs.start-test-runner.outputs.label }} - environment: more-secrets steps: - name: Search for valid connector name format id: regex diff --git a/.gitignore b/.gitignore index a6e0049c2f68..ed73a6a9f4c8 100644 --- a/.gitignore +++ b/.gitignore @@ -83,3 +83,9 @@ charts/**/charts # Datadog dd-java-agent.jar + +# Files needed to run airbyte-platform that are downloaded on-the-fly via run-ab-platform.sh +docker-compose.yaml +.env +.env.dev +flags.yml diff --git a/README.md b/README.md index c91aff0042b4..20753fd79879 100644 --- a/README.md +++ b/README.md @@ -44,12 +44,12 @@ Explore our [demo app](https://demo.airbyte.io/). ### Run Airbyte locally -You can run Airbyte locally with Docker. +You can run Airbyte locally with Docker. The shell script below will retrieve the requisite docker files from the [platform repository](https://github.com/airbytehq/airbyte-platform) and run docker compose for you. ```bash git clone --depth 1 https://github.com/airbytehq/airbyte.git cd airbyte -docker compose up +./run-ab-platform.sh ``` Login to the web app at [http://localhost:8000](http://localhost:8000) by entering the default credentials found in your .env file. @@ -81,6 +81,8 @@ Sign up for [Airbyte Cloud](https://cloud.airbyte.io/signup). Get started by checking Github issues and creating a Pull Request. An easy way to start contributing is to update an existing connector or create a new connector using the low-code and Python CDKs. You can find the code for existing connectors in the [connectors](https://github.com/airbytehq/airbyte/tree/master/airbyte-integrations/connectors) directory. 
The Airbyte platform is written in Java, and the frontend in React. You can also contribute to our docs and tutorials. Advanced Airbyte users can apply to the [Maintainer program](https://airbyte.com/maintainer-program) and [Writer Program](https://airbyte.com/write-for-the-community). +If you would like to make a contribution to the platform itself, please refer to guides in [the platform repository](https://github.com/airbytehq/airbyte-platform) + Read the [Contributing guide](https://docs.airbyte.com/contributing-to-airbyte/). ## Reporting vulnerabilities diff --git a/airbyte-analytics/build.gradle b/airbyte-analytics/build.gradle deleted file mode 100644 index f411e8c42508..000000000000 --- a/airbyte-analytics/build.gradle +++ /dev/null @@ -1,14 +0,0 @@ -plugins { - id 'java-library' -} - -dependencies { - api libs.segment.java.analytics - api libs.micronaut.http - - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') - implementation project(':airbyte-json-validation') -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-analytics/readme.md b/airbyte-analytics/readme.md deleted file mode 100644 index 4c66fb35c24d..000000000000 --- a/airbyte-analytics/readme.md +++ /dev/null @@ -1,3 +0,0 @@ -# airbyte-analytics - -Java library with shared code for telemetry tracking including Segment. diff --git a/airbyte-analytics/src/main/java/io/airbyte/analytics/Deployment.java b/airbyte-analytics/src/main/java/io/airbyte/analytics/Deployment.java deleted file mode 100644 index 14e73d584511..000000000000 --- a/airbyte-analytics/src/main/java/io/airbyte/analytics/Deployment.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.analytics; - -import com.google.common.base.Preconditions; -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.Configs.WorkerEnvironment; -import java.util.UUID; - -public class Deployment { - - /** - * deployment - deployment tracking info. - */ - private final DeploymentMode deploymentMode; - /** - * deploymentId - Identifier for the deployment. - * - * This identifier tracks an install of Airbyte. Any time Airbyte is started up with new volumes or - * persistence, it will be assigned a new deployment id. This is different from the lifecycle of the - * rest of the data layer which may be persisted across deployments. - */ - private final UUID deploymentId; - /** - * deploymentEnvironment - the environment that airbyte is running in. - */ - private final Configs.WorkerEnvironment deploymentEnv; - - public Deployment(final DeploymentMode deploymentMode, final UUID deploymentId, final WorkerEnvironment deploymentEnv) { - Preconditions.checkNotNull(deploymentMode); - Preconditions.checkNotNull(deploymentId); - Preconditions.checkNotNull(deploymentEnv); - - this.deploymentMode = deploymentMode; - this.deploymentId = deploymentId; - this.deploymentEnv = deploymentEnv; - } - - public DeploymentMode getDeploymentMode() { - return deploymentMode; - } - - public UUID getDeploymentId() { - return deploymentId; - } - - public WorkerEnvironment getDeploymentEnv() { - return deploymentEnv; - } - -} diff --git a/airbyte-analytics/src/main/java/io/airbyte/analytics/LoggingTrackingClient.java b/airbyte-analytics/src/main/java/io/airbyte/analytics/LoggingTrackingClient.java deleted file mode 100644 index 7b9416269e96..000000000000 --- a/airbyte-analytics/src/main/java/io/airbyte/analytics/LoggingTrackingClient.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.analytics; - -import io.airbyte.commons.version.AirbyteVersion; -import java.util.Collections; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Function; -import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LoggingTrackingClient implements TrackingClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(LoggingTrackingClient.class); - - private final Function identityFetcher; - - public LoggingTrackingClient(final Function identityFetcher) { - this.identityFetcher = identityFetcher; - } - - @Override - public void identify(final UUID workspaceId) { - LOGGER.info("identify. userId: {}", identityFetcher.apply(workspaceId).getCustomerId()); - } - - @Override - public void alias(final UUID workspaceId, final String previousCustomerId) { - LOGGER.info("merge. userId: {} previousUserId: {}", identityFetcher.apply(workspaceId).getCustomerId(), previousCustomerId); - } - - @Override - public void track(@Nullable final UUID workspaceId, final String action) { - track(workspaceId, action, Collections.emptyMap()); - } - - @Override - public void track(@Nullable final UUID workspaceId, final String action, final Map metadata) { - String version = null; - UUID userId = null; - if (workspaceId != null) { - version = Optional.ofNullable(identityFetcher.apply(workspaceId).getAirbyteVersion()).map(AirbyteVersion::serialize).orElse(null); - userId = identityFetcher.apply(workspaceId).getCustomerId(); - } - LOGGER.info("track. 
version: {}, userId: {}, action: {}, metadata: {}", - version, - userId, - action, - metadata); - } - -} diff --git a/airbyte-analytics/src/main/java/io/airbyte/analytics/SegmentTrackingClient.java b/airbyte-analytics/src/main/java/io/airbyte/analytics/SegmentTrackingClient.java deleted file mode 100644 index d8d6991daf5d..000000000000 --- a/airbyte-analytics/src/main/java/io/airbyte/analytics/SegmentTrackingClient.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.analytics; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Strings; -import com.segment.analytics.Analytics; -import com.segment.analytics.messages.AliasMessage; -import com.segment.analytics.messages.IdentifyMessage; -import com.segment.analytics.messages.TrackMessage; -import io.airbyte.config.StandardWorkspace; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.context.ServerRequestContext; -import java.time.Instant; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Function; -import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class is a wrapper around the Segment backend Java SDK. - *

- * In general, the Segment SDK events have two pieces to them, a top-level userId field and a map of - * properties. - *

- * As of 2021/11/03, the top level userId field is standardised on the - * {@link StandardWorkspace#getCustomerId()} field. This field is a random UUID generated when a - * workspace model is created. This standardisation is through OSS Airbyte and Cloud Airbyte. This - * join key now underpins Airbyte OSS Segment tracking. Although the id is meaningless and the name - * confusing, it is not worth performing a migration at this time. Interested parties can look at - * https://github.com/airbytehq/airbyte/issues/7456 for more context. - *

- * Consumers utilising this class must understand that the top-level userId field is subject to this - * constraint. - *

- * See the following document for details on tracked events. Please update this document if tracked - * events change. - * https://docs.google.com/spreadsheets/d/1lGLmLIhiSPt_-oaEf3CpK-IxXnCO0NRHurvmWldoA2w/edit#gid=1567609168 - */ -public class SegmentTrackingClient implements TrackingClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(SegmentTrackingClient.class); - - public static final String AIRBYTE_ANALYTIC_SOURCE_HEADER = "X-Airbyte-Analytic-Source"; - public static final String CUSTOMER_ID_KEY = "user_id"; - private static final String SEGMENT_WRITE_KEY = "7UDdp5K55CyiGgsauOr2pNNujGvmhaeu"; - protected static final String AIRBYTE_VERSION_KEY = "airbyte_version"; - private static final String AIRBYTE_ROLE = "airbyte_role"; - protected static final String AIRBYTE_SOURCE = "airbyte_source"; - private static final String AIRBYTE_TRACKED_AT = "tracked_at"; - - // Analytics is threadsafe. - private final Analytics analytics; - private final Function identityFetcher; - private final Deployment deployment; - private final String airbyteRole; - - @VisibleForTesting - SegmentTrackingClient(final Function identityFetcher, - final Deployment deployment, - final String airbyteRole, - final Analytics analytics) { - this.identityFetcher = identityFetcher; - this.deployment = deployment; - this.analytics = analytics; - this.airbyteRole = airbyteRole; - } - - public SegmentTrackingClient(final Function identityFetcher, - final Deployment deployment, - - final String airbyteRole) { - this(identityFetcher, deployment, airbyteRole, Analytics.builder(SEGMENT_WRITE_KEY).build()); - } - - @Override - public void identify(final UUID workspaceId) { - final TrackingIdentity trackingIdentity = identityFetcher.apply(workspaceId); - final Map identityMetadata = new HashMap<>(); - - // deployment - identityMetadata.put(AIRBYTE_VERSION_KEY, trackingIdentity.getAirbyteVersion().serialize()); - identityMetadata.put("deployment_mode", deployment.getDeploymentMode()); 
- identityMetadata.put("deployment_env", deployment.getDeploymentEnv()); - identityMetadata.put("deployment_id", deployment.getDeploymentId()); - - // workspace (includes info that in the future we would store in an organization) - identityMetadata.put("anonymized", trackingIdentity.isAnonymousDataCollection()); - identityMetadata.put("subscribed_newsletter", trackingIdentity.isNews()); - identityMetadata.put("subscribed_security", trackingIdentity.isSecurityUpdates()); - trackingIdentity.getEmail().ifPresent(email -> identityMetadata.put("email", email)); - - // other - if (!Strings.isNullOrEmpty(airbyteRole)) { - identityMetadata.put(AIRBYTE_ROLE, airbyteRole); - } - - final String joinKey = trackingIdentity.getCustomerId().toString(); - analytics.enqueue(IdentifyMessage.builder() - // user id is scoped by workspace. there is no cross-workspace tracking. - .userId(joinKey) - .traits(identityMetadata)); - } - - @Override - public void alias(final UUID workspaceId, final String previousCustomerId) { - final var joinKey = identityFetcher.apply(workspaceId).getCustomerId().toString(); - analytics.enqueue(AliasMessage.builder(previousCustomerId).userId(joinKey)); - } - - @Override - public void track(@Nullable final UUID workspaceId, final String action) { - track(workspaceId, action, Collections.emptyMap()); - } - - @Override - public void track(@Nullable final UUID workspaceId, final String action, final Map metadata) { - if (workspaceId == null) { - LOGGER.error("Could not track action {} due to null workspaceId", action); - return; - } - final Map mapCopy = new HashMap<>(metadata); - final TrackingIdentity trackingIdentity = identityFetcher.apply(workspaceId); - final Optional airbyteSource = getAirbyteSource(); - - airbyteSource.ifPresent(a -> mapCopy.put(AIRBYTE_SOURCE, a)); - - // Always add these traits. 
- mapCopy.put(AIRBYTE_VERSION_KEY, trackingIdentity.getAirbyteVersion().serialize()); - mapCopy.put(CUSTOMER_ID_KEY, trackingIdentity.getCustomerId()); - mapCopy.put(AIRBYTE_TRACKED_AT, Instant.now().toString()); - if (!metadata.isEmpty()) { - trackingIdentity.getEmail().ifPresent(email -> mapCopy.put("email", email)); - } - - final var joinKey = trackingIdentity.getCustomerId().toString(); - analytics.enqueue(TrackMessage.builder(action) - .userId(joinKey) - .properties(mapCopy)); - } - - private Optional getAirbyteSource() { - final Optional> currentRequest = ServerRequestContext.currentRequest(); - if (currentRequest.isPresent()) { - return Optional.ofNullable(currentRequest.get().getHeaders().get(AIRBYTE_ANALYTIC_SOURCE_HEADER)); - } - - return Optional.empty(); - } - -} diff --git a/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingClient.java b/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingClient.java deleted file mode 100644 index 955ad5e6f5c1..000000000000 --- a/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingClient.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.analytics; - -import java.util.Map; -import java.util.UUID; -import javax.annotation.Nullable; - -/** - * General interface for user level Airbyte usage reporting. We use Segment for behavioural - * reporting, so this interface mirrors the Segment backend api sdk. - *

- * For more information see - * https://segment.com/docs/connections/sources/catalog/libraries/server/http-api/. - *

- * This interface allows us to easily stub this out via the {@link LoggingTrackingClient}. The main - * implementation is in {@link SegmentTrackingClient}. - *

- * Although the methods seem to take in workspace id, this id is used to index into more metadata. - * See {@link SegmentTrackingClient} for more information. - *

- * Keep in mind that this interface is also relied on in Airbyte Cloud. - */ -public interface TrackingClient { - - void identify(UUID workspaceId); - - void alias(UUID workspaceId, String previousCustomerId); - - void track(@Nullable UUID workspaceId, String action); - - void track(@Nullable UUID workspaceId, String action, Map metadata); - -} diff --git a/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingClientSingleton.java b/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingClientSingleton.java deleted file mode 100644 index b9bddee61889..000000000000 --- a/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingClientSingleton.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.analytics; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.UUID; -import java.util.function.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TrackingClientSingleton { - - private static final Logger LOGGER = LoggerFactory.getLogger(LoggingTrackingClient.class); - - private static final Object lock = new Object(); - private static TrackingClient trackingClient; - - public static TrackingClient get() { - synchronized (lock) { - if (trackingClient == null) { - LOGGER.warn("Attempting to fetch an initialized track client. 
Initializing a default one."); - initialize(); - } - return trackingClient; - } - } - - @VisibleForTesting - static void initialize(final TrackingClient trackingClient) { - synchronized (lock) { - TrackingClientSingleton.trackingClient = trackingClient; - } - } - - public static void initialize(final Configs.TrackingStrategy trackingStrategy, - final Deployment deployment, - final String airbyteRole, - final AirbyteVersion airbyteVersion, - final ConfigRepository configRepository) { - initialize(createTrackingClient( - trackingStrategy, - deployment, - airbyteRole, - (workspaceId) -> getTrackingIdentity(configRepository, airbyteVersion, workspaceId))); - } - - // fallback on a logging client with an empty identity. - private static void initialize() { - initialize(new LoggingTrackingClient(workspaceId -> TrackingIdentity.empty())); - } - - @VisibleForTesting - static TrackingIdentity getTrackingIdentity(final ConfigRepository configRepository, final AirbyteVersion airbyteVersion, final UUID workspaceId) { - try { - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - String email = null; - if (workspace.getEmail() != null && workspace.getAnonymousDataCollection() != null && !workspace.getAnonymousDataCollection()) { - email = workspace.getEmail(); - } - return new TrackingIdentity( - airbyteVersion, - workspace.getCustomerId(), - email, - workspace.getAnonymousDataCollection(), - workspace.getNews(), - workspace.getSecurityUpdates()); - } catch (final ConfigNotFoundException e) { - throw new RuntimeException("could not find workspace with id: " + workspaceId, e); - } catch (final JsonValidationException | IOException e) { - throw new RuntimeException(e); - } - } - - // todo (cgardens) - trackingIdentityFetcher should probably have some sort of caching where it is - // only re-fetched on identify or alias. - /** - * Creates a tracking client that uses the appropriate strategy from an identity supplier. 
- * - * @param trackingStrategy - what type of tracker we want to use. - * @param deployment - deployment tracking info. static because it should not change once the - * instance is running. - * @param airbyteRole - * @param trackingIdentityFetcher - how we get the identity of the user. we have a function that - * takes in workspaceId and returns the tracking identity. it does not have any caching as - * email or other fields on the identity can change over time. - * @return tracking client - */ - @VisibleForTesting - static TrackingClient createTrackingClient(final Configs.TrackingStrategy trackingStrategy, - final Deployment deployment, - final String airbyteRole, - final Function trackingIdentityFetcher) { - return switch (trackingStrategy) { - case SEGMENT -> new SegmentTrackingClient(trackingIdentityFetcher, deployment, airbyteRole); - case LOGGING -> new LoggingTrackingClient(trackingIdentityFetcher); - default -> throw new IllegalStateException("unrecognized tracking strategy"); - }; - } - -} diff --git a/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingIdentity.java b/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingIdentity.java deleted file mode 100644 index e112eca4d202..000000000000 --- a/airbyte-analytics/src/main/java/io/airbyte/analytics/TrackingIdentity.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.analytics; - -import io.airbyte.commons.version.AirbyteVersion; -import java.util.Objects; -import java.util.Optional; -import java.util.UUID; - -@SuppressWarnings("PMD.CompareObjectsWithEquals") -public class TrackingIdentity { - - private final AirbyteVersion airbyteVersion; - private final UUID customerId; - private final String email; - private final Boolean anonymousDataCollection; - private final Boolean news; - private final Boolean securityUpdates; - - public static TrackingIdentity empty() { - return new TrackingIdentity(null, null, null, null, null, null); - } - - public TrackingIdentity( - final AirbyteVersion airbyteVersion, - final UUID customerId, - final String email, - final Boolean anonymousDataCollection, - final Boolean news, - final Boolean securityUpdates) { - this.airbyteVersion = airbyteVersion; - this.customerId = customerId; - this.email = email; - this.anonymousDataCollection = anonymousDataCollection; - this.news = news; - this.securityUpdates = securityUpdates; - } - - public AirbyteVersion getAirbyteVersion() { - return airbyteVersion; - } - - public UUID getCustomerId() { - return customerId; - } - - public Optional getEmail() { - return Optional.ofNullable(email); - } - - public boolean isAnonymousDataCollection() { - return anonymousDataCollection != null && anonymousDataCollection; - } - - public boolean isNews() { - return news != null && news; - } - - public boolean isSecurityUpdates() { - return securityUpdates != null && securityUpdates; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final TrackingIdentity that = (TrackingIdentity) o; - return anonymousDataCollection == that.anonymousDataCollection && - news == that.news && - securityUpdates == that.securityUpdates && - Objects.equals(customerId, that.customerId) && - Objects.equals(email, that.email); - } - - @Override - public int 
hashCode() { - return Objects.hash(customerId, email, anonymousDataCollection, news, securityUpdates); - } - -} diff --git a/airbyte-analytics/src/test/java/io/airbyte/analytics/SegmentTrackingClientTest.java b/airbyte-analytics/src/test/java/io/airbyte/analytics/SegmentTrackingClientTest.java deleted file mode 100644 index 3eff8a3cf8a1..000000000000 --- a/airbyte-analytics/src/test/java/io/airbyte/analytics/SegmentTrackingClientTest.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.analytics; - -import static io.airbyte.analytics.SegmentTrackingClient.AIRBYTE_ANALYTIC_SOURCE_HEADER; -import static io.airbyte.analytics.SegmentTrackingClient.AIRBYTE_SOURCE; -import static io.airbyte.analytics.SegmentTrackingClient.AIRBYTE_VERSION_KEY; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import com.segment.analytics.Analytics; -import com.segment.analytics.messages.IdentifyMessage; -import com.segment.analytics.messages.TrackMessage; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.micronaut.http.HttpHeaders; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.context.ServerRequestContext; -import java.util.Map; -import java.util.Objects; -import java.util.UUID; -import java.util.function.Function; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -class SegmentTrackingClientTest { - - private 
static final AirbyteVersion AIRBYTE_VERSION = new AirbyteVersion("dev"); - private static final Deployment DEPLOYMENT = new Deployment(Configs.DeploymentMode.OSS, UUID.randomUUID(), WorkerEnvironment.DOCKER); - private static final String EMAIL = "a@airbyte.io"; - private static final TrackingIdentity IDENTITY = new TrackingIdentity(AIRBYTE_VERSION, UUID.randomUUID(), EMAIL, false, false, true); - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final Function MOCK_TRACKING_IDENTITY = (workspaceId) -> IDENTITY; - private static final String JUMP = "jump"; - private static final String EMAIL_KEY = "email"; - - private Analytics analytics; - private SegmentTrackingClient segmentTrackingClient; - private Supplier roleSupplier; - - @BeforeEach - @SuppressWarnings("unchecked") - void setup() { - analytics = mock(Analytics.class); - roleSupplier = mock(Supplier.class); - segmentTrackingClient = new SegmentTrackingClient(MOCK_TRACKING_IDENTITY, DEPLOYMENT, null, analytics); - } - - @SuppressWarnings("OptionalGetWithoutIsPresent") - @Test - void testIdentify() { - // equals is not defined on MessageBuilder, so we need to use ArgumentCaptor to inspect each field - // manually. 
- final ArgumentCaptor mockBuilder = ArgumentCaptor.forClass(IdentifyMessage.Builder.class); - - segmentTrackingClient.identify(WORKSPACE_ID); - - verify(analytics).enqueue(mockBuilder.capture()); - final IdentifyMessage actual = mockBuilder.getValue().build(); - final Map expectedTraits = ImmutableMap.builder() - .put("anonymized", IDENTITY.isAnonymousDataCollection()) - .put(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION.serialize()) - .put("deployment_env", DEPLOYMENT.getDeploymentEnv()) - .put("deployment_mode", DEPLOYMENT.getDeploymentMode()) - .put("deployment_id", DEPLOYMENT.getDeploymentId()) - .put(EMAIL_KEY, IDENTITY.getEmail().get()) - .put("subscribed_newsletter", IDENTITY.isNews()) - .put("subscribed_security", IDENTITY.isSecurityUpdates()) - .build(); - assertEquals(IDENTITY.getCustomerId().toString(), actual.userId()); - assertEquals(expectedTraits, actual.traits()); - } - - @Test - void testIdentifyWithRole() { - segmentTrackingClient = new SegmentTrackingClient((workspaceId) -> IDENTITY, DEPLOYMENT, "role", analytics); - // equals is not defined on MessageBuilder, so we need to use ArgumentCaptor to inspect each field - // manually. 
- final ArgumentCaptor mockBuilder = ArgumentCaptor.forClass(IdentifyMessage.Builder.class); - when(roleSupplier.get()).thenReturn("role"); - - segmentTrackingClient.identify(WORKSPACE_ID); - - verify(analytics).enqueue(mockBuilder.capture()); - final IdentifyMessage actual = mockBuilder.getValue().build(); - final Map expectedTraits = ImmutableMap.builder() - .put("airbyte_role", "role") - .put(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION.serialize()) - .put("anonymized", IDENTITY.isAnonymousDataCollection()) - .put("deployment_env", DEPLOYMENT.getDeploymentEnv()) - .put("deployment_mode", DEPLOYMENT.getDeploymentMode()) - .put("deployment_id", DEPLOYMENT.getDeploymentId()) - .put(EMAIL_KEY, IDENTITY.getEmail().get()) - .put("subscribed_newsletter", IDENTITY.isNews()) - .put("subscribed_security", IDENTITY.isSecurityUpdates()) - .build(); - assertEquals(IDENTITY.getCustomerId().toString(), actual.userId()); - assertEquals(expectedTraits, actual.traits()); - } - - @Test - void testTrack() { - final ArgumentCaptor mockBuilder = ArgumentCaptor.forClass(TrackMessage.Builder.class); - final ImmutableMap metadata = - ImmutableMap.of(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION.serialize(), "user_id", IDENTITY.getCustomerId()); - - segmentTrackingClient.track(WORKSPACE_ID, JUMP); - - verify(analytics).enqueue(mockBuilder.capture()); - final TrackMessage actual = mockBuilder.getValue().build(); - assertEquals(JUMP, actual.event()); - assertEquals(IDENTITY.getCustomerId().toString(), actual.userId()); - assertEquals(metadata, filterTrackedAtProperty(Objects.requireNonNull(actual.properties()))); - } - - @Test - void testTrackWithMetadata() { - final ArgumentCaptor mockBuilder = ArgumentCaptor.forClass(TrackMessage.Builder.class); - final ImmutableMap metadata = ImmutableMap.of( - AIRBYTE_VERSION_KEY, AIRBYTE_VERSION.serialize(), - EMAIL_KEY, EMAIL, - "height", "80 meters", - "user_id", IDENTITY.getCustomerId()); - - segmentTrackingClient.track(WORKSPACE_ID, JUMP, metadata); - - 
verify(analytics).enqueue(mockBuilder.capture()); - final TrackMessage actual = mockBuilder.getValue().build(); - assertEquals(JUMP, actual.event()); - assertEquals(IDENTITY.getCustomerId().toString(), actual.userId()); - assertEquals(metadata, filterTrackedAtProperty(Objects.requireNonNull(actual.properties()))); - } - - @Test - void testTrackNullWorkspace() { - segmentTrackingClient.track(null, JUMP); - - verify(analytics, never()).enqueue(any()); - } - - @Test - void testTrackAirbyteAnalyticSource() { - final String analyticSource = "test"; - final HttpHeaders httpHeaders = mock(HttpHeaders.class); - final HttpRequest httpRequest = mock(HttpRequest.class); - - when(httpHeaders.get(AIRBYTE_ANALYTIC_SOURCE_HEADER)).thenReturn(analyticSource); - when(httpRequest.getHeaders()).thenReturn(httpHeaders); - ServerRequestContext.set(httpRequest); - - final ArgumentCaptor mockBuilder = ArgumentCaptor.forClass(TrackMessage.Builder.class); - final ImmutableMap metadata = ImmutableMap.of( - AIRBYTE_VERSION_KEY, AIRBYTE_VERSION.serialize(), - EMAIL_KEY, EMAIL, - "height", "80 meters", - "user_id", IDENTITY.getCustomerId()); - - segmentTrackingClient.track(WORKSPACE_ID, JUMP, metadata); - - verify(analytics).enqueue(mockBuilder.capture()); - final TrackMessage actual = mockBuilder.getValue().build(); - assertEquals(analyticSource, actual.properties().get(AIRBYTE_SOURCE)); - } - - private static ImmutableMap filterTrackedAtProperty(final Map properties) { - final String trackedAtKey = "tracked_at"; - assertTrue(properties.containsKey(trackedAtKey)); - final Builder builder = ImmutableMap.builder(); - properties.forEach((key, value) -> { - if (!trackedAtKey.equals(key)) { - builder.put(key, value); - } - }); - return builder.build(); - } - -} diff --git a/airbyte-analytics/src/test/java/io/airbyte/analytics/TrackingClientSingletonTest.java b/airbyte-analytics/src/test/java/io/airbyte/analytics/TrackingClientSingletonTest.java deleted file mode 100644 index 
8d02a0f6eaeb..000000000000 --- a/airbyte-analytics/src/test/java/io/airbyte/analytics/TrackingClientSingletonTest.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.analytics; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.Geography; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.UUID; -import java.util.function.Function; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class TrackingClientSingletonTest { - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final AirbyteVersion AIRBYTE_VERSION = new AirbyteVersion("dev"); - private static final String EMAIL = "a@airbyte.io"; - private static final Deployment DEPLOYMENT = new Deployment(Configs.DeploymentMode.OSS, UUID.randomUUID(), WorkerEnvironment.DOCKER); - private static final TrackingIdentity IDENTITY = new TrackingIdentity(AIRBYTE_VERSION, UUID.randomUUID(), EMAIL, false, false, true); - private static final Function MOCK_TRACKING_IDENTITY = (workspaceId) -> IDENTITY; - - private ConfigRepository configRepository; - - @BeforeEach - void setup() { - configRepository = mock(ConfigRepository.class); - // equivalent of resetting TrackingClientSingleton to uninitialized state. 
- TrackingClientSingleton.initialize(null); - } - - @Test - void testCreateTrackingClientLogging() { - assertTrue( - TrackingClientSingleton.createTrackingClient( - Configs.TrackingStrategy.LOGGING, - DEPLOYMENT, - "role", - MOCK_TRACKING_IDENTITY) instanceof LoggingTrackingClient); - } - - @Test - void testCreateTrackingClientSegment() { - assertTrue( - TrackingClientSingleton.createTrackingClient( - Configs.TrackingStrategy.SEGMENT, - DEPLOYMENT, - "role", - MOCK_TRACKING_IDENTITY) instanceof SegmentTrackingClient); - } - - @Test - void testGet() { - final TrackingClient client = mock(TrackingClient.class); - TrackingClientSingleton.initialize(client); - assertEquals(client, TrackingClientSingleton.get()); - } - - @Test - void testGetUninitialized() { - assertTrue(TrackingClientSingleton.get() instanceof LoggingTrackingClient); - } - - @Test - void testGetTrackingIdentityRespectsWorkspaceId() throws JsonValidationException, IOException, ConfigNotFoundException { - final StandardWorkspace workspace1 = new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withCustomerId(UUID.randomUUID()); - final StandardWorkspace workspace2 = new StandardWorkspace().withWorkspaceId(UUID.randomUUID()).withCustomerId(UUID.randomUUID()); - - when(configRepository.getStandardWorkspaceNoSecrets(workspace1.getWorkspaceId(), true)).thenReturn(workspace1); - when(configRepository.getStandardWorkspaceNoSecrets(workspace2.getWorkspaceId(), true)).thenReturn(workspace2); - - final TrackingIdentity workspace1Actual = - TrackingClientSingleton.getTrackingIdentity(configRepository, AIRBYTE_VERSION, workspace1.getWorkspaceId()); - final TrackingIdentity workspace2Actual = - TrackingClientSingleton.getTrackingIdentity(configRepository, AIRBYTE_VERSION, workspace2.getWorkspaceId()); - final TrackingIdentity workspace1Expected = new TrackingIdentity(AIRBYTE_VERSION, workspace1.getCustomerId(), null, null, null, null); - final TrackingIdentity workspace2Expected = new 
TrackingIdentity(AIRBYTE_VERSION, workspace2.getCustomerId(), null, null, null, null); - - assertEquals(workspace1Expected, workspace1Actual); - assertEquals(workspace2Expected, workspace2Actual); - } - - @Test - void testGetTrackingIdentityInitialSetupNotComplete() throws JsonValidationException, IOException, ConfigNotFoundException { - final StandardWorkspace workspace = new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withCustomerId(UUID.randomUUID()); - - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(workspace); - - final TrackingIdentity actual = TrackingClientSingleton.getTrackingIdentity(configRepository, AIRBYTE_VERSION, WORKSPACE_ID); - final TrackingIdentity expected = new TrackingIdentity(AIRBYTE_VERSION, workspace.getCustomerId(), null, null, null, null); - - assertEquals(expected, actual); - } - - @Test - void testGetTrackingIdentityNonAnonymous() throws JsonValidationException, IOException, ConfigNotFoundException { - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID) - .withCustomerId(UUID.randomUUID()) - .withEmail(EMAIL) - .withAnonymousDataCollection(false) - .withNews(true) - .withSecurityUpdates(true) - .withDefaultGeography(Geography.AUTO); - - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(workspace); - - final TrackingIdentity actual = TrackingClientSingleton.getTrackingIdentity(configRepository, AIRBYTE_VERSION, WORKSPACE_ID); - final TrackingIdentity expected = new TrackingIdentity(AIRBYTE_VERSION, workspace.getCustomerId(), workspace.getEmail(), false, true, true); - - assertEquals(expected, actual); - } - - @Test - void testGetTrackingIdentityAnonymous() throws JsonValidationException, IOException, ConfigNotFoundException { - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(WORKSPACE_ID) - .withCustomerId(UUID.randomUUID()) - .withEmail("a@airbyte.io") - .withAnonymousDataCollection(true) 
- .withNews(true) - .withSecurityUpdates(true) - .withDefaultGeography(Geography.AUTO); - - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(workspace); - - final TrackingIdentity actual = TrackingClientSingleton.getTrackingIdentity(configRepository, AIRBYTE_VERSION, WORKSPACE_ID); - final TrackingIdentity expected = new TrackingIdentity(AIRBYTE_VERSION, workspace.getCustomerId(), null, true, true, true); - - assertEquals(expected, actual); - } - -} diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile deleted file mode 100644 index 06e50598ed7a..000000000000 --- a/airbyte-bootloader/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:1.0 -FROM ${JDK_IMAGE} - -ARG VERSION=0.40.32 - -ENV APPLICATION airbyte-bootloader -ENV VERSION ${VERSION} - -WORKDIR /app - -ADD bin/${APPLICATION}-${VERSION}.tar /app - - -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-${VERSION}/bin/${APPLICATION}"] diff --git a/airbyte-bootloader/Readme.md b/airbyte-bootloader/Readme.md deleted file mode 100644 index 831613918da9..000000000000 --- a/airbyte-bootloader/Readme.md +++ /dev/null @@ -1,6 +0,0 @@ -# airbyte-bootloader - -This application runs at start up for Airbyte. It is responsible for making sure that the environment is upgraded and in a good state. e.g. It makes sure the database has been migrated to the correct version. - -## Entrypoint -* Application.java - has the main method for running the bootloader. 
diff --git a/airbyte-bootloader/build.gradle b/airbyte-bootloader/build.gradle deleted file mode 100644 index d61eaedd6be1..000000000000 --- a/airbyte-bootloader/build.gradle +++ /dev/null @@ -1,76 +0,0 @@ -plugins { - id 'application' -} - -dependencies { - annotationProcessor platform(libs.micronaut.bom) - annotationProcessor libs.bundles.micronaut.annotation.processor - - implementation platform(libs.micronaut.bom) - implementation libs.bundles.micronaut - - // Ensure that the versions defined in deps.toml are used - // instead of versions from transitive dependencies - implementation (libs.flyway.core) { - force = true - } - implementation (libs.jooq) { - force = true - } - - implementation project(':airbyte-config:init') - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') - implementation project(':airbyte-db:db-lib') - implementation project(":airbyte-json-validation") - implementation libs.airbyte.protocol - implementation project(':airbyte-persistence:job-persistence') - - testAnnotationProcessor platform(libs.micronaut.bom) - testAnnotationProcessor libs.bundles.micronaut.test.annotation.processor - - testImplementation libs.bundles.micronaut.test - testImplementation libs.bundles.junit - testImplementation libs.junit.jupiter.system.stubs - testImplementation libs.platform.testcontainers.postgresql -} - -mainClassName = 'io.airbyte.bootloader.Application' - -application { - applicationName = project.name - mainClass = mainClassName - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -Properties env = new Properties() -rootProject.file('.env.dev').withInputStream { env.load(it) } - -run { - // default for running on local machine. 
- env.each { entry -> - environment entry.getKey(), entry.getValue() - } - - environment 'AIRBYTE_ROLE', System.getenv('AIRBYTE_ROLE') - environment 'AIRBYTE_VERSION', env.VERSION - environment 'DATABASE_URL', 'jdbc:postgresql://localhost:5432/airbyte' -} - -test { - // Required to enable mocked beans - systemProperty("mockito.test.enabled", "true") -} - -// produce reproducible archives -// (see https://docs.gradle.org/current/userguide/working_with_files.html#sec:reproducible_archives) -tasks.withType(AbstractArchiveTask) { - preserveFileTimestamps = false - reproducibleFileOrder = true -} - -tasks.named("buildDockerImage") { - dependsOn copyGeneratedTar -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-bootloader/gradle.properties b/airbyte-bootloader/gradle.properties deleted file mode 100644 index 03cf6f280051..000000000000 --- a/airbyte-bootloader/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=bootloader diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Application.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Application.java deleted file mode 100644 index 35150ec70671..000000000000 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Application.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.bootloader; - -import io.micronaut.context.ApplicationContext; -import io.micronaut.runtime.Micronaut; -import lombok.extern.slf4j.Slf4j; - -/** - * Main application entry point responsible for starting the server and invoking the bootstrapping - * of the Airbyte environment. 
- */ -@Slf4j -public class Application { - - public static void main(final String[] args) { - try { - final ApplicationContext applicationContext = Micronaut.run(Application.class, args); - final Bootloader bootloader = applicationContext.getBean(Bootloader.class); - bootloader.load(); - System.exit(0); - } catch (final Exception e) { - log.error("Unable to bootstrap Airbyte environment.", e); - System.exit(-1); - } - } - -} diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Bootloader.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Bootloader.java deleted file mode 100644 index 1785b31437ae..000000000000 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Bootloader.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.bootloader; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Geography; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.init.DefinitionsProvider; -import io.airbyte.config.init.PostLoadExecutor; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.init.DatabaseInitializer; -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Value; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.Optional; -import java.util.UUID; -import lombok.extern.slf4j.Slf4j; - -/** - * Ensures that the databases are migrated to the appropriate level. 
- */ -@Singleton -@Slf4j -public class Bootloader { - - private static final AirbyteVersion VERSION_BREAK = new AirbyteVersion("0.32.0-alpha"); - - private final boolean autoUpgradeConnectors; - private final ConfigRepository configRepository; - private final DatabaseMigrator configsDatabaseMigrator; - private final DatabaseInitializer configsDatabaseInitializer; - private final AirbyteVersion currentAirbyteVersion; - private final Optional definitionsProvider; - private final FeatureFlags featureFlags; - private final DatabaseInitializer jobsDatabaseInitializer; - private final DatabaseMigrator jobsDatabaseMigrator; - private final JobPersistence jobPersistence; - private final PostLoadExecutor postLoadExecution; - private final ProtocolVersionChecker protocolVersionChecker; - private final boolean runMigrationOnStartup; - private final SecretMigrator secretMigrator; - - public Bootloader( - @Value("${airbyte.bootloader.auto-upgrade-connectors}") final boolean autoUpgradeConnectors, - final ConfigRepository configRepository, - @Named("configsDatabaseInitializer") final DatabaseInitializer configsDatabaseInitializer, - @Named("configsDatabaseMigrator") final DatabaseMigrator configsDatabaseMigrator, - final AirbyteVersion currentAirbyteVersion, - final Optional definitionsProvider, - final FeatureFlags featureFlags, - @Named("jobsDatabaseInitializer") final DatabaseInitializer jobsDatabaseInitializer, - @Named("jobsDatabaseMigrator") final DatabaseMigrator jobsDatabaseMigrator, - final JobPersistence jobPersistence, - final ProtocolVersionChecker protocolVersionChecker, - @Value("${airbyte.bootloader.run-migration-on-startup}") final boolean runMigrationOnStartup, - final SecretMigrator secretMigrator, - final PostLoadExecutor postLoadExecution) { - this.autoUpgradeConnectors = autoUpgradeConnectors; - this.configRepository = configRepository; - this.configsDatabaseInitializer = configsDatabaseInitializer; - this.configsDatabaseMigrator = configsDatabaseMigrator; - 
this.currentAirbyteVersion = currentAirbyteVersion; - this.definitionsProvider = definitionsProvider; - this.featureFlags = featureFlags; - this.jobsDatabaseInitializer = jobsDatabaseInitializer; - this.jobsDatabaseMigrator = jobsDatabaseMigrator; - this.jobPersistence = jobPersistence; - this.protocolVersionChecker = protocolVersionChecker; - this.runMigrationOnStartup = runMigrationOnStartup; - this.secretMigrator = secretMigrator; - this.postLoadExecution = postLoadExecution; - } - - /** - * Performs all required bootstrapping for the Airbyte environment. This includes the following: - *

    - *
  • Initializes the databases
  • - *
  • Check database migration compatibility
  • - *
  • Check protocol version compatibility
  • - *
  • Migrate databases
  • - *
  • Create default workspace
  • - *
  • Create default deployment
  • - *
  • Perform post migration tasks
  • - *
- * - * @throws Exception if unable to perform any of the bootstrap operations. - */ - public void load() throws Exception { - log.info("Initializing databases..."); - initializeDatabases(); - - log.info("Checking migration compatibility..."); - assertNonBreakingMigration(jobPersistence, currentAirbyteVersion); - - log.info("Checking protocol version constraints..."); - assertNonBreakingProtocolVersionConstraints(protocolVersionChecker, jobPersistence, autoUpgradeConnectors); - - log.info("Running database migrations..."); - runFlywayMigration(runMigrationOnStartup, configsDatabaseMigrator, jobsDatabaseMigrator); - - log.info("Creating workspace (if none exists)..."); - createWorkspaceIfNoneExists(configRepository); - - log.info("Creating deployment (if none exists)..."); - createDeploymentIfNoneExists(jobPersistence); - - final String airbyteVersion = currentAirbyteVersion.serialize(); - log.info("Setting Airbyte version to '{}'...", airbyteVersion); - jobPersistence.setVersion(airbyteVersion); - log.info("Set version to '{}'", airbyteVersion); - - if (postLoadExecution != null) { - postLoadExecution.execute(); - log.info("Finished running post load Execution."); - } - - log.info("Finished bootstrapping Airbyte environment."); - } - - private void assertNonBreakingMigration(final JobPersistence jobPersistence, final AirbyteVersion airbyteVersion) - throws IOException { - // version in the database when the server main method is called. may be empty if this is the first - // time the server is started. - log.info("Checking for illegal upgrade..."); - final Optional initialAirbyteDatabaseVersion = jobPersistence.getVersion().map(AirbyteVersion::new); - if (!isLegalUpgrade(initialAirbyteDatabaseVersion.orElse(null), airbyteVersion)) { - final String attentionBanner = MoreResources.readResource("banner/attention-banner.txt"); - log.error(attentionBanner); - final String message = String.format( - "Cannot upgrade from version %s to version %s directly. 
First you must upgrade to version %s. After that upgrade is complete, you may upgrade to version %s", - initialAirbyteDatabaseVersion.get().serialize(), - airbyteVersion.serialize(), - VERSION_BREAK.serialize(), - airbyteVersion.serialize()); - - log.error(message); - throw new RuntimeException(message); - } - } - - private void assertNonBreakingProtocolVersionConstraints(final ProtocolVersionChecker protocolVersionChecker, - final JobPersistence jobPersistence, - final boolean autoUpgradeConnectors) - throws Exception { - final Optional newProtocolRange = protocolVersionChecker.validate(autoUpgradeConnectors); - if (newProtocolRange.isEmpty()) { - throw new RuntimeException( - "Aborting bootloader to avoid breaking existing connection after an upgrade. " + - "Please address airbyte protocol version support issues in the connectors before retrying."); - } - trackProtocolVersion(jobPersistence, newProtocolRange.get()); - } - - private void createDeploymentIfNoneExists(final JobPersistence jobPersistence) throws IOException { - final Optional deploymentOptional = jobPersistence.getDeployment(); - if (deploymentOptional.isPresent()) { - log.info("Running deployment: {}", deploymentOptional.get()); - } else { - final UUID deploymentId = UUID.randomUUID(); - jobPersistence.setDeployment(deploymentId); - log.info("Created deployment: {}", deploymentId); - } - } - - private void createWorkspaceIfNoneExists(final ConfigRepository configRepository) throws JsonValidationException, IOException { - if (!configRepository.listStandardWorkspaces(true).isEmpty()) { - log.info("Workspace already exists for the deployment."); - return; - } - - final UUID workspaceId = UUID.randomUUID(); - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(workspaceId) - .withCustomerId(UUID.randomUUID()) - .withName(workspaceId.toString()) - .withSlug(workspaceId.toString()) - .withInitialSetupComplete(false) - .withDisplaySetupWizard(true) - .withTombstone(false) - 
.withDefaultGeography(Geography.AUTO); - // NOTE: it's safe to use the NoSecrets version since we know that the user hasn't supplied any - // secrets yet. - configRepository.writeStandardWorkspaceNoSecrets(workspace); - } - - private void initializeDatabases() throws DatabaseInitializationException { - log.info("Initializing databases..."); - configsDatabaseInitializer.initialize(); - jobsDatabaseInitializer.initialize(); - log.info("Databases initialized."); - } - - @VisibleForTesting - boolean isLegalUpgrade(final AirbyteVersion airbyteDatabaseVersion, final AirbyteVersion airbyteVersion) { - // means there was no previous version so upgrade even needs to happen. always legal. - if (airbyteDatabaseVersion == null) { - log.info("No previous Airbyte Version set."); - return true; - } - - log.info("Current Airbyte version: {}", airbyteDatabaseVersion); - log.info("Future Airbyte version: {}", airbyteVersion); - final var futureVersionIsAfterVersionBreak = airbyteVersion.greaterThan(VERSION_BREAK) || airbyteVersion.isDev(); - final var isUpgradingThroughVersionBreak = airbyteDatabaseVersion.lessThan(VERSION_BREAK) && futureVersionIsAfterVersionBreak; - return !isUpgradingThroughVersionBreak; - } - - private void runFlywayMigration(final boolean runDatabaseMigrationOnStartup, - final DatabaseMigrator configDbMigrator, - final DatabaseMigrator jobDbMigrator) { - log.info("Creating baseline for config database..."); - configDbMigrator.createBaseline(); - log.info("Creating baseline for job database..."); - jobDbMigrator.createBaseline(); - - if (runDatabaseMigrationOnStartup) { - log.info("Migrating configs database..."); - configDbMigrator.migrate(); - log.info("Migrating jobs database..."); - jobDbMigrator.migrate(); - } else { - log.info("Auto database migration has been skipped."); - } - } - - private void trackProtocolVersion(final JobPersistence jobPersistence, final AirbyteProtocolVersionRange protocolVersionRange) - throws IOException { - 
jobPersistence.setAirbyteProtocolVersionMin(protocolVersionRange.min()); - jobPersistence.setAirbyteProtocolVersionMax(protocolVersionRange.max()); - log.info("AirbyteProtocol version support range: [{}:{}]", protocolVersionRange.min().serialize(), protocolVersionRange.max().serialize()); - } - -} diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/DefaultPostLoadExecutor.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/DefaultPostLoadExecutor.java deleted file mode 100644 index ce92c1f42afa..000000000000 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/DefaultPostLoadExecutor.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.bootloader; - -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.config.init.ApplyDefinitionsHelper; -import io.airbyte.config.init.PostLoadExecutor; -import io.airbyte.persistence.job.JobPersistence; -import jakarta.inject.Singleton; -import lombok.extern.slf4j.Slf4j; - -/** - * Default implementation of the tasks that should be executed after a successful bootstrapping of - * the Airbyte environment. - *

- *

- * This implementation performs the following tasks: - *

    - *
  • Applies the latest definitions from the provider to the repository
  • - *
  • If enables, migrates secrets
  • - *
- */ -@Singleton -@Slf4j -public class DefaultPostLoadExecutor implements PostLoadExecutor { - - private final ApplyDefinitionsHelper applyDefinitionsHelper; - private final FeatureFlags featureFlags; - private final JobPersistence jobPersistence; - private final SecretMigrator secretMigrator; - - public DefaultPostLoadExecutor(final ApplyDefinitionsHelper applyDefinitionsHelper, - final FeatureFlags featureFlags, - final JobPersistence jobPersistence, - final SecretMigrator secretMigrator) { - this.applyDefinitionsHelper = applyDefinitionsHelper; - this.featureFlags = featureFlags; - this.jobPersistence = jobPersistence; - this.secretMigrator = secretMigrator; - } - - @Override - public void execute() throws Exception { - applyDefinitionsHelper.apply(); - - if (featureFlags.forceSecretMigration() || !jobPersistence.isSecretMigrated()) { - if (this.secretMigrator != null) { - this.secretMigrator.migrateSecrets(); - log.info("Secrets successfully migrated."); - } - } - log.info("Loaded seed data."); - } - -} diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java deleted file mode 100644 index c03cfde922c8..000000000000 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.bootloader; - -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorType; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.init.DefinitionsProvider; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.JobPersistence; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.HashSet; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; - -/** - * Validates that all connectors support the desired target Airbyte protocol version. - */ -@Singleton -@Slf4j -public class ProtocolVersionChecker { - - private final JobPersistence jobPersistence; - private final AirbyteProtocolVersionRange airbyteProtocolTargetVersionRange; - private final ConfigRepository configRepository; - private final Optional definitionsProvider; - - /** - * Constructs a new protocol version checker that verifies all connectors are within the provided - * target protocol version range. - * - * @param jobPersistence A {@link JobPersistence} instance. - * @param airbyteProtocolTargetVersionRange The target Airbyte protocol version range. - * @param configRepository A {@link ConfigRepository} instance - * @param definitionsProvider An {@link Optional} that may contain a {@link DefinitionsProvider} - * instance. 
- */ - public ProtocolVersionChecker(final JobPersistence jobPersistence, - final AirbyteProtocolVersionRange airbyteProtocolTargetVersionRange, - final ConfigRepository configRepository, - final Optional definitionsProvider) { - this.jobPersistence = jobPersistence; - this.airbyteProtocolTargetVersionRange = airbyteProtocolTargetVersionRange; - this.configRepository = configRepository; - this.definitionsProvider = definitionsProvider; - } - - /** - * Validate the AirbyteProtocolVersion support range between the platform and the connectors. - *

- * The goal is to make sure that we do not end up disabling existing connections after an upgrade - * that changes the protocol support range. - * - * @param supportAutoUpgrade whether the connectors will be automatically upgraded by the platform - * @return the supported protocol version range if check is successful, Optional.empty() if we would - * break existing connections. - * @throws IOException - */ - public Optional validate(final boolean supportAutoUpgrade) throws IOException { - final Optional currentAirbyteVersion = getCurrentAirbyteVersion(); - final Optional currentRange = jobPersistence.getCurrentProtocolVersionRange(); - final AirbyteProtocolVersionRange targetRange = getTargetProtocolVersionRange(); - - // Checking if there is a pre-existing version of airbyte. - // Without this check, the first run of the validation would fail because we do not have the tables - // set yet - // which means that the actor definitions lookup will throw SQLExceptions. - if (currentAirbyteVersion.isEmpty()) { - log.info("No previous version of Airbyte detected, assuming this is a fresh deploy."); - return Optional.of(targetRange); - } - - if (currentRange.isEmpty() || currentRange.get().equals(targetRange)) { - log.info("Using AirbyteProtocolVersion range [{}:{}]", targetRange.min().serialize(), targetRange.max().serialize()); - return Optional.of(targetRange); - } - - log.info("Detected an AirbyteProtocolVersion range change from [{}:{}] to [{}:{}]", - currentRange.get().min().serialize(), currentRange.get().max().serialize(), - targetRange.min().serialize(), targetRange.max().serialize()); - - final Map> conflicts = getConflictingActorDefinitions(targetRange); - - if (conflicts.isEmpty()) { - log.info("No protocol version conflict detected."); - return Optional.of(targetRange); - } - - final Set destConflicts = conflicts.getOrDefault(ActorType.DESTINATION, new HashSet<>()); - final Set sourceConflicts = conflicts.getOrDefault(ActorType.SOURCE, new HashSet<>()); - - 
if (!supportAutoUpgrade) { - // If we do not support auto upgrade, any conflict of used connectors must be resolved before being - // able to upgrade the platform. - log.warn("The following connectors need to be upgraded before being able to upgrade the platform"); - formatActorDefinitionForLogging(destConflicts, sourceConflicts).forEach(log::warn); - return Optional.empty(); - } - - final Set remainingDestConflicts = - projectRemainingConflictsAfterConnectorUpgrades(targetRange, destConflicts, ActorType.DESTINATION); - final Set remainingSourceConflicts = - projectRemainingConflictsAfterConnectorUpgrades(targetRange, sourceConflicts, ActorType.SOURCE); - - if (!remainingDestConflicts.isEmpty() || !remainingSourceConflicts.isEmpty()) { - // These set of connectors need a manual intervention because there is no compatible version listed - formatActorDefinitionForLogging(remainingDestConflicts, remainingSourceConflicts).forEach(log::warn); - return Optional.empty(); - } - - // These can be auto upgraded - destConflicts.removeAll(remainingDestConflicts); - sourceConflicts.removeAll(remainingSourceConflicts); - log.info("The following connectors will be upgraded"); - formatActorDefinitionForLogging(destConflicts, sourceConflicts).forEach(log::info); - return Optional.of(targetRange); - } - - protected Optional getCurrentAirbyteVersion() throws IOException { - return jobPersistence.getVersion().map(AirbyteVersion::new); - } - - protected AirbyteProtocolVersionRange getTargetProtocolVersionRange() { - return airbyteProtocolTargetVersionRange; - } - - protected Map> getConflictingActorDefinitions(final AirbyteProtocolVersionRange targetRange) throws IOException { - final Map> actorDefIdToProtocolVersion = configRepository.getActorDefinitionToProtocolVersionMap(); - final Map> conflicts = - actorDefIdToProtocolVersion.entrySet().stream() - // Keeping only ActorDefinitionIds that have an unsupported protocol version - .filter(e -> 
!targetRange.isSupported(e.getValue().getValue())) - // Build the ActorType -> List[ActorDefIds] map - .map(e -> Map.entry(e.getValue().getKey(), e.getKey())) - // Group by ActorType and transform the List> into a Set - .collect(Collectors.groupingBy(Entry::getKey, - Collectors.collectingAndThen(Collectors.toList(), list -> list.stream().map(Entry::getValue).collect(Collectors.toSet())))); - return conflicts; - } - - protected Set projectRemainingConflictsAfterConnectorUpgrades(final AirbyteProtocolVersionRange targetRange, - final Set initialConflicts, - final ActorType actorType) { - if (initialConflicts.isEmpty()) { - return Set.of(); - } - - final Set upgradedSourceDefs = getProtocolVersionsForActorDefinitions(actorType) - // Keep definition ids if the protocol version will fall into the new supported range - .filter(e -> initialConflicts.contains(e.getKey()) && targetRange.isSupported(e.getValue())) - .map(Entry::getKey) - .collect(Collectors.toSet()); - - // Get the set of source definitions that will still have conflict after the connector upgrades - final Set remainingConflicts = new HashSet<>(initialConflicts); - remainingConflicts.removeAll(upgradedSourceDefs); - return remainingConflicts; - } - - protected Stream> getProtocolVersionsForActorDefinitions(final ActorType actorType) { - if (definitionsProvider.isEmpty()) { - return Stream.empty(); - } - - return getActorVersions(actorType); - } - - private Stream> getActorVersions(final ActorType actorType) { - switch (actorType) { - case SOURCE: - return definitionsProvider.get().getSourceDefinitions() - .stream() - .map(def -> Map.entry(def.getSourceDefinitionId(), AirbyteProtocolVersion.getWithDefault(def.getSpec().getProtocolVersion()))); - case DESTINATION: - default: - return definitionsProvider.get().getDestinationDefinitions() - .stream() - .map(def -> Map.entry(def.getDestinationDefinitionId(), AirbyteProtocolVersion.getWithDefault(def.getSpec().getProtocolVersion()))); - } - } - - private Stream 
formatActorDefinitionForLogging(final Set remainingDestConflicts, final Set remainingSourceConflicts) { - return Stream.concat( - remainingSourceConflicts.stream().map(defId -> { - final StandardSourceDefinition sourceDef; - try { - sourceDef = configRepository.getStandardSourceDefinition(defId); - return String.format("Source: %s: %s: protocol version: %s", - sourceDef.getSourceDefinitionId(), sourceDef.getName(), sourceDef.getProtocolVersion()); - } catch (final Exception e) { - log.info("Failed to getStandardSourceDefinition for {}", defId, e); - return String.format("Source: %s: Failed to fetch details...", defId); - } - }), - remainingDestConflicts.stream().map(defId -> { - try { - final StandardDestinationDefinition destDef = configRepository.getStandardDestinationDefinition(defId); - return String.format("Destination: %s: %s: protocol version: %s", - destDef.getDestinationDefinitionId(), destDef.getName(), destDef.getProtocolVersion()); - } catch (final Exception e) { - log.info("Failed to getStandardDestinationDefinition for {}", defId, e); - return String.format("Source: %s: Failed to fetch details...", defId); - } - })); - } - -} diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/SecretMigrator.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/SecretMigrator.java deleted file mode 100644 index 451995f4af40..000000000000 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/SecretMigrator.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.bootloader; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.stream.Collectors; -import lombok.Value; -import lombok.extern.slf4j.Slf4j; - -@Singleton -@Slf4j -public class SecretMigrator { - - private final SecretsRepositoryReader secretsReader; - private final SecretsRepositoryWriter secretsWriter; - private final ConfigRepository configRepository; - private final JobPersistence jobPersistence; - private final Optional secretPersistence; - - public SecretMigrator(final SecretsRepositoryReader secretsReader, - final SecretsRepositoryWriter secretsWriter, - final ConfigRepository configRepository, - final JobPersistence jobPersistence, - @Named("secretPersistence") final Optional secretPersistence) { - this.secretsReader = secretsReader; - this.secretsWriter = secretsWriter; - this.configRepository = configRepository; - this.jobPersistence = jobPersistence; - this.secretPersistence = secretPersistence; - } - - @Value - static class ConnectorConfiguration { - - private final UUID workspace; - 
private final JsonNode configuration; - private final JsonNode spec; - - } - - /** - * Perform a secret migration. It will load all the actor specs extract the secret JsonPath from it. - * Then for all the secret that are stored in a plain text format, it will save the plain text in - * the secret manager and store the coordinate in the config DB. - */ - public void migrateSecrets() throws Exception { - if (secretPersistence.isEmpty()) { - log.info("No secret persistence is provided, the migration won't be run "); - - return; - } else { - secretPersistence.get().initialize(); - } - - final List standardSourceDefinitions = configRepository.listStandardSourceDefinitions(true); - - final Map definitionIdToSourceSpecs = standardSourceDefinitions - .stream().collect(Collectors.toMap(StandardSourceDefinition::getSourceDefinitionId, StandardSourceDefinition::getSpec)); - - final List sourcesWithoutSecrets = configRepository.listSourceConnection(); - final List sourcesWithSecrets = new ArrayList<>(); - for (final SourceConnection source : sourcesWithoutSecrets) { - final SourceConnection sourceWithSecrets = secretsReader.getSourceConnectionWithSecrets(source.getSourceId()); - sourcesWithSecrets.add(sourceWithSecrets); - } - - migrateSources(sourcesWithSecrets, definitionIdToSourceSpecs); - - final List standardDestinationDefinitions = configRepository.listStandardDestinationDefinitions(true); - - final Map definitionIdToDestinationSpecs = standardDestinationDefinitions.stream() - .collect(Collectors.toMap(StandardDestinationDefinition::getDestinationDefinitionId, StandardDestinationDefinition::getSpec)); - - final List destinationsWithoutSecrets = configRepository.listDestinationConnection(); - final List destinationsWithSecrets = new ArrayList<>(); - for (final DestinationConnection destination : destinationsWithoutSecrets) { - final DestinationConnection destinationWithoutSecrets = secretsReader.getDestinationConnectionWithSecrets(destination.getDestinationId()); - 
destinationsWithSecrets.add(destinationWithoutSecrets); - } - - migrateDestinations(destinationsWithSecrets, definitionIdToDestinationSpecs); - - jobPersistence.setSecretMigrationDone(); - } - - /** - * This is migrating the secrets for the source actors - */ - @VisibleForTesting - void migrateSources(final List sources, final Map definitionIdToSourceSpecs) - throws JsonValidationException, IOException { - log.info("Migrating Sources"); - for (final SourceConnection source : sources) { - final Optional specOptional = Optional.ofNullable(definitionIdToSourceSpecs.get(source.getSourceDefinitionId())); - - if (specOptional.isPresent()) { - secretsWriter.writeSourceConnection(source, specOptional.get()); - } else { - // if the spec can't be found, don't risk writing secrets to db. wipe out the configuration for the - // connector. - final SourceConnection sourceWithConfigRemoved = Jsons.clone(source); - sourceWithConfigRemoved.setConfiguration(Jsons.emptyObject()); - secretsWriter.writeSourceConnection(sourceWithConfigRemoved, new ConnectorSpecification().withConnectionSpecification(Jsons.emptyObject())); - } - } - } - - /** - * This is migrating the secrets for the destination actors - */ - @VisibleForTesting - void migrateDestinations(final List destinations, final Map definitionIdToDestinationSpecs) - throws JsonValidationException, IOException { - log.info("Migration Destinations"); - for (final DestinationConnection destination : destinations) { - final Optional specOptional = - Optional.ofNullable(definitionIdToDestinationSpecs.get(destination.getDestinationDefinitionId())); - - if (specOptional.isPresent()) { - secretsWriter.writeDestinationConnection(destination, specOptional.get()); - } else { - // if the spec can't be found, don't risk writing secrets to db. wipe out the configuration for the - // connector. 
- final DestinationConnection destinationWithConfigRemoved = Jsons.clone(destination); - destinationWithConfigRemoved.setConfiguration(Jsons.emptyObject()); - secretsWriter.writeDestinationConnection(destinationWithConfigRemoved, - new ConnectorSpecification().withConnectionSpecification(Jsons.emptyObject())); - } - } - } - -} diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/ApplicationBeanFactory.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/ApplicationBeanFactory.java deleted file mode 100644 index df2ced11ef73..000000000000 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/ApplicationBeanFactory.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.bootloader.config; - -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.commons.version.Version; -import io.airbyte.config.init.DefinitionsProvider; -import io.airbyte.config.init.LocalDefinitionsProvider; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHydrator; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Value; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.Optional; - -/** - * Micronaut bean factory for general application-related singletons. 
- */ -@Factory -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class ApplicationBeanFactory { - - @Singleton - public AirbyteVersion airbyteVersion(@Value("${airbyte.version}") final String version) { - return new AirbyteVersion(version); - } - - @Singleton - public AirbyteProtocolVersionRange airbyteProtocolTargetVersionRange(@Value("${airbyte.protocol.target.range.min-version}") final String min, - @Value("${airbyte.protocol.target.range.max-version}") final String max) { - return new AirbyteProtocolVersionRange(new Version(min), new Version(max)); - } - - @Singleton - public DefinitionsProvider localDefinitionsProvider() throws IOException { - return new LocalDefinitionsProvider(); - } - - @Singleton - public FeatureFlags featureFlags() { - return new EnvVariableFeatureFlags(); - } - - @Singleton - public JsonSecretsProcessor jsonSecretsProcessor() { - return JsonSecretsProcessor.builder() - .copySecrets(false) - .build(); - } - - @Singleton - public SecretsRepositoryReader secretsRepositoryReader(final ConfigRepository configRepository, final SecretsHydrator secretsHydrator) { - return new SecretsRepositoryReader(configRepository, secretsHydrator); - } - - @Singleton - public SecretsRepositoryWriter secretsRepositoryWriter(final ConfigRepository configRepository, - @Named("secretPersistence") final Optional secretPersistence, - @Named("ephemeralSecretPersistence") final Optional ephemeralSecretPersistence) { - return new SecretsRepositoryWriter(configRepository, secretPersistence, ephemeralSecretPersistence); - } - -} diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java deleted file mode 100644 index f3a77957b919..000000000000 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.bootloader.config; - -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.db.Database; -import io.airbyte.db.check.impl.JobsDatabaseAvailabilityCheck; -import io.airbyte.db.factory.DatabaseCheckFactory; -import io.airbyte.db.init.DatabaseInitializer; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.DatabaseMigrator; -import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; -import io.airbyte.db.instance.jobs.JobsDatabaseMigrator; -import io.airbyte.persistence.job.DefaultJobPersistence; -import io.airbyte.persistence.job.JobPersistence; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Value; -import io.micronaut.flyway.FlywayConfigurationProperties; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import javax.sql.DataSource; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; - -/** - * Micronaut bean factory for database-related singletons. 
- */ -@Factory -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class DatabaseBeanFactory { - - private static final String BASELINE_DESCRIPTION = "Baseline from file-based migration v1"; - private static final Boolean BASELINE_ON_MIGRATION = true; - private static final String INSTALLED_BY = "BootloaderApp"; - - @Singleton - @Named("configDatabase") - public Database configDatabase(@Named("config") final DSLContext dslContext) throws IOException { - return new Database(dslContext); - } - - @Singleton - @Named("jobsDatabase") - public Database jobsDatabase(@Named("jobs") final DSLContext dslContext) throws IOException { - return new Database(dslContext); - } - - @Singleton - @Named("configFlyway") - public Flyway configFlyway(@Named("config") final FlywayConfigurationProperties configFlywayConfigurationProperties, - @Named("config") final DataSource configDataSource, - @Value("${airbyte.bootloader.migration-baseline-version}") final String baselineVersion) { - return configFlywayConfigurationProperties.getFluentConfiguration() - .dataSource(configDataSource) - .baselineVersion(baselineVersion) - .baselineDescription(BASELINE_DESCRIPTION) - .baselineOnMigrate(BASELINE_ON_MIGRATION) - .installedBy(INSTALLED_BY) - .table(String.format("airbyte_%s_migrations", "configs")) - .load(); - } - - @Singleton - @Named("jobsFlyway") - public Flyway jobsFlyway(@Named("jobs") final FlywayConfigurationProperties jobsFlywayConfigurationProperties, - @Named("jobs") final DataSource jobsDataSource, - @Value("${airbyte.bootloader.migration-baseline-version}") final String baselineVersion) { - return jobsFlywayConfigurationProperties.getFluentConfiguration() - .dataSource(jobsDataSource) - .baselineVersion(baselineVersion) - .baselineDescription(BASELINE_DESCRIPTION) - .baselineOnMigrate(BASELINE_ON_MIGRATION) - .installedBy(INSTALLED_BY) - .table(String.format("airbyte_%s_migrations", "jobs")) - .load(); - } - - @Singleton - public ConfigRepository 
configRepository(@Named("configDatabase") final Database configDatabase) { - return new ConfigRepository(configDatabase); - } - - @Singleton - public JobPersistence jobPersistence(@Named("jobsDatabase") final Database jobDatabase) { - return new DefaultJobPersistence(jobDatabase); - } - - @Singleton - @Named("configsDatabaseInitializer") - public DatabaseInitializer configsDatabaseInitializer(@Named("config") final DSLContext configsDslContext, - @Value("${airbyte.flyway.configs.initialization-timeout-ms}") final Long configsDatabaseInitializationTimeoutMs) - throws IOException { - return DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, - configsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH)); - } - - @Singleton - @Named("jobsDatabaseInitializer") - public DatabaseInitializer jobsDatabaseInitializer(@Named("jobs") final DSLContext jobsDslContext, - @Value("${airbyte.flyway.jobs.initialization-timeout-ms}") final Long jobsDatabaseInitializationTimeoutMs) - throws IOException { - return DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, - jobsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH)); - } - - @Singleton - @Named("jobsDatabaseAvailabilityCheck") - public JobsDatabaseAvailabilityCheck jobsDatabaseAvailabilityCheck(@Named("jobs") final DSLContext dslContext) { - return new JobsDatabaseAvailabilityCheck(dslContext, DatabaseConstants.DEFAULT_ASSERT_DATABASE_TIMEOUT_MS); - } - - @Singleton - @Named("configsDatabaseMigrator") - public DatabaseMigrator configsDatabaseMigrator(@Named("configDatabase") final Database configDatabase, - @Named("configFlyway") final Flyway configFlyway) { - return new ConfigsDatabaseMigrator(configDatabase, configFlyway); - } - - @Singleton - @Named("jobsDatabaseMigrator") - public DatabaseMigrator jobsDatabaseMigrator(@Named("jobsDatabase") final Database jobsDatabase, - 
@Named("jobsFlyway") final Flyway jobsFlyway) { - return new JobsDatabaseMigrator(jobsDatabase, jobsFlyway); - } - -} diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/SecretPersistenceBeanFactory.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/SecretPersistenceBeanFactory.java deleted file mode 100644 index 15e9720d23b6..000000000000 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/SecretPersistenceBeanFactory.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.bootloader.config; - -import io.airbyte.config.persistence.split_secrets.AWSSecretManagerPersistence; -import io.airbyte.config.persistence.split_secrets.GoogleSecretManagerPersistence; -import io.airbyte.config.persistence.split_secrets.LocalTestingSecretPersistence; -import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHydrator; -import io.airbyte.config.persistence.split_secrets.VaultSecretPersistence; -import io.airbyte.db.Database; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.annotation.Value; -import jakarta.inject.Named; -import jakarta.inject.Singleton; - -/** - * Micronaut bean factory for secret persistence-related singletons. 
- */ -@Factory -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class SecretPersistenceBeanFactory { - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^(?!testing_config_db_table).*") - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^(?!google_secret_manager).*") - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^(?!vault).*") - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^(?!aws_secret_manager).*") - @Named("secretPersistence") - public SecretPersistence defaultSecretPersistence(@Named("configDatabase") final Database configDatabase) { - return localTestingSecretPersistence(configDatabase); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^testing_config_db_table$") - @Named("secretPersistence") - public SecretPersistence localTestingSecretPersistence(@Named("configDatabase") final Database configDatabase) { - return new LocalTestingSecretPersistence(configDatabase); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^testing_config_db_table$") - @Named("ephemeralSecretPersistence") - public SecretPersistence ephemeralLocalTestingSecretPersistence(@Named("configDatabase") final Database configDatabase) { - return new LocalTestingSecretPersistence(configDatabase); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^google_secret_manager$") - @Named("secretPersistence") - public SecretPersistence googleSecretPersistence(@Value("${airbyte.secret.store.gcp.credentials}") final String credentials, - @Value("${airbyte.secret.store.gcp.project-id}") final String projectId) { - return GoogleSecretManagerPersistence.getLongLived(projectId, credentials); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^google_secret_manager$") - @Named("ephemeralSecretPersistence") - public SecretPersistence 
ephemeralGoogleSecretPersistence(@Value("${airbyte.secret.store.gcp.credentials}") final String credentials, - @Value("${airbyte.secret.store.gcp.project-id}") final String projectId) { - return GoogleSecretManagerPersistence.getEphemeral(projectId, credentials); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^vault$") - @Named("secretPersistence") - public SecretPersistence vaultSecretPersistence(@Value("${airbyte.secret.store.vault.address}") final String address, - @Value("${airbyte.secret.store.vault.prefix}") final String prefix, - @Value("${airbyte.secret.store.vault.token}") final String token) { - return new VaultSecretPersistence(address, prefix, token); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^vault$") - @Named("ephemeralSecretPersistence") - public SecretPersistence ephemeralVaultSecretPersistence(@Value("${airbyte.secret.store.vault.address}") final String address, - @Value("${airbyte.secret.store.vault.prefix}") final String prefix, - @Value("${airbyte.secret.store.vault.token}") final String token) { - return new VaultSecretPersistence(address, prefix, token); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^aws_secret_manager$") - @Named("secretPersistence") - public SecretPersistence awsSecretPersistence(@Value("${airbyte.secret.store.aws.access-key}") final String awsAccessKey, - @Value("${airbyte.secret.store.aws.secret-key}") final String awsSecretKey) { - return new AWSSecretManagerPersistence(awsAccessKey, awsSecretKey); - } - - @Singleton - public SecretsHydrator secretsHydrator(@Named("secretPersistence") final SecretPersistence secretPersistence) { - return new RealSecretsHydrator(secretPersistence); - } - -} diff --git a/airbyte-bootloader/src/main/resources/application.yml b/airbyte-bootloader/src/main/resources/application.yml deleted file mode 100644 index 38ed361e5e0c..000000000000 --- 
a/airbyte-bootloader/src/main/resources/application.yml +++ /dev/null @@ -1,121 +0,0 @@ -micronaut: - application: - name: airbyte-bootloader - server: - port: 9002 - -airbyte: - bootloader: - auto-upgrade-connectors: ${AUTO_UPGRADE_CONNECTORS_PROTOCOL:false} - migration-baseline-version: ${BOOTLOADER_MIGRATION_BASELINE_VERSION:0.29.0.001} - run-migration-on-startup: ${RUN_DATABASE_MIGRATION_ON_STARTUP:true} - flyway: - configs: - initialization-timeout-ms: ${CONFIGS_DATABASE_INITIALIZATION_TIMEOUT_MS:60000} - jobs: - initialization-timeout-ms: ${JOBS_DATABASE_INITIALIZATION_TIMEOUT_MS:60000} - platform: - remote-connector-catalog: - timeout-ms: ${REMOTE_CONNECTOR_CATALOG_MS:10000} - url: ${REMOTE_CONNECTOR_CATALOG_URL:} - protocol: - target: - range: - min-version: ${AIRBYTE_PROTOCOL_VERSION_MIN:0.0.0} - max-version: ${AIRBYTE_PROTOCOL_VERSION_MAX:0.3.0} - secret: - persistence: ${SECRET_PERSISTENCE:TESTING_CONFIG_DB_TABLE} - store: - aws: - access-key: ${AWS_ACCESS_KEY:} - secret-key: ${AWS_SECRET_ACCESS_KEY:} - gcp: - credentials: ${SECRET_STORE_GCP_CREDENTIALS:} - project-id: ${SECRET_STORE_GCP_PROJECT_ID:} - vault: - address: ${VAULT_ADDRESS:} - prefix: ${VAULT_PREFIX:} - token: ${VAULT_AUTH_TOKEN:} - version: ${AIRBYTE_VERSION} - -datasources: - config: - connection-test-query: SELECT 1 - connection-timeout: 30000 - idle-timeout: 600000 - initialization-fail-timeout: -1 # Disable fail fast checking to avoid issues due to other pods not being started in time - maximum-pool-size: 5 - minimum-idle: 0 - url: ${DATABASE_URL} - driverClassName: org.postgresql.Driver - username: ${DATABASE_USER} - password: ${DATABASE_PASSWORD} - jobs: - connection-test-query: SELECT 1 - connection-timeout: 30000 - idle-timeout: 600000 - initialization-fail-timeout: -1 # Disable fail fast checking to avoid issues due to other pods not being started in time - maximum-pool-size: 5 - minimum-idle: 0 - url: ${DATABASE_URL} - driverClassName: org.postgresql.Driver - username: 
${DATABASE_USER} - password: ${DATABASE_PASSWORD} - -endpoints: - beans: - enabled: true - sensitive: false - env: - enabled: true - sensitive: false - health: - enabled: true - sensitive: false - info: - enabled: true - sensitive: true - loggers: - enabled: true - sensitive: true - refresh: - enabled: false - sensitive: true - routes: - enabled: true - sensitive: false - threaddump: - enabled: true - sensitive: true - -flyway: - enabled: true - datasources: - config: - enabled: false - locations: - - 'classpath:io/airbyte/db/instance/configs/migrations' - jobs: - enabled: false - locations: - - 'classpath:io/airbyte/db/instance/jobs/migrations' - -jpa: - default: - properties: - hibernate: - show_sql: true - -jooq: - datasources: - config: - jackson-converter-enabled: true - sql-dialect: POSTGRES - jobs: - jackson-converter-enabled: true - sql-dialect: POSTGRES - -logger: - levels: -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG \ No newline at end of file diff --git a/airbyte-bootloader/src/main/resources/banner/attention-banner.txt b/airbyte-bootloader/src/main/resources/banner/attention-banner.txt deleted file mode 100644 index e35652b219ef..000000000000 --- a/airbyte-bootloader/src/main/resources/banner/attention-banner.txt +++ /dev/null @@ -1,13 +0,0 @@ - ___ ___________________ ________________ _ __ - / |/_ __/_ __/ ____/ | / /_ __/ _/ __ \/ | / / - ________________________ / /| | / / / / / __/ / |/ / / / / // / / / |/ / ________________________ -/_____/_____/_____/_____/ / ___ |/ / / / / /___/ /| / / / _/ // /_/ / /| / /_____/_____/_____/_____/ - __ _______ __________ /_/ _|_/_/ _/_/_/_____/_/_|_/_/_/ /___/\____/_/_|_/ __ __________ __________ - / / / / ___// ____/ __ \ / _/ | / / __ \/ / / /_ __/ / __ \/ ____/ __ \/ / / / _/ __ \/ ____/ __ \ - / / / /\__ \/ __/ / /_/ / / // |/ / /_/ / / / / / / / /_/ / __/ / / / / / / // // /_/ / __/ / / / / -/ /_/ /___/ / /___/ _, _/ _/ // /| / ____/ /_/ / / / / _, _/ 
/___/ /_/ / /_/ // // _, _/ /___/ /_/ / -\____//____/_____/_/ |_| /___/_/ |_/_/ \____/ /_/ /_/ |_/_____/\___\_\____/___/_/ |_/_____/_____/ - - ------------------- - See details below - ------------------- diff --git a/airbyte-bootloader/src/main/resources/micronaut-banner.txt b/airbyte-bootloader/src/main/resources/micronaut-banner.txt deleted file mode 100644 index ec2646448f42..000000000000 --- a/airbyte-bootloader/src/main/resources/micronaut-banner.txt +++ /dev/null @@ -1,8 +0,0 @@ - - ___ _ __ __ - / | (_)____/ /_ __ __/ /____ - / /| | / / ___/ __ \/ / / / __/ _ \ - / ___ |/ / / / /_/ / /_/ / /_/ __/ -/_/ |_/_/_/ /_.___/\__, /\__/\___/ - /____/ - : airbyte-bootloader : \ No newline at end of file diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java deleted file mode 100644 index 14e0bb7c6aee..000000000000 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java +++ /dev/null @@ -1,417 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.bootloader; - -import static io.airbyte.config.Configs.SecretPersistenceType.TESTING_CONFIG_DB_TABLE; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.commons.version.Version; -import io.airbyte.config.Configs; -import io.airbyte.config.Geography; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.init.ApplyDefinitionsHelper; -import io.airbyte.config.init.DefinitionsProvider; -import io.airbyte.config.init.LocalDefinitionsProvider; -import io.airbyte.config.init.PostLoadExecutor; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.LocalTestingSecretPersistence; -import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.factory.DataSourceFactory; -import io.airbyte.db.factory.DatabaseCheckFactory; -import io.airbyte.db.factory.FlywayFactory; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; -import 
io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider; -import io.airbyte.db.instance.jobs.JobsDatabaseMigrator; -import io.airbyte.db.instance.jobs.JobsDatabaseTestProvider; -import io.airbyte.persistence.job.DefaultJobPersistence; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import javax.sql.DataSource; -import lombok.val; -import org.flywaydb.core.Flyway; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; - -/** - * Test suite for the {@link Bootloader} class. - */ -@SuppressWarnings("PMD.AvoidUsingHardCodedIP") -@ExtendWith(SystemStubsExtension.class) -class BootloaderTest { - - private PostgreSQLContainer container; - private DataSource configsDataSource; - private DataSource jobsDataSource; - private static final String DOCKER = "docker"; - private static final String PROTOCOL_VERSION_123 = "1.2.3"; - private static final String PROTOCOL_VERSION_124 = "1.2.4"; - private static final String VERSION_0330_ALPHA = "0.33.0-alpha"; - private static final String VERSION_0320_ALPHA = "0.32.0-alpha"; - private static final String VERSION_0321_ALPHA = "0.32.1-alpha"; - private static final String VERSION_0170_ALPHA = "0.17.0-alpha"; - - // ⚠️ This line should change with every new migration to show that you meant to make a new - // migration to the prod database - private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "0.40.28.001"; - private static final String CURRENT_JOBS_MIGRATION_VERSION = "0.40.28.001"; - - @BeforeEach - void setup() { - container = new 
PostgreSQLContainer<>("postgres:13-alpine") - .withDatabaseName("public") - .withUsername(DOCKER) - .withPassword(DOCKER); - container.start(); - - configsDataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - jobsDataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - } - - @AfterEach - void cleanup() throws Exception { - closeDataSource(configsDataSource); - closeDataSource(jobsDataSource); - container.stop(); - } - - @SystemStub - private EnvironmentVariables environmentVariables; - - @Test - void testBootloaderAppBlankDb() throws Exception { - val currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); - val airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_123), new Version(PROTOCOL_VERSION_124)); - val mockedFeatureFlags = mock(FeatureFlags.class); - val runMigrationOnStartup = true; - val mockedSecretMigrator = mock(SecretMigrator.class); - - try (val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - val jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES)) { - - val configsFlyway = createConfigsFlyway(configsDataSource); - val jobsFlyway = createJobsFlyway(jobsDataSource); - - val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); - val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); - val configRepository = new ConfigRepository(configDatabase); - val configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, - configsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH)); - val configsDatabaseMigrator = new 
ConfigsDatabaseMigrator(configDatabase, configsFlyway); - final Optional definitionsProvider = - Optional.of(new LocalDefinitionsProvider()); - val jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, - jobsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH)); - val jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - val jobsPersistence = new DefaultJobPersistence(jobDatabase); - val protocolVersionChecker = new ProtocolVersionChecker(jobsPersistence, airbyteProtocolRange, configRepository, definitionsProvider); - val applyDefinitionsHelper = new ApplyDefinitionsHelper(configRepository, definitionsProvider, jobsPersistence); - val postLoadExecutor = new DefaultPostLoadExecutor(applyDefinitionsHelper, mockedFeatureFlags, jobsPersistence, mockedSecretMigrator); - - val bootloader = - new Bootloader(false, configRepository, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, - definitionsProvider, mockedFeatureFlags, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, protocolVersionChecker, - runMigrationOnStartup, mockedSecretMigrator, postLoadExecutor); - bootloader.load(); - - val jobsMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - assertEquals(CURRENT_JOBS_MIGRATION_VERSION, jobsMigrator.getLatestMigration().getVersion().getVersion()); - - val configsMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); - assertEquals(CURRENT_CONFIGS_MIGRATION_VERSION, configsMigrator.getLatestMigration().getVersion().getVersion()); - - assertEquals(VERSION_0330_ALPHA, jobsPersistence.getVersion().get()); - assertEquals(new Version(PROTOCOL_VERSION_123), jobsPersistence.getAirbyteProtocolVersionMin().get()); - assertEquals(new Version(PROTOCOL_VERSION_124), jobsPersistence.getAirbyteProtocolVersionMax().get()); - - 
assertNotEquals(Optional.empty(), jobsPersistence.getDeployment()); - } - } - - @Test - void testBootloaderAppRunSecretMigration() throws Exception { - val mockedConfigs = mock(Configs.class); - when(mockedConfigs.getSecretPersistenceType()).thenReturn(TESTING_CONFIG_DB_TABLE); - - val currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); - val airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_123), new Version(PROTOCOL_VERSION_124)); - val mockedFeatureFlags = mock(FeatureFlags.class); - val runMigrationOnStartup = true; - - try (val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - val jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES)) { - - val configsFlyway = createConfigsFlyway(configsDataSource); - val jobsFlyway = createJobsFlyway(jobsDataSource); - - val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); - val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); - val configRepository = new ConfigRepository(configDatabase); - val configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, - configsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH)); - val configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); - final Optional definitionsProvider = - Optional.of(new LocalDefinitionsProvider()); - val jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, - jobsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH)); - val jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - val 
jobsPersistence = new DefaultJobPersistence(jobDatabase); - val secretPersistence = new LocalTestingSecretPersistence(configDatabase); - val protocolVersionChecker = new ProtocolVersionChecker(jobsPersistence, airbyteProtocolRange, configRepository, definitionsProvider); - - val localTestingSecretPersistence = new LocalTestingSecretPersistence(configDatabase); - - val secretsReader = new SecretsRepositoryReader(configRepository, new RealSecretsHydrator(localTestingSecretPersistence)); - val secretsWriter = new SecretsRepositoryWriter(configRepository, Optional.of(secretPersistence), Optional.empty()); - - val spiedSecretMigrator = - spy(new SecretMigrator(secretsReader, secretsWriter, configRepository, jobsPersistence, Optional.of(secretPersistence))); - - val applyDefinitionsHelper = new ApplyDefinitionsHelper(configRepository, definitionsProvider, jobsPersistence); - var postLoadExecutor = new DefaultPostLoadExecutor(applyDefinitionsHelper, mockedFeatureFlags, jobsPersistence, null); - - // Bootstrap the database for the test - val initBootloader = - new Bootloader(false, configRepository, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, - definitionsProvider, mockedFeatureFlags, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, protocolVersionChecker, - runMigrationOnStartup, null, postLoadExecutor); - initBootloader.load(); - - final DefinitionsProvider localDefinitions = new LocalDefinitionsProvider(); - configRepository.seedActorDefinitions(localDefinitions.getSourceDefinitions(), localDefinitions.getDestinationDefinitions()); - - final String sourceSpecs = """ - { - "account_id": "1234567891234567", - "start_date": "2022-04-01T00:00:00Z", - "access_token": "nonhiddensecret", - "include_deleted": false, - "fetch_thumbnail_images": false - } - - """; - - final ObjectMapper mapper = new ObjectMapper(); - - final UUID workspaceId = UUID.randomUUID(); - configRepository.writeStandardWorkspaceNoSecrets(new 
StandardWorkspace() - .withWorkspaceId(workspaceId) - .withName("wName") - .withSlug("wSlug") - .withEmail("email@mail.com") - .withTombstone(false) - .withInitialSetupComplete(false) - .withDefaultGeography(Geography.AUTO)); - final UUID sourceId = UUID.randomUUID(); - configRepository.writeSourceConnectionNoSecrets(new SourceConnection() - .withSourceDefinitionId(UUID.fromString("e7778cfc-e97c-4458-9ecb-b4f2bba8946c")) // Facebook Marketing - .withSourceId(sourceId) - .withName("test source") - .withWorkspaceId(workspaceId) - .withTombstone(false) - .withConfiguration(mapper.readTree(sourceSpecs))); - - when(mockedFeatureFlags.forceSecretMigration()).thenReturn(false); - - postLoadExecutor = new DefaultPostLoadExecutor(applyDefinitionsHelper, mockedFeatureFlags, jobsPersistence, spiedSecretMigrator); - - // Perform secrets migration - var bootloader = - new Bootloader(false, configRepository, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, - definitionsProvider, mockedFeatureFlags, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, protocolVersionChecker, - runMigrationOnStartup, spiedSecretMigrator, postLoadExecutor); - boolean isMigrated = jobsPersistence.isSecretMigrated(); - - assertFalse(isMigrated); - - bootloader.load(); - verify(spiedSecretMigrator).migrateSecrets(); - - final SourceConnection sourceConnection = configRepository.getSourceConnection(sourceId); - - assertFalse(sourceConnection.getConfiguration().toString().contains("nonhiddensecret")); - assertTrue(sourceConnection.getConfiguration().toString().contains("_secret")); - - isMigrated = jobsPersistence.isSecretMigrated(); - assertTrue(isMigrated); - - // Verify that the migration does not happen if it has already been performed - reset(spiedSecretMigrator); - // We need to re-create the bootloader because it is closing the persistence after running load - bootloader = - new Bootloader(false, configRepository, configDatabaseInitializer, 
configsDatabaseMigrator, currentAirbyteVersion, - definitionsProvider, mockedFeatureFlags, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, protocolVersionChecker, - runMigrationOnStartup, spiedSecretMigrator, postLoadExecutor); - bootloader.load(); - verifyNoInteractions(spiedSecretMigrator); - - // Verify that the migration occurs if the force migration feature flag is enabled - reset(spiedSecretMigrator); - when(mockedFeatureFlags.forceSecretMigration()).thenReturn(true); - // We need to re-create the bootloader because it is closing the persistence after running load - bootloader = - new Bootloader(false, configRepository, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, - definitionsProvider, mockedFeatureFlags, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, protocolVersionChecker, - runMigrationOnStartup, spiedSecretMigrator, postLoadExecutor); - bootloader.load(); - verify(spiedSecretMigrator).migrateSecrets(); - } - } - - // - @Test - void testIsLegalUpgradePredicate() throws Exception { - val currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); - val airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_123), new Version(PROTOCOL_VERSION_124)); - val mockedFeatureFlags = mock(FeatureFlags.class); - val runMigrationOnStartup = true; - val mockedSecretMigrator = mock(SecretMigrator.class); - - try (val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - val jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES)) { - - val configsFlyway = createConfigsFlyway(configsDataSource); - val jobsFlyway = createJobsFlyway(jobsDataSource); - - val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); - val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); - val configRepository = new ConfigRepository(configDatabase); - val 
configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, - configsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH)); - val configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); - final Optional definitionsProvider = Optional.of( - new LocalDefinitionsProvider()); - val jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, - jobsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH)); - val jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - val jobsPersistence = new DefaultJobPersistence(jobDatabase); - val protocolVersionChecker = new ProtocolVersionChecker(jobsPersistence, airbyteProtocolRange, configRepository, definitionsProvider); - val applyDefinitionsHelper = new ApplyDefinitionsHelper(configRepository, definitionsProvider, jobsPersistence); - val postLoadExecutor = - new DefaultPostLoadExecutor(applyDefinitionsHelper, mockedFeatureFlags, jobsPersistence, mockedSecretMigrator); - - val bootloader = - new Bootloader(false, configRepository, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, - definitionsProvider, mockedFeatureFlags, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, protocolVersionChecker, - runMigrationOnStartup, mockedSecretMigrator, postLoadExecutor); - - // starting from no previous version is always legal. 
- assertTrue(bootloader.isLegalUpgrade(null, new AirbyteVersion("0.17.1-alpha"))); - assertTrue(bootloader.isLegalUpgrade(null, new AirbyteVersion(VERSION_0320_ALPHA))); - assertTrue(bootloader.isLegalUpgrade(null, new AirbyteVersion(VERSION_0321_ALPHA))); - assertTrue(bootloader.isLegalUpgrade(null, new AirbyteVersion("0.33.1-alpha"))); - // starting from a version that is pre-breaking migration cannot go past the breaking migration. - assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0170_ALPHA), new AirbyteVersion("0.17.1-alpha"))); - assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0170_ALPHA), new AirbyteVersion("0.18.0-alpha"))); - assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0170_ALPHA), new AirbyteVersion(VERSION_0320_ALPHA))); - assertFalse(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0170_ALPHA), new AirbyteVersion(VERSION_0321_ALPHA))); - assertFalse(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0170_ALPHA), new AirbyteVersion(VERSION_0330_ALPHA))); - // any migration starting at the breaking migration or after it can upgrade to anything. 
- assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0320_ALPHA), new AirbyteVersion(VERSION_0321_ALPHA))); - assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0320_ALPHA), new AirbyteVersion(VERSION_0330_ALPHA))); - assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0321_ALPHA), new AirbyteVersion(VERSION_0321_ALPHA))); - assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0321_ALPHA), new AirbyteVersion(VERSION_0330_ALPHA))); - assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0330_ALPHA), new AirbyteVersion("0.33.1-alpha"))); - assertTrue(bootloader.isLegalUpgrade(new AirbyteVersion(VERSION_0330_ALPHA), new AirbyteVersion("0.34.0-alpha"))); - } - } - - @Test - void testPostLoadExecutionExecutes() throws Exception { - final var testTriggered = new AtomicBoolean(); - val currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); - val airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_123), new Version(PROTOCOL_VERSION_124)); - val mockedFeatureFlags = mock(FeatureFlags.class); - val runMigrationOnStartup = true; - val mockedSecretMigrator = mock(SecretMigrator.class); - - try (val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - val jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES)) { - - val configsFlyway = createConfigsFlyway(configsDataSource); - val jobsFlyway = createJobsFlyway(jobsDataSource); - - val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); - val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); - val configRepository = new ConfigRepository(configDatabase); - val configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, - configsDatabaseInitializationTimeoutMs, 
MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH)); - val configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); - final Optional definitionsProvider = - Optional.of(new LocalDefinitionsProvider()); - val jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, - jobsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH)); - val jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - val jobsPersistence = new DefaultJobPersistence(jobDatabase); - val protocolVersionChecker = new ProtocolVersionChecker(jobsPersistence, airbyteProtocolRange, configRepository, definitionsProvider); - val postLoadExecutor = new PostLoadExecutor() { - - @Override - public void execute() { - testTriggered.set(true); - } - - }; - val bootloader = - new Bootloader(false, configRepository, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, - definitionsProvider, mockedFeatureFlags, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, protocolVersionChecker, - runMigrationOnStartup, mockedSecretMigrator, postLoadExecutor); - bootloader.load(); - assertTrue(testTriggered.get()); - } - } - - private Flyway createConfigsFlyway(final DataSource dataSource) { - return FlywayFactory.create(dataSource, getClass().getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, - ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); - } - - private Flyway createJobsFlyway(final DataSource dataSource) { - return FlywayFactory.create(dataSource, getClass().getName(), JobsDatabaseMigrator.DB_IDENTIFIER, - JobsDatabaseMigrator.MIGRATION_FILE_LOCATION); - } - - private void closeDataSource(final DataSource dataSource) throws Exception { - DataSourceFactory.close(dataSource); - } - -} diff --git 
a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/DefaultPostLoadExecutorTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/DefaultPostLoadExecutorTest.java deleted file mode 100644 index afeddba5533a..000000000000 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/DefaultPostLoadExecutorTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.bootloader; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.config.init.ApplyDefinitionsHelper; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; - -/** - * Test suite for the {@link DefaultPostLoadExecutor} class. 
- */ -class DefaultPostLoadExecutorTest { - - @ParameterizedTest - @CsvSource({"true,true,1", "true,false,1", "false,true,0", "false,false,1"}) - void testPostLoadExecution(final boolean forceSecretMigration, final boolean isSecretMigration, final int expectedTimes) - throws Exception { - final ApplyDefinitionsHelper applyDefinitionsHelper = mock(ApplyDefinitionsHelper.class); - final FeatureFlags featureFlags = mock(FeatureFlags.class); - final JobPersistence jobPersistence = mock(JobPersistence.class); - final SecretMigrator secretMigrator = mock(SecretMigrator.class); - - when(featureFlags.forceSecretMigration()).thenReturn(forceSecretMigration); - when(jobPersistence.isSecretMigrated()).thenReturn(isSecretMigration); - - final DefaultPostLoadExecutor postLoadExecution = - new DefaultPostLoadExecutor(applyDefinitionsHelper, featureFlags, jobPersistence, secretMigrator); - - assertDoesNotThrow(() -> postLoadExecution.execute()); - verify(applyDefinitionsHelper, times(1)).apply(); - verify(secretMigrator, times(expectedTimes)).migrateSecrets(); - } - - @Test - void testPostLoadExecutionNullSecretManager() throws JsonValidationException, IOException { - final ApplyDefinitionsHelper applyDefinitionsHelper = mock(ApplyDefinitionsHelper.class); - final FeatureFlags featureFlags = mock(FeatureFlags.class); - final JobPersistence jobPersistence = mock(JobPersistence.class); - - when(featureFlags.forceSecretMigration()).thenReturn(true); - - final DefaultPostLoadExecutor postLoadExecution = - new DefaultPostLoadExecutor(applyDefinitionsHelper, featureFlags, jobPersistence, null); - - assertDoesNotThrow(() -> postLoadExecution.execute()); - verify(applyDefinitionsHelper, times(1)).apply(); - } - - @Test - void testPostLoadExecutionWithException() throws JsonValidationException, IOException { - final ApplyDefinitionsHelper applyDefinitionsHelper = mock(ApplyDefinitionsHelper.class); - final FeatureFlags featureFlags = mock(FeatureFlags.class); - final JobPersistence 
jobPersistence = mock(JobPersistence.class); - final SecretMigrator secretMigrator = mock(SecretMigrator.class); - - doThrow(new IOException("test")).when(applyDefinitionsHelper).apply(); - - final DefaultPostLoadExecutor postLoadExecution = - new DefaultPostLoadExecutor(applyDefinitionsHelper, featureFlags, jobPersistence, secretMigrator); - - assertThrows(IOException.class, () -> postLoadExecution.execute()); - } - -} diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/ProtocolVersionCheckerTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/ProtocolVersionCheckerTest.java deleted file mode 100644 index 8fe8e5b57eed..000000000000 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/ProtocolVersionCheckerTest.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.bootloader; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorType; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.init.DefinitionsProvider; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -class ProtocolVersionCheckerTest { - - 
ConfigRepository configRepository; - DefinitionsProvider definitionsProvider; - JobPersistence jobPersistence; - - final Version V0_0_0 = new Version("0.0.0"); - final Version V1_0_0 = new Version("1.0.0"); - final Version V2_0_0 = new Version("2.0.0"); - - @BeforeEach - void beforeEach() throws IOException { - configRepository = mock(ConfigRepository.class); - definitionsProvider = mock(DefinitionsProvider.class); - jobPersistence = mock(JobPersistence.class); - - when(jobPersistence.getVersion()).thenReturn(Optional.of("1.2.3")); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testFirstInstallCheck(final boolean supportAutoUpgrade) throws IOException { - final AirbyteProtocolVersionRange expectedRange = new AirbyteProtocolVersionRange(V0_0_0, V1_0_0); - when(jobPersistence.getVersion()).thenReturn(Optional.empty()); - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, expectedRange, configRepository, Optional.of(definitionsProvider)); - final Optional supportedRange = protocolVersionChecker.validate(supportAutoUpgrade); - assertTrue(supportedRange.isPresent()); - assertEquals(expectedRange.max(), supportedRange.get().max()); - assertEquals(expectedRange.min(), supportedRange.get().min()); - } - - @Test - void testGetTargetRange() throws IOException { - final AirbyteProtocolVersionRange expectedRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, expectedRange, configRepository, Optional.of(definitionsProvider)); - assertEquals(expectedRange.max(), protocolVersionChecker.getTargetProtocolVersionRange().max()); - assertEquals(expectedRange.min(), protocolVersionChecker.getTargetProtocolVersionRange().min()); - } - - @Test - void testRetrievingCurrentConflicts() throws IOException { - final AirbyteProtocolVersionRange targetRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - - final 
UUID source1 = UUID.randomUUID(); - final UUID source2 = UUID.randomUUID(); - final UUID source3 = UUID.randomUUID(); - final UUID dest1 = UUID.randomUUID(); - final UUID dest2 = UUID.randomUUID(); - - final Map> initialActorDefinitions = Map.of( - source1, Map.entry(ActorType.SOURCE, V0_0_0), - source2, Map.entry(ActorType.SOURCE, V1_0_0), - source3, Map.entry(ActorType.SOURCE, V2_0_0), - dest1, Map.entry(ActorType.DESTINATION, V0_0_0), - dest2, Map.entry(ActorType.DESTINATION, V0_0_0)); - when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); - - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, targetRange, configRepository, Optional.of(definitionsProvider)); - final Map> conflicts = protocolVersionChecker.getConflictingActorDefinitions(targetRange); - - final Map> expectedConflicts = Map.of( - ActorType.DESTINATION, Set.of(dest1, dest2), - ActorType.SOURCE, Set.of(source1)); - assertEquals(expectedConflicts, conflicts); - } - - @Test - void testRetrievingCurrentConflictsWhenNoConflicts() throws IOException { - final AirbyteProtocolVersionRange targetRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - - final UUID source1 = UUID.randomUUID(); - final UUID dest1 = UUID.randomUUID(); - - final Map> initialActorDefinitions = Map.of( - source1, Map.entry(ActorType.SOURCE, V2_0_0), - dest1, Map.entry(ActorType.DESTINATION, V1_0_0)); - when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); - - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, targetRange, configRepository, Optional.of(definitionsProvider)); - final Map> conflicts = protocolVersionChecker.getConflictingActorDefinitions(targetRange); - - assertEquals(Map.of(), conflicts); - } - - @Test - void testProjectRemainingSourceConflicts() { - final AirbyteProtocolVersionRange targetRange = new 
AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - - final UUID unrelatedSource = UUID.randomUUID(); - final UUID upgradedSource = UUID.randomUUID(); - final UUID notChangedSource = UUID.randomUUID(); - final UUID missingSource = UUID.randomUUID(); - final Set initialConflicts = Set.of(upgradedSource, notChangedSource, missingSource); - - setNewSourceDefinitions(List.of( - Map.entry(unrelatedSource, V2_0_0), - Map.entry(upgradedSource, V1_0_0), - Map.entry(notChangedSource, V0_0_0))); - - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, targetRange, configRepository, Optional.of(definitionsProvider)); - final Set actualConflicts = - protocolVersionChecker.projectRemainingConflictsAfterConnectorUpgrades(targetRange, initialConflicts, ActorType.SOURCE); - - final Set expectedConflicts = Set.of(notChangedSource, missingSource); - assertEquals(expectedConflicts, actualConflicts); - } - - @Test - void testProjectRemainingDestinationConflicts() { - final AirbyteProtocolVersionRange targetRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - - final UUID dest1 = UUID.randomUUID(); - final UUID dest2 = UUID.randomUUID(); - final UUID dest3 = UUID.randomUUID(); - final Set initialConflicts = Set.of(dest1, dest2, dest3); - - setNewDestinationDefinitions(List.of( - Map.entry(dest1, V2_0_0), - Map.entry(dest2, V1_0_0), - Map.entry(dest3, V2_0_0))); - - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, targetRange, configRepository, Optional.of(definitionsProvider)); - final Set actualConflicts = - protocolVersionChecker.projectRemainingConflictsAfterConnectorUpgrades(targetRange, initialConflicts, ActorType.DESTINATION); - - final Set expectedConflicts = Set.of(); - assertEquals(expectedConflicts, actualConflicts); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testValidateSameRange(final boolean supportAutoUpgrade) throws Exception { - final 
AirbyteProtocolVersionRange expectedRange = new AirbyteProtocolVersionRange(V0_0_0, V2_0_0); - setCurrentProtocolRangeRange(expectedRange.min(), expectedRange.max()); - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, expectedRange, configRepository, Optional.of(definitionsProvider)); - - final Optional supportedRange = protocolVersionChecker.validate(supportAutoUpgrade); - assertTrue(supportedRange.isPresent()); - assertEquals(expectedRange.max(), supportedRange.get().max()); - assertEquals(expectedRange.min(), supportedRange.get().min()); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testValidateAllConnectorsAreUpgraded(final boolean supportAutoUpgrade) throws Exception { - setCurrentProtocolRangeRange(V0_0_0, V1_0_0); - - final UUID source1 = UUID.randomUUID(); - final UUID source2 = UUID.randomUUID(); - final UUID source3 = UUID.randomUUID(); - final UUID source4 = UUID.randomUUID(); - final UUID dest1 = UUID.randomUUID(); - final UUID dest2 = UUID.randomUUID(); - final UUID dest3 = UUID.randomUUID(); - final AirbyteProtocolVersionRange expectedTargetVersionRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - - final Map> initialActorDefinitions = Map.of( - source1, Map.entry(ActorType.SOURCE, V0_0_0), - source2, Map.entry(ActorType.SOURCE, V1_0_0), - source3, Map.entry(ActorType.SOURCE, V0_0_0), - source4, Map.entry(ActorType.SOURCE, V0_0_0), - dest1, Map.entry(ActorType.DESTINATION, V0_0_0), - dest2, Map.entry(ActorType.DESTINATION, V1_0_0), - dest3, Map.entry(ActorType.DESTINATION, V2_0_0)); - when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); - - setNewSourceDefinitions(List.of( - Map.entry(source1, V1_0_0), - Map.entry(source2, V1_0_0), - Map.entry(source3, V2_0_0), - Map.entry(source4, V1_0_0))); - setNewDestinationDefinitions(List.of( - Map.entry(dest1, V1_0_0), - Map.entry(dest2, V1_0_0), - Map.entry(dest3, V2_0_0))); - 
- final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, expectedTargetVersionRange, configRepository, Optional.of(definitionsProvider)); - final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); - - // Without auto upgrade, we will fail the validation because it would require connector automatic - // actor definition - // upgrade for used sources/destinations. - if (supportAutoUpgrade) { - assertTrue(actualRange.isPresent()); - assertEquals(expectedTargetVersionRange.max(), actualRange.get().max()); - assertEquals(expectedTargetVersionRange.min(), actualRange.get().min()); - } else { - assertEquals(Optional.empty(), actualRange); - } - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testValidateBadUpgradeMissingSource(final boolean supportAutoUpgrade) throws Exception { - final AirbyteProtocolVersionRange expectedTargetVersionRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - setCurrentProtocolRangeRange(V0_0_0, V1_0_0); - - final UUID source1 = UUID.randomUUID(); - final UUID source2 = UUID.randomUUID(); - final UUID dest1 = UUID.randomUUID(); - final UUID dest2 = UUID.randomUUID(); - - final Map> initialActorDefinitions = Map.of( - source1, Map.entry(ActorType.SOURCE, V0_0_0), - source2, Map.entry(ActorType.SOURCE, V0_0_0), - dest1, Map.entry(ActorType.DESTINATION, V0_0_0), - dest2, Map.entry(ActorType.DESTINATION, V0_0_0)); - when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); - - setNewSourceDefinitions(List.of( - Map.entry(source1, V0_0_0), - Map.entry(source2, V1_0_0))); - setNewDestinationDefinitions(List.of( - Map.entry(dest1, V1_0_0), - Map.entry(dest2, V1_0_0))); - - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, expectedTargetVersionRange, configRepository, Optional.of(definitionsProvider)); - final Optional actualRange = 
protocolVersionChecker.validate(supportAutoUpgrade); - assertEquals(Optional.empty(), actualRange); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testValidateBadUpgradeMissingDestination(final boolean supportAutoUpgrade) throws Exception { - final AirbyteProtocolVersionRange expectedTargetVersionRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - setCurrentProtocolRangeRange(V0_0_0, V1_0_0); - - final UUID source1 = UUID.randomUUID(); - final UUID source2 = UUID.randomUUID(); - final UUID dest1 = UUID.randomUUID(); - final UUID dest2 = UUID.randomUUID(); - - final Map> initialActorDefinitions = Map.of( - source1, Map.entry(ActorType.SOURCE, V0_0_0), - source2, Map.entry(ActorType.SOURCE, V0_0_0), - dest1, Map.entry(ActorType.DESTINATION, V0_0_0), - dest2, Map.entry(ActorType.DESTINATION, V0_0_0)); - when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); - - setNewSourceDefinitions(List.of( - Map.entry(source1, V1_0_0), - Map.entry(source2, V1_0_0))); - setNewDestinationDefinitions(List.of( - Map.entry(dest1, V1_0_0), - Map.entry(dest2, V0_0_0))); - - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, expectedTargetVersionRange, configRepository, Optional.of(definitionsProvider)); - final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); - assertEquals(Optional.empty(), actualRange); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testValidateFailsOnProtocolRangeChangeWithoutDefinitionsProvider(final boolean supportAutoUpgrade) throws Exception { - final AirbyteProtocolVersionRange expectedTargetVersionRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); - setCurrentProtocolRangeRange(V0_0_0, V1_0_0); - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, expectedTargetVersionRange, configRepository, Optional.empty()); - - final UUID 
source1 = UUID.randomUUID(); - final UUID dest1 = UUID.randomUUID(); - - final Map> initialActorDefinitions = Map.of( - source1, Map.entry(ActorType.SOURCE, V0_0_0), - dest1, Map.entry(ActorType.DESTINATION, V0_0_0)); - when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); - - final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); - assertEquals(Optional.empty(), actualRange); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testValidateSucceedsWhenNoProtocolRangeChangeWithoutDefinitionsProvider(final boolean supportAutoUpgrade) throws Exception { - final AirbyteProtocolVersionRange expectedTargetVersionRange = new AirbyteProtocolVersionRange(V0_0_0, V2_0_0); - setCurrentProtocolRangeRange(V0_0_0, V2_0_0); - final ProtocolVersionChecker protocolVersionChecker = - new ProtocolVersionChecker(jobPersistence, expectedTargetVersionRange, configRepository, Optional.empty()); - - final UUID source1 = UUID.randomUUID(); - final UUID dest1 = UUID.randomUUID(); - - final Map> initialActorDefinitions = Map.of( - source1, Map.entry(ActorType.SOURCE, V0_0_0), - dest1, Map.entry(ActorType.DESTINATION, V0_0_0)); - when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); - - final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); - assertTrue(actualRange.isPresent()); - assertEquals(expectedTargetVersionRange.max(), actualRange.get().max()); - assertEquals(expectedTargetVersionRange.min(), actualRange.get().min()); - } - - private void setCurrentProtocolRangeRange(final Version min, final Version max) throws IOException { - when(jobPersistence.getCurrentProtocolVersionRange()).thenReturn(Optional.of(new AirbyteProtocolVersionRange(min, max))); - when(jobPersistence.getAirbyteProtocolVersionMin()).thenReturn(Optional.of(min)); - when(jobPersistence.getAirbyteProtocolVersionMax()).thenReturn(Optional.of(max)); - } - 
- private void setNewDestinationDefinitions(final List> defs) { - final List destDefinitions = defs.stream() - .map(e -> new StandardDestinationDefinition() - .withDestinationDefinitionId(e.getKey()) - .withSpec(new ConnectorSpecification().withProtocolVersion(e.getValue().serialize()))) - .toList(); - when(definitionsProvider.getDestinationDefinitions()).thenReturn(destDefinitions); - } - - private void setNewSourceDefinitions(final List> defs) { - final List sourceDefinitions = defs.stream() - .map(e -> new StandardSourceDefinition() - .withSourceDefinitionId(e.getKey()) - .withSpec(new ConnectorSpecification().withProtocolVersion(e.getValue().serialize()))) - .toList(); - when(definitionsProvider.getSourceDefinitions()).thenReturn(sourceDefinitions); - } - -} diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/SecretMigratorTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/SecretMigratorTest.java deleted file mode 100644 index 7fd6a4be3519..000000000000 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/SecretMigratorTest.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.bootloader; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class SecretMigratorTest { - - private final UUID workspaceId = UUID.randomUUID(); - - @Mock - private ConfigRepository configRepository; - - @Mock - private SecretsRepositoryReader secretsReader; - - @Mock - private SecretsRepositoryWriter secretsWriter; - - @Mock - private SecretPersistence secretPersistence; - - @Mock - private JobPersistence jobPersistence; - - private SecretMigrator secretMigrator; - - @BeforeEach - void setup() { - secretMigrator = Mockito.spy(new SecretMigrator(secretsReader, secretsWriter, configRepository, jobPersistence, Optional.of(secretPersistence))); - } - - @Test - void testMigrateSecret() throws Exception { - final JsonNode sourceSpec = 
Jsons.jsonNode("sourceSpec"); - final UUID sourceDefinitionId = UUID.randomUUID(); - final StandardSourceDefinition standardSourceDefinition = new StandardSourceDefinition() - .withSourceDefinitionId(sourceDefinitionId) - .withSpec( - new ConnectorSpecification() - .withConnectionSpecification(sourceSpec)); - final Map standardSourceDefinitions = new HashMap<>(); - standardSourceDefinitions.put(sourceDefinitionId, standardSourceDefinition.getSpec()); - when(configRepository.listStandardSourceDefinitions(true)) - .thenReturn(Lists.newArrayList(standardSourceDefinition)); - - final JsonNode sourceConfiguration = Jsons.jsonNode("sourceConfiguration"); - final SourceConnection sourceConnection = new SourceConnection() - .withSourceId(UUID.randomUUID()) - .withSourceDefinitionId(sourceDefinitionId) - .withConfiguration(sourceConfiguration) - .withWorkspaceId(workspaceId); - final List sourceConnections = Lists.newArrayList(sourceConnection); - when(configRepository.listSourceConnection()) - .thenReturn(sourceConnections); - - final JsonNode destinationSpec = Jsons.jsonNode("destinationSpec"); - final UUID destinationDefinitionId = UUID.randomUUID(); - final StandardDestinationDefinition standardDestinationDefinition = new StandardDestinationDefinition() - .withDestinationDefinitionId(destinationDefinitionId) - .withSpec( - new ConnectorSpecification() - .withConnectionSpecification(destinationSpec)); - final Map standardDestinationDefinitions = new HashMap<>(); - standardDestinationDefinitions.put(destinationDefinitionId, standardDestinationDefinition.getSpec()); - when(configRepository.listStandardDestinationDefinitions(true)) - .thenReturn(Lists.newArrayList(standardDestinationDefinition)); - - final JsonNode destinationConfiguration = Jsons.jsonNode("destinationConfiguration"); - final DestinationConnection destinationConnection = new DestinationConnection() - .withDestinationId(UUID.randomUUID()) - .withDestinationDefinitionId(destinationDefinitionId) - 
.withConfiguration(destinationConfiguration) - .withWorkspaceId(workspaceId); - final List destinationConnections = Lists.newArrayList(destinationConnection); - when(configRepository.listDestinationConnection()) - .thenReturn(destinationConnections); - - when(secretsReader.getSourceConnectionWithSecrets(sourceConnection.getSourceId())).thenReturn(sourceConnection); - when(secretsReader.getDestinationConnectionWithSecrets(destinationConnection.getDestinationId())).thenReturn(destinationConnection); - - secretMigrator.migrateSecrets(); - - Mockito.verify(secretMigrator).migrateSources(sourceConnections, standardSourceDefinitions); - Mockito.verify(secretsWriter).writeSourceConnection(sourceConnection, standardSourceDefinition.getSpec()); - secretPersistence.write(any(), any()); - Mockito.verify(secretMigrator).migrateDestinations(destinationConnections, standardDestinationDefinitions); - Mockito.verify(secretsWriter).writeDestinationConnection(destinationConnection, standardDestinationDefinition.getSpec()); - - Mockito.verify(jobPersistence).setSecretMigrationDone(); - } - - @Test - void testSourceMigration() throws JsonValidationException, IOException { - final UUID definitionId1 = UUID.randomUUID(); - final UUID definitionId2 = UUID.randomUUID(); - final UUID sourceId1 = UUID.randomUUID(); - final UUID sourceId2 = UUID.randomUUID(); - final JsonNode sourceConfiguration1 = Jsons.jsonNode("conf1"); - final JsonNode sourceConfiguration2 = Jsons.jsonNode("conf2"); - final ConnectorSpecification sourceDefinition1 = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode("def1")); - final ConnectorSpecification sourceDefinition2 = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode("def2")); - final SourceConnection sourceConnection1 = new SourceConnection() - .withSourceId(sourceId1) - .withSourceDefinitionId(definitionId1) - .withConfiguration(sourceConfiguration1); - final SourceConnection sourceConnection2 = new SourceConnection() 
- .withSourceId(sourceId2) - .withSourceDefinitionId(definitionId2) - .withConfiguration(sourceConfiguration2); - - final List sources = Lists.newArrayList(sourceConnection1, sourceConnection2); - final Map definitionIdToDestinationSpecs = new HashMap<>(); - definitionIdToDestinationSpecs.put(definitionId1, sourceDefinition1); - definitionIdToDestinationSpecs.put(definitionId2, sourceDefinition2); - - secretMigrator.migrateSources(sources, definitionIdToDestinationSpecs); - - Mockito.verify(secretsWriter).writeSourceConnection(sourceConnection1, sourceDefinition1); - Mockito.verify(secretsWriter).writeSourceConnection(sourceConnection2, sourceDefinition2); - } - - @Test - void testDestinationMigration() throws JsonValidationException, IOException { - final UUID definitionId1 = UUID.randomUUID(); - final UUID definitionId2 = UUID.randomUUID(); - final UUID destinationId1 = UUID.randomUUID(); - final UUID destinationId2 = UUID.randomUUID(); - final JsonNode destinationConfiguration1 = Jsons.jsonNode("conf1"); - final JsonNode destinationConfiguration2 = Jsons.jsonNode("conf2"); - final ConnectorSpecification destinationDefinition1 = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode("def1")); - final ConnectorSpecification destinationDefinition2 = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode("def2")); - final DestinationConnection destinationConnection1 = new DestinationConnection() - .withDestinationId(destinationId1) - .withDestinationDefinitionId(definitionId1) - .withConfiguration(destinationConfiguration1); - final DestinationConnection destinationConnection2 = new DestinationConnection() - .withDestinationId(destinationId2) - .withDestinationDefinitionId(definitionId2) - .withConfiguration(destinationConfiguration2); - - final List destinations = Lists.newArrayList(destinationConnection1, destinationConnection2); - final Map definitionIdToDestinationSpecs = new HashMap<>(); - 
definitionIdToDestinationSpecs.put(definitionId1, destinationDefinition1); - definitionIdToDestinationSpecs.put(definitionId2, destinationDefinition2); - - secretMigrator.migrateDestinations(destinations, definitionIdToDestinationSpecs); - - Mockito.verify(secretsWriter).writeDestinationConnection(destinationConnection1, destinationDefinition1); - Mockito.verify(secretsWriter).writeDestinationConnection(destinationConnection2, destinationDefinition2); - } - -} diff --git a/airbyte-commons-server/build.gradle b/airbyte-commons-server/build.gradle deleted file mode 100644 index ec25c650ae7a..000000000000 --- a/airbyte-commons-server/build.gradle +++ /dev/null @@ -1,67 +0,0 @@ -plugins { - id "java-library" -} - -dependencies { - annotationProcessor platform(libs.micronaut.bom) - annotationProcessor libs.bundles.micronaut.annotation.processor - annotationProcessor libs.micronaut.jaxrs.processor - - implementation platform(libs.micronaut.bom) - implementation libs.bundles.micronaut - implementation libs.micronaut.jaxrs.server - - // Ensure that the versions defined in deps.toml are used - // instead of versions from transitive dependencies - implementation(libs.flyway.core) { - force = true - } - implementation(libs.s3) { - // Force to avoid updated version brought in transitively from Micronaut 3.8+ - // that is incompatible with our current Helm setup - force = true - } - implementation(libs.aws.java.sdk.s3) { - // Force to avoid updated version brought in transitively from Micronaut 3.8+ - // that is incompatible with our current Helm setup - force = true - } - - implementation project(':airbyte-analytics') - implementation project(':airbyte-api') - implementation project(':airbyte-commons-temporal') - implementation project(':airbyte-commons-worker') - implementation project(':airbyte-config:init') - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') - implementation 
project(':airbyte-config:specs') - implementation project(':airbyte-metrics:metrics-lib') - implementation project(':airbyte-db:db-lib') - implementation project(":airbyte-json-validation") - implementation project(':airbyte-oauth') - implementation libs.airbyte.protocol - implementation project(':airbyte-persistence:job-persistence') - - implementation 'com.github.slugify:slugify:2.4' - implementation 'commons-cli:commons-cli:1.4' - implementation libs.temporal.sdk - implementation 'org.apache.cxf:cxf-core:3.4.2' - implementation 'org.eclipse.jetty:jetty-server:9.4.31.v20200723' - implementation 'org.eclipse.jetty:jetty-servlet:9.4.31.v20200723' - implementation 'org.glassfish.jaxb:jaxb-runtime:3.0.2' - implementation 'org.glassfish.jersey.containers:jersey-container-servlet' - implementation 'org.glassfish.jersey.inject:jersey-hk2' - implementation 'org.glassfish.jersey.media:jersey-media-json-jackson' - implementation 'org.glassfish.jersey.ext:jersey-bean-validation' - implementation 'org.quartz-scheduler:quartz:2.3.2' - implementation 'io.sentry:sentry:6.3.1' - implementation 'io.swagger:swagger-annotations:1.6.2' - - testImplementation project(':airbyte-test-utils') - testImplementation libs.postgresql - testImplementation libs.platform.testcontainers.postgresql - testImplementation 'com.squareup.okhttp3:mockwebserver:4.9.1' - testImplementation 'org.mockito:mockito-inline:4.7.0' -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/RequestLogger.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/RequestLogger.java deleted file mode 100644 index 67af0fe67820..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/RequestLogger.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.container.ContainerRequestFilter; -import javax.ws.rs.container.ContainerResponseContext; -import javax.ws.rs.container.ContainerResponseFilter; -import javax.ws.rs.core.Context; -import org.apache.commons.io.IOUtils; -import org.eclipse.jetty.http.HttpStatus; -import org.glassfish.jersey.message.MessageUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -/** - * This class implements two {@code filter()} methods that execute as part of the Jersey framework - * request/response chain. - *

- * The first {@code filter()} is the Request Filter. It takes an incoming - * {@link ContainerRequestContext} that contains request information, such as the request body. In - * this filter, we extract the request body and store it back on the request context as a custom - * property. We don't write any logs for requests. However, since we want to include the request - * body in logs for responses, we have to extract the request body in this filter. - *

- * The second @{code filter()} is the Response Filter. It takes an incoming - * {@link ContainerResponseContext} that contains response information, such as the status code. - * This method also has read-only access to the original {@link ContainerRequestContext}, where we - * set the request body as a custom property in the request filter. This is where we create and - * persist log lines that contain both the response status code and the original request body. - */ -public class RequestLogger implements ContainerRequestFilter, ContainerResponseFilter { - - private static final Logger LOGGER = LoggerFactory.getLogger(RequestLogger.class); - private static final String REQUEST_BODY_PROPERTY = "requestBodyProperty"; - - @Context - private HttpServletRequest servletRequest; - - private final Map mdc; - - public RequestLogger(final Map mdc) { - this.mdc = mdc; - } - - @VisibleForTesting - RequestLogger(final Map mdc, final HttpServletRequest servletRequest) { - this.mdc = mdc; - this.servletRequest = servletRequest; - } - - @Override - public void filter(final ContainerRequestContext requestContext) throws IOException { - if ("POST".equals(requestContext.getMethod())) { - // hack to refill the entity stream so it doesn't interfere with other operations - final ByteArrayOutputStream baos = new ByteArrayOutputStream(); - IOUtils.copy(requestContext.getEntityStream(), baos); - final InputStream entity = new ByteArrayInputStream(baos.toByteArray()); - requestContext.setEntityStream(new ByteArrayInputStream(baos.toByteArray())); - // end hack - - requestContext.setProperty(REQUEST_BODY_PROPERTY, IOUtils.toString(entity, MessageUtils.getCharset(requestContext.getMediaType()))); - } - } - - @Override - public void filter(final ContainerRequestContext requestContext, final ContainerResponseContext responseContext) { - MDC.setContextMap(mdc); - - final String remoteAddr = servletRequest.getRemoteAddr(); - final String method = servletRequest.getMethod(); - final String url = 
servletRequest.getRequestURI(); - - final String requestBody = (String) requestContext.getProperty(REQUEST_BODY_PROPERTY); - - final boolean isPrintable = servletRequest.getHeader("Content-Type") != null && - servletRequest.getHeader("Content-Type").toLowerCase().contains("application/json") && - isValidJson(requestBody); - - final int status = responseContext.getStatus(); - - final StringBuilder logBuilder = createLogPrefix( - remoteAddr, - method, - status, - url); - - if (method.equals("POST") && requestBody != null && !requestBody.equals("") && isPrintable) { - logBuilder - .append(" - ") - .append(redactSensitiveInfo(requestBody)); - } - - if (HttpStatus.isClientError(status) || HttpStatus.isServerError(status)) { - LOGGER.error(logBuilder.toString()); - } else { - LOGGER.info(logBuilder.toString()); - } - } - - @VisibleForTesting - static StringBuilder createLogPrefix( - final String remoteAddr, - final String method, - final int status, - final String url) { - return new StringBuilder() - .append("REQ ") - .append(remoteAddr) - .append(" ") - .append(method) - .append(" ") - .append(status) - .append(" ") - .append(url); - } - - private static final Set TOP_LEVEL_SENSITIVE_FIELDS = Set.of( - "connectionConfiguration"); - - private static String redactSensitiveInfo(final String requestBody) { - final Optional jsonNodeOpt = Jsons.tryDeserialize(requestBody); - - if (jsonNodeOpt.isPresent()) { - final JsonNode jsonNode = jsonNodeOpt.get(); - if (jsonNode instanceof ObjectNode) { - final ObjectNode objectNode = (ObjectNode) jsonNode; - - for (final String topLevelSensitiveField : TOP_LEVEL_SENSITIVE_FIELDS) { - if (objectNode.has(topLevelSensitiveField)) { - objectNode.put(topLevelSensitiveField, "REDACTED"); - } - } - - return objectNode.toString(); - } else { - return "Unable to deserialize POST body for logging."; - } - } - - return requestBody; - } - - private static boolean isValidJson(final String json) { - return Jsons.tryDeserialize(json).isPresent(); - 
} - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/ServerConstants.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/ServerConstants.java deleted file mode 100644 index 6bff247a533c..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/ServerConstants.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server; - -public class ServerConstants { - - public static final String DEV_IMAGE_TAG = "dev"; - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ApiPojoConverters.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ApiPojoConverters.java deleted file mode 100644 index 2f2b73d8b66b..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ApiPojoConverters.java +++ /dev/null @@ -1,319 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import io.airbyte.api.model.generated.ActorDefinitionResourceRequirements; -import io.airbyte.api.model.generated.AttemptSyncConfig; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionSchedule; -import io.airbyte.api.model.generated.ConnectionScheduleData; -import io.airbyte.api.model.generated.ConnectionScheduleDataBasicSchedule; -import io.airbyte.api.model.generated.ConnectionScheduleDataCron; -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.ConnectionStatus; -import io.airbyte.api.model.generated.Geography; -import io.airbyte.api.model.generated.JobType; -import io.airbyte.api.model.generated.JobTypeResourceLimit; -import io.airbyte.api.model.generated.NonBreakingChangesPreference; -import io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig; -import io.airbyte.api.model.generated.ResourceRequirements; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.config.BasicSchedule; -import io.airbyte.config.Schedule; -import io.airbyte.config.StandardSync; -import io.airbyte.config.State; -import io.airbyte.config.StateWrapper; -import io.airbyte.config.helpers.StateMessageHelper; -import io.airbyte.workers.helper.StateConverter; -import java.util.Optional; -import java.util.UUID; -import java.util.stream.Collectors; - -public class ApiPojoConverters { - - public static io.airbyte.config.ActorDefinitionResourceRequirements actorDefResourceReqsToInternal(final ActorDefinitionResourceRequirements actorDefResourceReqs) { - if (actorDefResourceReqs == null) { - return null; - } - - return new io.airbyte.config.ActorDefinitionResourceRequirements() - .withDefault(actorDefResourceReqs.getDefault() == null ? 
null : resourceRequirementsToInternal(actorDefResourceReqs.getDefault())) - .withJobSpecific(actorDefResourceReqs.getJobSpecific() == null ? null - : actorDefResourceReqs.getJobSpecific() - .stream() - .map(jobSpecific -> new io.airbyte.config.JobTypeResourceLimit() - .withJobType(toInternalJobType(jobSpecific.getJobType())) - .withResourceRequirements(resourceRequirementsToInternal(jobSpecific.getResourceRequirements()))) - .collect(Collectors.toList())); - } - - public static io.airbyte.config.AttemptSyncConfig attemptSyncConfigToInternal(final AttemptSyncConfig attemptSyncConfig) { - if (attemptSyncConfig == null) { - return null; - } - - final io.airbyte.config.AttemptSyncConfig internalAttemptSyncConfig = new io.airbyte.config.AttemptSyncConfig() - .withSourceConfiguration(attemptSyncConfig.getSourceConfiguration()) - .withDestinationConfiguration(attemptSyncConfig.getDestinationConfiguration()); - - final ConnectionState connectionState = attemptSyncConfig.getState(); - if (connectionState != null && connectionState.getStateType() != ConnectionStateType.NOT_SET) { - final StateWrapper stateWrapper = StateConverter.toInternal(attemptSyncConfig.getState()); - final io.airbyte.config.State state = StateMessageHelper.getState(stateWrapper); - internalAttemptSyncConfig.setState(state); - } - - return internalAttemptSyncConfig; - } - - public static io.airbyte.api.client.model.generated.AttemptSyncConfig attemptSyncConfigToClient(final io.airbyte.config.AttemptSyncConfig attemptSyncConfig, - final UUID connectionId, - final boolean useStreamCapableState) { - if (attemptSyncConfig == null) { - return null; - } - - final State state = attemptSyncConfig.getState(); - final Optional optStateWrapper = state != null ? 
StateMessageHelper.getTypedState( - state.getState(), useStreamCapableState) : Optional.empty(); - - return new io.airbyte.api.client.model.generated.AttemptSyncConfig() - .sourceConfiguration(attemptSyncConfig.getSourceConfiguration()) - .destinationConfiguration(attemptSyncConfig.getDestinationConfiguration()) - .state(StateConverter.toClient(connectionId, optStateWrapper.orElse(null))); - } - - public static ActorDefinitionResourceRequirements actorDefResourceReqsToApi(final io.airbyte.config.ActorDefinitionResourceRequirements actorDefResourceReqs) { - if (actorDefResourceReqs == null) { - return null; - } - - return new ActorDefinitionResourceRequirements() - ._default(actorDefResourceReqs.getDefault() == null ? null : resourceRequirementsToApi(actorDefResourceReqs.getDefault())) - .jobSpecific(actorDefResourceReqs.getJobSpecific() == null ? null - : actorDefResourceReqs.getJobSpecific() - .stream() - .map(jobSpecific -> new JobTypeResourceLimit() - .jobType(toApiJobType(jobSpecific.getJobType())) - .resourceRequirements(resourceRequirementsToApi(jobSpecific.getResourceRequirements()))) - .collect(Collectors.toList())); - } - - public static io.airbyte.config.ResourceRequirements resourceRequirementsToInternal(final ResourceRequirements resourceReqs) { - if (resourceReqs == null) { - return null; - } - - return new io.airbyte.config.ResourceRequirements() - .withCpuRequest(resourceReqs.getCpuRequest()) - .withCpuLimit(resourceReqs.getCpuLimit()) - .withMemoryRequest(resourceReqs.getMemoryRequest()) - .withMemoryLimit(resourceReqs.getMemoryLimit()); - } - - public static ResourceRequirements resourceRequirementsToApi(final io.airbyte.config.ResourceRequirements resourceReqs) { - if (resourceReqs == null) { - return null; - } - - return new ResourceRequirements() - .cpuRequest(resourceReqs.getCpuRequest()) - .cpuLimit(resourceReqs.getCpuLimit()) - .memoryRequest(resourceReqs.getMemoryRequest()) - .memoryLimit(resourceReqs.getMemoryLimit()); - } - - public static 
NormalizationDestinationDefinitionConfig normalizationDestinationDefinitionConfigToApi(final io.airbyte.config.NormalizationDestinationDefinitionConfig normalizationDestinationDefinitionConfig) { - if (normalizationDestinationDefinitionConfig == null) { - return new NormalizationDestinationDefinitionConfig().supported(false); - } - return new NormalizationDestinationDefinitionConfig() - .supported(true) - .normalizationRepository(normalizationDestinationDefinitionConfig.getNormalizationRepository()) - .normalizationTag(normalizationDestinationDefinitionConfig.getNormalizationTag()) - .normalizationIntegrationType(normalizationDestinationDefinitionConfig.getNormalizationIntegrationType()); - } - - public static ConnectionRead internalToConnectionRead(final StandardSync standardSync) { - final ConnectionRead connectionRead = new ConnectionRead() - .connectionId(standardSync.getConnectionId()) - .sourceId(standardSync.getSourceId()) - .destinationId(standardSync.getDestinationId()) - .operationIds(standardSync.getOperationIds()) - .status(toApiStatus(standardSync.getStatus())) - .name(standardSync.getName()) - .namespaceDefinition(Enums.convertTo(standardSync.getNamespaceDefinition(), io.airbyte.api.model.generated.NamespaceDefinitionType.class)) - .namespaceFormat(standardSync.getNamespaceFormat()) - .prefix(standardSync.getPrefix()) - .syncCatalog(CatalogConverter.toApi(standardSync.getCatalog(), standardSync.getFieldSelectionData())) - .sourceCatalogId(standardSync.getSourceCatalogId()) - .breakingChange(standardSync.getBreakingChange()) - .geography(Enums.convertTo(standardSync.getGeography(), Geography.class)) - .nonBreakingChangesPreference(Enums.convertTo(standardSync.getNonBreakingChangesPreference(), NonBreakingChangesPreference.class)) - .notifySchemaChanges(standardSync.getNotifySchemaChanges()); - - if (standardSync.getResourceRequirements() != null) { - connectionRead.resourceRequirements(resourceRequirementsToApi(standardSync.getResourceRequirements())); 
- } - - populateConnectionReadSchedule(standardSync, connectionRead); - - return connectionRead; - } - - public static JobType toApiJobType(final io.airbyte.config.JobTypeResourceLimit.JobType jobType) { - return Enums.convertTo(jobType, JobType.class); - } - - public static io.airbyte.config.JobTypeResourceLimit.JobType toInternalJobType(final JobType jobType) { - return Enums.convertTo(jobType, io.airbyte.config.JobTypeResourceLimit.JobType.class); - } - - // TODO(https://github.com/airbytehq/airbyte/issues/11432): remove these helpers. - public static ConnectionSchedule.TimeUnitEnum toApiTimeUnit(final Schedule.TimeUnit apiTimeUnit) { - return Enums.convertTo(apiTimeUnit, ConnectionSchedule.TimeUnitEnum.class); - } - - public static ConnectionSchedule.TimeUnitEnum toApiTimeUnit(final BasicSchedule.TimeUnit timeUnit) { - return Enums.convertTo(timeUnit, ConnectionSchedule.TimeUnitEnum.class); - } - - public static ConnectionStatus toApiStatus(final StandardSync.Status status) { - return Enums.convertTo(status, ConnectionStatus.class); - } - - public static StandardSync.Status toPersistenceStatus(final ConnectionStatus apiStatus) { - return Enums.convertTo(apiStatus, StandardSync.Status.class); - } - - public static StandardSync.NonBreakingChangesPreference toPersistenceNonBreakingChangesPreference(final NonBreakingChangesPreference preference) { - return Enums.convertTo(preference, StandardSync.NonBreakingChangesPreference.class); - } - - public static Geography toApiGeography(final io.airbyte.config.Geography geography) { - return Enums.convertTo(geography, Geography.class); - } - - public static io.airbyte.config.Geography toPersistenceGeography(final Geography apiGeography) { - return Enums.convertTo(apiGeography, io.airbyte.config.Geography.class); - } - - public static Schedule.TimeUnit toPersistenceTimeUnit(final ConnectionSchedule.TimeUnitEnum apiTimeUnit) { - return Enums.convertTo(apiTimeUnit, Schedule.TimeUnit.class); - } - - public static 
BasicSchedule.TimeUnit toBasicScheduleTimeUnit(final ConnectionSchedule.TimeUnitEnum apiTimeUnit) { - return Enums.convertTo(apiTimeUnit, BasicSchedule.TimeUnit.class); - } - - public static BasicSchedule.TimeUnit toBasicScheduleTimeUnit(final ConnectionScheduleDataBasicSchedule.TimeUnitEnum apiTimeUnit) { - return Enums.convertTo(apiTimeUnit, BasicSchedule.TimeUnit.class); - } - - public static Schedule.TimeUnit toLegacyScheduleTimeUnit(final ConnectionScheduleDataBasicSchedule.TimeUnitEnum timeUnit) { - return Enums.convertTo(timeUnit, Schedule.TimeUnit.class); - } - - public static ConnectionScheduleDataBasicSchedule.TimeUnitEnum toApiBasicScheduleTimeUnit(final BasicSchedule.TimeUnit timeUnit) { - return Enums.convertTo(timeUnit, ConnectionScheduleDataBasicSchedule.TimeUnitEnum.class); - } - - public static ConnectionScheduleDataBasicSchedule.TimeUnitEnum toApiBasicScheduleTimeUnit(final Schedule.TimeUnit timeUnit) { - return Enums.convertTo(timeUnit, ConnectionScheduleDataBasicSchedule.TimeUnitEnum.class); - } - - public static io.airbyte.api.model.generated.ConnectionScheduleType toApiConnectionScheduleType(final StandardSync standardSync) { - if (standardSync.getScheduleType() != null) { - switch (standardSync.getScheduleType()) { - case MANUAL -> { - return io.airbyte.api.model.generated.ConnectionScheduleType.MANUAL; - } - case BASIC_SCHEDULE -> { - return io.airbyte.api.model.generated.ConnectionScheduleType.BASIC; - } - case CRON -> { - return io.airbyte.api.model.generated.ConnectionScheduleType.CRON; - } - default -> throw new RuntimeException("Unexpected scheduleType " + standardSync.getScheduleType()); - } - } else if (standardSync.getManual()) { - // Legacy schema, manual sync. - return io.airbyte.api.model.generated.ConnectionScheduleType.MANUAL; - } else { - // Legacy schema, basic schedule. 
- return io.airbyte.api.model.generated.ConnectionScheduleType.BASIC; - } - } - - public static io.airbyte.api.model.generated.ConnectionScheduleData toApiConnectionScheduleData(final StandardSync standardSync) { - if (standardSync.getScheduleType() != null) { - switch (standardSync.getScheduleType()) { - case MANUAL -> { - return null; - } - case BASIC_SCHEDULE -> { - return new ConnectionScheduleData() - .basicSchedule(new ConnectionScheduleDataBasicSchedule() - .timeUnit(toApiBasicScheduleTimeUnit(standardSync.getScheduleData().getBasicSchedule().getTimeUnit())) - .units(standardSync.getScheduleData().getBasicSchedule().getUnits())); - } - case CRON -> { - return new ConnectionScheduleData() - .cron(new ConnectionScheduleDataCron() - .cronExpression(standardSync.getScheduleData().getCron().getCronExpression()) - .cronTimeZone(standardSync.getScheduleData().getCron().getCronTimeZone())); - } - default -> throw new RuntimeException("Unexpected scheduleType " + standardSync.getScheduleType()); - } - } else if (standardSync.getManual()) { - // Legacy schema, manual sync. - return null; - } else { - // Legacy schema, basic schedule. - return new ConnectionScheduleData() - .basicSchedule(new ConnectionScheduleDataBasicSchedule() - .timeUnit(toApiBasicScheduleTimeUnit(standardSync.getSchedule().getTimeUnit())) - .units(standardSync.getSchedule().getUnits())); - } - } - - public static ConnectionSchedule toLegacyConnectionSchedule(final StandardSync standardSync) { - if (standardSync.getScheduleType() != null) { - // Populate everything based on the new schema. - switch (standardSync.getScheduleType()) { - case MANUAL, CRON -> { - // We don't populate any legacy data here. 
- return null; - } - case BASIC_SCHEDULE -> { - return new ConnectionSchedule() - .timeUnit(toApiTimeUnit(standardSync.getScheduleData().getBasicSchedule().getTimeUnit())) - .units(standardSync.getScheduleData().getBasicSchedule().getUnits()); - } - default -> throw new RuntimeException("Unexpected scheduleType " + standardSync.getScheduleType()); - } - } else if (standardSync.getManual()) { - // Legacy schema, manual sync. - return null; - } else { - // Legacy schema, basic schedule. - return new ConnectionSchedule() - .timeUnit(toApiTimeUnit(standardSync.getSchedule().getTimeUnit())) - .units(standardSync.getSchedule().getUnits()); - } - } - - public static void populateConnectionReadSchedule(final StandardSync standardSync, final ConnectionRead connectionRead) { - connectionRead.scheduleType(toApiConnectionScheduleType(standardSync)); - connectionRead.scheduleData(toApiConnectionScheduleData(standardSync)); - - // TODO(https://github.com/airbytehq/airbyte/issues/11432): only return new schema once frontend is - // ready. - connectionRead.schedule(toLegacyConnectionSchedule(standardSync)); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/CatalogDiffConverters.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/CatalogDiffConverters.java deleted file mode 100644 index 77d763b0f555..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/CatalogDiffConverters.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import io.airbyte.api.model.generated.FieldAdd; -import io.airbyte.api.model.generated.FieldRemove; -import io.airbyte.api.model.generated.FieldSchemaUpdate; -import io.airbyte.api.model.generated.FieldTransform; -import io.airbyte.api.model.generated.StreamTransform; -import io.airbyte.commons.enums.Enums; -import io.airbyte.protocol.models.transform_models.FieldTransformType; -import io.airbyte.protocol.models.transform_models.StreamTransformType; -import io.airbyte.workers.helper.ProtocolConverters; -import java.util.List; -import java.util.Optional; - -/** - * Utility methods for converting between internal and API representation of catalog diffs. - */ -public class CatalogDiffConverters { - - public static StreamTransform streamTransformToApi(final io.airbyte.protocol.models.transform_models.StreamTransform transform) { - return new StreamTransform() - .transformType(Enums.convertTo(transform.getTransformType(), StreamTransform.TransformTypeEnum.class)) - .streamDescriptor(ProtocolConverters.streamDescriptorToApi(transform.getStreamDescriptor())) - .updateStream(updateStreamToApi(transform).orElse(null)); - } - - public static Optional> updateStreamToApi(final io.airbyte.protocol.models.transform_models.StreamTransform transform) { - if (transform.getTransformType() == StreamTransformType.UPDATE_STREAM) { - return Optional.ofNullable(transform.getUpdateStreamTransform() - .getFieldTransforms() - .stream() - .map(CatalogDiffConverters::fieldTransformToApi) - .toList()); - } else { - return Optional.empty(); - } - } - - public static FieldTransform fieldTransformToApi(final io.airbyte.protocol.models.transform_models.FieldTransform transform) { - return new FieldTransform() - .transformType(Enums.convertTo(transform.getTransformType(), FieldTransform.TransformTypeEnum.class)) - .fieldName(transform.getFieldName()) - .breaking(transform.breaking()) - .addField(addFieldToApi(transform).orElse(null)) - 
.removeField(removeFieldToApi(transform).orElse(null)) - .updateFieldSchema(updateFieldToApi(transform).orElse(null)); - } - - private static Optional addFieldToApi(final io.airbyte.protocol.models.transform_models.FieldTransform transform) { - if (transform.getTransformType() == FieldTransformType.ADD_FIELD) { - return Optional.of(new FieldAdd() - .schema(transform.getAddFieldTransform().getSchema())); - } else { - return Optional.empty(); - } - } - - private static Optional removeFieldToApi(final io.airbyte.protocol.models.transform_models.FieldTransform transform) { - if (transform.getTransformType() == FieldTransformType.REMOVE_FIELD) { - return Optional.of(new FieldRemove() - .schema(transform.getRemoveFieldTransform().getSchema())); - } else { - return Optional.empty(); - } - } - - private static Optional updateFieldToApi(final io.airbyte.protocol.models.transform_models.FieldTransform transform) { - if (transform.getTransformType() == FieldTransformType.UPDATE_FIELD_SCHEMA) { - return Optional.of(new FieldSchemaUpdate() - .oldSchema(transform.getUpdateFieldTransform().getOldSchema()) - .newSchema(transform.getUpdateFieldTransform().getNewSchema())); - } else { - return Optional.empty(); - } - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ConfigurationUpdate.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ConfigurationUpdate.java deleted file mode 100644 index b4cab910c03c..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ConfigurationUpdate.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.UUID; - -@Singleton -public class ConfigurationUpdate { - - private final ConfigRepository configRepository; - private final SecretsRepositoryReader secretsRepositoryReader; - private final JsonSecretsProcessor secretsProcessor; - - public ConfigurationUpdate(final ConfigRepository configRepository, final SecretsRepositoryReader secretsRepositoryReader) { - this(configRepository, secretsRepositoryReader, JsonSecretsProcessor.builder() - .copySecrets(true) - .build()); - } - - public ConfigurationUpdate(final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final JsonSecretsProcessor secretsProcessor) { - this.configRepository = configRepository; - this.secretsRepositoryReader = secretsRepositoryReader; - this.secretsProcessor = secretsProcessor; - } - - public SourceConnection source(final UUID sourceId, final String sourceName, final JsonNode newConfiguration) - throws ConfigNotFoundException, IOException, JsonValidationException { - // get existing source - final SourceConnection persistedSource = secretsRepositoryReader.getSourceConnectionWithSecrets(sourceId); - persistedSource.setName(sourceName); - // get spec - final 
StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(persistedSource.getSourceDefinitionId()); - final ConnectorSpecification spec = sourceDefinition.getSpec(); - // copy any necessary secrets from the current source to the incoming updated source - final JsonNode updatedConfiguration = secretsProcessor.copySecrets( - persistedSource.getConfiguration(), - newConfiguration, - spec.getConnectionSpecification()); - - return Jsons.clone(persistedSource).withConfiguration(updatedConfiguration); - } - - public DestinationConnection destination(final UUID destinationId, final String destName, final JsonNode newConfiguration) - throws ConfigNotFoundException, IOException, JsonValidationException { - // get existing destination - final DestinationConnection persistedDestination = secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationId); - persistedDestination.setName(destName); - // get spec - final StandardDestinationDefinition destinationDefinition = configRepository - .getStandardDestinationDefinition(persistedDestination.getDestinationDefinitionId()); - final ConnectorSpecification spec = destinationDefinition.getSpec(); - // copy any necessary secrets from the current destination to the incoming updated destination - final JsonNode updatedConfiguration = secretsProcessor.copySecrets( - persistedDestination.getConfiguration(), - newConfiguration, - spec.getConnectionSpecification()); - - return Jsons.clone(persistedDestination).withConfiguration(updatedConfiguration); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java deleted file mode 100644 index 1a5e59f7d1a6..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java +++ /dev/null @@ -1,261 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights 
reserved. - */ - -package io.airbyte.commons.server.converters; - -import io.airbyte.api.model.generated.AttemptFailureOrigin; -import io.airbyte.api.model.generated.AttemptFailureReason; -import io.airbyte.api.model.generated.AttemptFailureSummary; -import io.airbyte.api.model.generated.AttemptFailureType; -import io.airbyte.api.model.generated.AttemptInfoRead; -import io.airbyte.api.model.generated.AttemptNormalizationStatusRead; -import io.airbyte.api.model.generated.AttemptRead; -import io.airbyte.api.model.generated.AttemptStats; -import io.airbyte.api.model.generated.AttemptStatus; -import io.airbyte.api.model.generated.AttemptStreamStats; -import io.airbyte.api.model.generated.DestinationDefinitionRead; -import io.airbyte.api.model.generated.JobConfigType; -import io.airbyte.api.model.generated.JobDebugRead; -import io.airbyte.api.model.generated.JobInfoLightRead; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.JobOptionalRead; -import io.airbyte.api.model.generated.JobRead; -import io.airbyte.api.model.generated.JobStatus; -import io.airbyte.api.model.generated.JobWithAttemptsRead; -import io.airbyte.api.model.generated.LogRead; -import io.airbyte.api.model.generated.ResetConfig; -import io.airbyte.api.model.generated.SourceDefinitionRead; -import io.airbyte.api.model.generated.SynchronousJobRead; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.scheduler.SynchronousJobMetadata; -import io.airbyte.commons.server.scheduler.SynchronousResponse; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobOutput; -import io.airbyte.config.ResetSourceConfiguration; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.config.StandardSyncSummary; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import 
io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptNormalizationStatus; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.workers.helper.ProtocolConverters; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.nio.file.Path; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; - -@Singleton -public class JobConverter { - - private final WorkerEnvironment workerEnvironment; - private final LogConfigs logConfigs; - - public JobConverter(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs) { - this.workerEnvironment = workerEnvironment; - this.logConfigs = logConfigs; - } - - public JobInfoRead getJobInfoRead(final Job job) { - return new JobInfoRead() - .job(getJobWithAttemptsRead(job).getJob()) - .attempts(job.getAttempts().stream().map(this::getAttemptInfoRead).collect(Collectors.toList())); - } - - public JobInfoLightRead getJobInfoLightRead(final Job job) { - return new JobInfoLightRead().job(getJobRead(job)); - } - - public JobOptionalRead getJobOptionalRead(final Job job) { - return new JobOptionalRead().job(getJobRead(job)); - } - - public static JobDebugRead getDebugJobInfoRead(final JobInfoRead jobInfoRead, - final SourceDefinitionRead sourceDefinitionRead, - final DestinationDefinitionRead destinationDefinitionRead, - final AirbyteVersion airbyteVersion) { - return new JobDebugRead() - .id(jobInfoRead.getJob().getId()) - .configId(jobInfoRead.getJob().getConfigId()) - .configType(jobInfoRead.getJob().getConfigType()) - .status(jobInfoRead.getJob().getStatus()) - .airbyteVersion(airbyteVersion.serialize()) - .sourceDefinition(sourceDefinitionRead) - .destinationDefinition(destinationDefinitionRead); - } - - public static JobWithAttemptsRead getJobWithAttemptsRead(final Job job) { - return new JobWithAttemptsRead() - 
.job(getJobRead(job)) - .attempts(job.getAttempts().stream().map(JobConverter::getAttemptRead).toList()); - } - - public static JobRead getJobRead(final Job job) { - final String configId = job.getScope(); - final JobConfigType configType = Enums.convertTo(job.getConfigType(), JobConfigType.class); - - return new JobRead() - .id(job.getId()) - .configId(configId) - .configType(configType) - .resetConfig(extractResetConfigIfReset(job).orElse(null)) - .createdAt(job.getCreatedAtInSecond()) - .updatedAt(job.getUpdatedAtInSecond()) - .startedAt(job.getStartedAtInSecond().isPresent() ? job.getStartedAtInSecond().get() : null) - .status(Enums.convertTo(job.getStatus(), JobStatus.class)); - } - - /** - * If the job is of type RESET, extracts the part of the reset config that we expose in the API. - * Otherwise, returns empty optional. - * - * @param job - job - * @return api representation of reset config - */ - private static Optional extractResetConfigIfReset(final Job job) { - if (job.getConfigType() == ConfigType.RESET_CONNECTION) { - final ResetSourceConfiguration resetSourceConfiguration = job.getConfig().getResetConnection().getResetSourceConfiguration(); - if (resetSourceConfiguration == null) { - return Optional.empty(); - } - return Optional.ofNullable( - new ResetConfig().streamsToReset(job.getConfig().getResetConnection().getResetSourceConfiguration().getStreamsToReset() - .stream() - .map(ProtocolConverters::streamDescriptorToApi) - .toList())); - } else { - return Optional.empty(); - } - } - - public AttemptInfoRead getAttemptInfoRead(final Attempt attempt) { - return new AttemptInfoRead() - .attempt(getAttemptRead(attempt)) - .logs(getLogRead(attempt.getLogPath())); - } - - public static AttemptRead getAttemptRead(final Attempt attempt) { - return new AttemptRead() - .id((long) attempt.getAttemptNumber()) - .status(Enums.convertTo(attempt.getStatus(), AttemptStatus.class)) - .bytesSynced(attempt.getOutput() // TODO (parker) remove after frontend switches to 
totalStats - .map(JobOutput::getSync) - .map(StandardSyncOutput::getStandardSyncSummary) - .map(StandardSyncSummary::getBytesSynced) - .orElse(null)) - .recordsSynced(attempt.getOutput() // TODO (parker) remove after frontend switches to totalStats - .map(JobOutput::getSync) - .map(StandardSyncOutput::getStandardSyncSummary) - .map(StandardSyncSummary::getRecordsSynced) - .orElse(null)) - .totalStats(getTotalAttemptStats(attempt)) - .streamStats(getAttemptStreamStats(attempt)) - .createdAt(attempt.getCreatedAtInSecond()) - .updatedAt(attempt.getUpdatedAtInSecond()) - .endedAt(attempt.getEndedAtInSecond().orElse(null)) - .failureSummary(getAttemptFailureSummary(attempt)); - } - - private static AttemptStats getTotalAttemptStats(final Attempt attempt) { - final SyncStats totalStats = attempt.getOutput() - .map(JobOutput::getSync) - .map(StandardSyncOutput::getStandardSyncSummary) - .map(StandardSyncSummary::getTotalStats) - .orElse(null); - - if (totalStats == null) { - return null; - } - - return new AttemptStats() - .bytesEmitted(totalStats.getBytesEmitted()) - .recordsEmitted(totalStats.getRecordsEmitted()) - .stateMessagesEmitted(totalStats.getSourceStateMessagesEmitted()) - .recordsCommitted(totalStats.getRecordsCommitted()); - } - - private static List getAttemptStreamStats(final Attempt attempt) { - final List streamStats = attempt.getOutput() - .map(JobOutput::getSync) - .map(StandardSyncOutput::getStandardSyncSummary) - .map(StandardSyncSummary::getStreamStats) - .orElse(null); - - if (streamStats == null) { - return null; - } - - return streamStats.stream() - .map(streamStat -> new AttemptStreamStats() - .streamName(streamStat.getStreamName()) - .stats(new AttemptStats() - .bytesEmitted(streamStat.getStats().getBytesEmitted()) - .recordsEmitted(streamStat.getStats().getRecordsEmitted()) - .stateMessagesEmitted(streamStat.getStats().getSourceStateMessagesEmitted()) - .recordsCommitted(streamStat.getStats().getRecordsCommitted()))) - 
.collect(Collectors.toList()); - } - - private static AttemptFailureSummary getAttemptFailureSummary(final Attempt attempt) { - final io.airbyte.config.AttemptFailureSummary failureSummary = attempt.getFailureSummary().orElse(null); - - if (failureSummary == null) { - return null; - } - - return new AttemptFailureSummary() - .failures(failureSummary.getFailures().stream().map(failure -> new AttemptFailureReason() - .failureOrigin(Enums.convertTo(failure.getFailureOrigin(), AttemptFailureOrigin.class)) - .failureType(Enums.convertTo(failure.getFailureType(), AttemptFailureType.class)) - .externalMessage(failure.getExternalMessage()) - .internalMessage(failure.getInternalMessage()) - .stacktrace(failure.getStacktrace()) - .timestamp(failure.getTimestamp()) - .retryable(failure.getRetryable())) - .collect(Collectors.toList())) - .partialSuccess(failureSummary.getPartialSuccess()); - } - - public LogRead getLogRead(final Path logPath) { - try { - return new LogRead().logLines(LogClientSingleton.getInstance().getJobLogFile(workerEnvironment, logConfigs, logPath)); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public SynchronousJobRead getSynchronousJobRead(final SynchronousResponse response) { - return getSynchronousJobRead(response.getMetadata()); - } - - public SynchronousJobRead getSynchronousJobRead(final SynchronousJobMetadata metadata) { - final JobConfigType configType = Enums.convertTo(metadata.getConfigType(), JobConfigType.class); - - return new SynchronousJobRead() - .id(metadata.getId()) - .configType(configType) - .configId(String.valueOf(metadata.getConfigId())) - .createdAt(metadata.getCreatedAt()) - .endedAt(metadata.getEndedAt()) - .succeeded(metadata.isSucceeded()) - .connectorConfigurationUpdated(metadata.isConnectorConfigurationUpdated()) - .logs(getLogRead(metadata.getLogPath())); - } - - public static AttemptNormalizationStatusRead convertAttemptNormalizationStatus( - final AttemptNormalizationStatus databaseStatus) 
{ - return new AttemptNormalizationStatusRead() - .attemptNumber(databaseStatus.attemptNumber()) - .hasRecordsCommitted(!databaseStatus.recordsCommitted().isEmpty()) - .recordsCommitted(databaseStatus.recordsCommitted().orElse(0L)) - .hasNormalizationFailed(databaseStatus.normalizationFailed()); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/NotificationConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/NotificationConverter.java deleted file mode 100644 index 9fb97c319d5c..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/NotificationConverter.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.converters; - -import io.airbyte.commons.enums.Enums; -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; - -public class NotificationConverter { - - public static List toConfigList(final List notifications) { - if (notifications == null) { - return Collections.emptyList(); - } - return notifications.stream().map(NotificationConverter::toConfig).collect(Collectors.toList()); - } - - public static io.airbyte.config.Notification toConfig(final io.airbyte.api.model.generated.Notification notification) { - return new io.airbyte.config.Notification() - .withNotificationType(Enums.convertTo(notification.getNotificationType(), io.airbyte.config.Notification.NotificationType.class)) - .withSendOnSuccess(notification.getSendOnSuccess()) - .withSendOnFailure(notification.getSendOnFailure()) - .withSlackConfiguration(toConfig(notification.getSlackConfiguration())); - } - - private static io.airbyte.config.SlackNotificationConfiguration toConfig(final io.airbyte.api.model.generated.SlackNotificationConfiguration notification) { - return new io.airbyte.config.SlackNotificationConfiguration() - 
.withWebhook(notification.getWebhook()); - } - - public static List toApiList(final List notifications) { - return notifications.stream().map(NotificationConverter::toApi).collect(Collectors.toList()); - } - - public static io.airbyte.api.model.generated.Notification toApi(final io.airbyte.config.Notification notification) { - return new io.airbyte.api.model.generated.Notification() - .notificationType(Enums.convertTo(notification.getNotificationType(), io.airbyte.api.model.generated.NotificationType.class)) - .sendOnSuccess(notification.getSendOnSuccess()) - .sendOnFailure(notification.getSendOnFailure()) - .slackConfiguration(toApi(notification.getSlackConfiguration())); - } - - private static io.airbyte.api.model.generated.SlackNotificationConfiguration toApi(final io.airbyte.config.SlackNotificationConfiguration notification) { - return new io.airbyte.api.model.generated.SlackNotificationConfiguration() - .webhook(notification.getWebhook()); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/OauthModelConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/OauthModelConverter.java deleted file mode 100644 index 4c43cdecab5c..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/OauthModelConverter.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import io.airbyte.api.model.generated.AdvancedAuth; -import io.airbyte.api.model.generated.AdvancedAuth.AuthFlowTypeEnum; -import io.airbyte.api.model.generated.AuthSpecification; -import io.airbyte.api.model.generated.OAuth2Specification; -import io.airbyte.api.model.generated.OAuthConfigSpecification; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.util.List; -import java.util.Optional; - -public class OauthModelConverter { - - public static Optional getAuthSpec(final ConnectorSpecification spec) { - if (spec.getAuthSpecification() == null) { - return Optional.empty(); - } - final io.airbyte.protocol.models.AuthSpecification incomingAuthSpec = spec.getAuthSpecification(); - - final AuthSpecification authSpecification = new AuthSpecification(); - if (incomingAuthSpec.getAuthType() == io.airbyte.protocol.models.AuthSpecification.AuthType.OAUTH_2_0) { - authSpecification.authType(AuthSpecification.AuthTypeEnum.OAUTH2_0) - .oauth2Specification(new OAuth2Specification() - .rootObject(incomingAuthSpec.getOauth2Specification().getRootObject()) - .oauthFlowInitParameters(incomingAuthSpec.getOauth2Specification().getOauthFlowInitParameters()) - .oauthFlowOutputParameters(incomingAuthSpec.getOauth2Specification().getOauthFlowOutputParameters())); - } - return Optional.of(authSpecification); - } - - public static Optional getAdvancedAuth(final ConnectorSpecification spec) { - if (spec.getAdvancedAuth() == null) { - return Optional.empty(); - } - final io.airbyte.protocol.models.AdvancedAuth incomingAdvancedAuth = spec.getAdvancedAuth(); - final AdvancedAuth advancedAuth = new AdvancedAuth(); - if (List.of(io.airbyte.protocol.models.AdvancedAuth.AuthFlowType.OAUTH_1_0, io.airbyte.protocol.models.AdvancedAuth.AuthFlowType.OAUTH_2_0) - .contains(incomingAdvancedAuth.getAuthFlowType())) { - final AuthFlowTypeEnum oauthFlowType; - if 
(io.airbyte.protocol.models.AdvancedAuth.AuthFlowType.OAUTH_1_0.equals(incomingAdvancedAuth.getAuthFlowType())) { - oauthFlowType = AuthFlowTypeEnum.OAUTH1_0; - } else { - oauthFlowType = AuthFlowTypeEnum.OAUTH2_0; - } - final io.airbyte.protocol.models.OAuthConfigSpecification incomingOAuthConfigSpecification = incomingAdvancedAuth.getOauthConfigSpecification(); - advancedAuth - .authFlowType(oauthFlowType) - .predicateKey(incomingAdvancedAuth.getPredicateKey()) - .predicateValue(incomingAdvancedAuth.getPredicateValue()) - .oauthConfigSpecification(new OAuthConfigSpecification() - .oauthUserInputFromConnectorConfigSpecification(incomingOAuthConfigSpecification.getOauthUserInputFromConnectorConfigSpecification()) - .completeOAuthOutputSpecification(incomingOAuthConfigSpecification.getCompleteOauthOutputSpecification()) - .completeOAuthServerInputSpecification(incomingOAuthConfigSpecification.getCompleteOauthServerInputSpecification()) - .completeOAuthServerOutputSpecification(incomingOAuthConfigSpecification.getCompleteOauthServerOutputSpecification())); - } - return Optional.of(advancedAuth); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/OperationsConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/OperationsConverter.java deleted file mode 100644 index 6b2d2167d91d..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/OperationsConverter.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import static io.airbyte.api.model.generated.OperatorWebhook.WebhookTypeEnum.DBTCLOUD; - -import com.google.common.base.Preconditions; -import io.airbyte.api.model.generated.OperationRead; -import io.airbyte.api.model.generated.OperatorConfiguration; -import io.airbyte.api.model.generated.OperatorNormalization.OptionEnum; -import io.airbyte.api.model.generated.OperatorWebhookDbtCloud; -import io.airbyte.commons.enums.Enums; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; -import io.airbyte.config.OperatorWebhook; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class OperationsConverter { - - public static void populateOperatorConfigFromApi(final OperatorConfiguration operatorConfig, final StandardSyncOperation standardSyncOperation) { - standardSyncOperation.withOperatorType(Enums.convertTo(operatorConfig.getOperatorType(), OperatorType.class)); - switch (operatorConfig.getOperatorType()) { - case NORMALIZATION -> { - Preconditions.checkArgument(operatorConfig.getNormalization() != null); - standardSyncOperation.withOperatorNormalization(new OperatorNormalization() - .withOption(Enums.convertTo(operatorConfig.getNormalization().getOption(), Option.class))); - // Null out the other configs, since it's mutually exclusive. We need to do this if it's an update. 
- standardSyncOperation.withOperatorDbt(null); - standardSyncOperation.withOperatorWebhook(null); - } - case DBT -> { - Preconditions.checkArgument(operatorConfig.getDbt() != null); - standardSyncOperation.withOperatorDbt(new OperatorDbt() - .withGitRepoUrl(operatorConfig.getDbt().getGitRepoUrl()) - .withGitRepoBranch(operatorConfig.getDbt().getGitRepoBranch()) - .withDockerImage(operatorConfig.getDbt().getDockerImage()) - .withDbtArguments(operatorConfig.getDbt().getDbtArguments())); - // Null out the other configs, since they're mutually exclusive. We need to do this if it's an - // update. - standardSyncOperation.withOperatorNormalization(null); - standardSyncOperation.withOperatorWebhook(null); - } - case WEBHOOK -> { - Preconditions.checkArgument(operatorConfig.getWebhook() != null); - // TODO(mfsiega-airbyte): check that the webhook config id references a real webhook config. - standardSyncOperation.withOperatorWebhook(webhookOperatorFromConfig(operatorConfig.getWebhook())); - // Null out the other configs, since it's mutually exclusive. We need to do this if it's an update. - standardSyncOperation.withOperatorNormalization(null); - standardSyncOperation.withOperatorDbt(null); - } - } - } - - public static OperationRead operationReadFromPersistedOperation(final StandardSyncOperation standardSyncOperation) { - final OperatorConfiguration operatorConfiguration = new OperatorConfiguration() - .operatorType(Enums.convertTo(standardSyncOperation.getOperatorType(), io.airbyte.api.model.generated.OperatorType.class)); - if (standardSyncOperation.getOperatorType() == null) { - // TODO(mfsiega-airbyte): this case shouldn't happen, but the API today would tolerate it. After - // verifying that it really can't happen, turn this into a precondition. 
- return new OperationRead() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()); - } - switch (standardSyncOperation.getOperatorType()) { - case NORMALIZATION -> { - Preconditions.checkArgument(standardSyncOperation.getOperatorNormalization() != null); - operatorConfiguration.normalization(new io.airbyte.api.model.generated.OperatorNormalization() - .option(Enums.convertTo(standardSyncOperation.getOperatorNormalization().getOption(), OptionEnum.class))); - } - case DBT -> { - Preconditions.checkArgument(standardSyncOperation.getOperatorDbt() != null); - operatorConfiguration.dbt(new io.airbyte.api.model.generated.OperatorDbt() - .gitRepoUrl(standardSyncOperation.getOperatorDbt().getGitRepoUrl()) - .gitRepoBranch(standardSyncOperation.getOperatorDbt().getGitRepoBranch()) - .dockerImage(standardSyncOperation.getOperatorDbt().getDockerImage()) - .dbtArguments(standardSyncOperation.getOperatorDbt().getDbtArguments())); - } - case WEBHOOK -> { - Preconditions.checkArgument(standardSyncOperation.getOperatorWebhook() != null); - operatorConfiguration.webhook(webhookOperatorFromPersistence(standardSyncOperation.getOperatorWebhook())); - } - } - return new OperationRead() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(operatorConfiguration); - } - - private static OperatorWebhook webhookOperatorFromConfig(io.airbyte.api.model.generated.OperatorWebhook webhookConfig) { - final var operatorWebhook = new OperatorWebhook().withWebhookConfigId(webhookConfig.getWebhookConfigId()); - // TODO(mfsiega-airbyte): remove this once the frontend is sending the new format. 
- if (webhookConfig.getWebhookType() == null) { - return operatorWebhook - .withExecutionUrl(webhookConfig.getExecutionUrl()) - .withExecutionBody(webhookConfig.getExecutionBody()); - } - switch (webhookConfig.getWebhookType()) { - case DBTCLOUD -> { - return operatorWebhook - .withExecutionUrl(DbtCloudOperationConverter.getExecutionUrlFrom(webhookConfig.getDbtCloud())) - .withExecutionBody(DbtCloudOperationConverter.getDbtCloudExecutionBody()); - } - // Future webhook operator types added here. - } - throw new IllegalArgumentException("Unsupported webhook operation type"); - } - - private static io.airbyte.api.model.generated.OperatorWebhook webhookOperatorFromPersistence(final OperatorWebhook persistedWebhook) { - final io.airbyte.api.model.generated.OperatorWebhook webhookOperator = new io.airbyte.api.model.generated.OperatorWebhook() - .webhookConfigId(persistedWebhook.getWebhookConfigId()); - OperatorWebhookDbtCloud dbtCloudOperator = DbtCloudOperationConverter.parseFrom(persistedWebhook); - if (dbtCloudOperator != null) { - webhookOperator.webhookType(DBTCLOUD).dbtCloud(dbtCloudOperator); - // TODO(mfsiega-airbyte): remove once frontend switches to new format. - // Dual-write deprecated webhook format. - webhookOperator.executionUrl(DbtCloudOperationConverter.getExecutionUrlFrom(dbtCloudOperator)); - webhookOperator.executionBody(DbtCloudOperationConverter.getDbtCloudExecutionBody()); - } else { - throw new IllegalArgumentException("Unexpected webhook operator config"); - } - return webhookOperator; - } - - private static class DbtCloudOperationConverter { - - // See https://docs.getdbt.com/dbt-cloud/api-v2 for documentation on dbt Cloud API endpoints. 
- final static Pattern dbtUrlPattern = Pattern.compile("^https://cloud\\.getdbt\\.com/api/v2/accounts/(\\d+)/jobs/(\\d+)/run/$"); - private static final int ACCOUNT_REGEX_GROUP = 1; - private static final int JOB_REGEX_GROUP = 2; - - private static OperatorWebhookDbtCloud parseFrom(OperatorWebhook persistedWebhook) { - Matcher dbtCloudUrlMatcher = dbtUrlPattern.matcher(persistedWebhook.getExecutionUrl()); - final var dbtCloudConfig = new OperatorWebhookDbtCloud(); - if (dbtCloudUrlMatcher.matches()) { - dbtCloudConfig.setAccountId(Integer.valueOf(dbtCloudUrlMatcher.group(ACCOUNT_REGEX_GROUP))); - dbtCloudConfig.setJobId(Integer.valueOf(dbtCloudUrlMatcher.group(JOB_REGEX_GROUP))); - return dbtCloudConfig; - } - return null; - } - - private static String getExecutionUrlFrom(final OperatorWebhookDbtCloud dbtCloudConfig) { - return String.format("https://cloud.getdbt.com/api/v2/accounts/%d/jobs/%d/run/", dbtCloudConfig.getAccountId(), - dbtCloudConfig.getJobId()); - } - - private static String getDbtCloudExecutionBody() { - return "{\"cause\": \"airbyte\"}"; - } - - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/SpecFetcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/SpecFetcher.java deleted file mode 100644 index 0202b23a86b1..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/SpecFetcher.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import com.google.common.base.Preconditions; -import io.airbyte.commons.server.scheduler.SynchronousResponse; -import io.airbyte.protocol.models.ConnectorSpecification; - -public class SpecFetcher { - - public static ConnectorSpecification getSpecFromJob(final SynchronousResponse response) { - Preconditions.checkState(response.isSuccess(), "Get Spec job failed."); - Preconditions.checkNotNull(response.getOutput(), "Get Spec job return null spec"); - - return response.getOutput(); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkflowStateConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkflowStateConverter.java deleted file mode 100644 index af912a94dbd4..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkflowStateConverter.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.converters; - -import io.airbyte.api.model.generated.WorkflowStateRead; -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; -import jakarta.inject.Singleton; - -@Singleton -public class WorkflowStateConverter { - - public WorkflowStateRead getWorkflowStateRead(final WorkflowState workflowState) { - return new WorkflowStateRead().running(workflowState.isRunning()); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkspaceWebhookConfigsConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkspaceWebhookConfigsConverter.java deleted file mode 100644 index 9e9669ee299d..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkspaceWebhookConfigsConverter.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.api.model.generated.WebhookConfigRead; -import io.airbyte.api.model.generated.WebhookConfigWrite; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.WebhookConfig; -import io.airbyte.config.WebhookOperationConfigs; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.UUID; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -// NOTE: we suppress this warning because PMD thinks it can be a foreach loop in toApiReads but the -// compiler disagrees. -@SuppressWarnings("PMD.ForLoopCanBeForeach") -public class WorkspaceWebhookConfigsConverter { - - public static JsonNode toPersistenceWrite(List apiWebhookConfigs, Supplier uuidSupplier) { - if (apiWebhookConfigs == null) { - return Jsons.emptyObject(); - } - - final WebhookOperationConfigs configs = new WebhookOperationConfigs() - .withWebhookConfigs(apiWebhookConfigs.stream().map((item) -> toPersistenceConfig(uuidSupplier, item)).collect(Collectors.toList())); - - return Jsons.jsonNode(configs); - } - - /** - * Extract the read-only properties from a set of persisted webhook operation configs. - *

- * Specifically, returns the id and name but excludes the secret auth token. Note that we "manually" - * deserialize the JSON tree instead of deserializing to our internal schema -- - * WebhookOperationConfigs -- because the persisted JSON doesn't conform to that schema until we - * hydrate the secrets. Since we don't want to unnecessarily hydrate the secrets to read from the - * API, we do this instead. - *

- * TODO(mfsiega-airbyte): try find a cleaner way to handle this situation. - * - * @param persistedWebhookConfig - The JsonNode of the persisted webhook configs - * @return a list of (webhook id, name) pairs - */ - public static List toApiReads(final JsonNode persistedWebhookConfig) { - if (persistedWebhookConfig == null) { - return Collections.emptyList(); - } - - // NOTE: we deserialize it "by hand" because the secrets aren't hydrated, so we can't deserialize it - // into the usual shape. - // TODO(mfsiega-airbyte): find a cleaner way to handle this situation. - List configReads = new ArrayList<>(); - - final JsonNode configArray = persistedWebhookConfig.findPath("webhookConfigs"); - Iterator it = configArray.elements(); - while (it.hasNext()) { - JsonNode webhookConfig = it.next(); - configReads.add(toApiRead(webhookConfig)); - } - return configReads; - } - - private static WebhookConfig toPersistenceConfig(final Supplier uuidSupplier, final WebhookConfigWrite input) { - return new WebhookConfig() - .withId(uuidSupplier.get()) - .withName(input.getName()) - .withAuthToken(input.getAuthToken()); - } - - private static WebhookConfigRead toApiRead(final JsonNode configJson) { - final var read = new WebhookConfigRead(); - read.setId(UUID.fromString(configJson.findValue("id").asText())); - read.setName(configJson.findValue("name").asText()); - return read; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ApplicationErrorKnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ApplicationErrorKnownException.java deleted file mode 100644 index d7f73e42931e..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ApplicationErrorKnownException.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.errors; - -public class ApplicationErrorKnownException extends KnownException { - - public ApplicationErrorKnownException(final String message) { - super(message); - } - - public ApplicationErrorKnownException(final String message, final Throwable cause) { - super(message, cause); - } - - @Override - public int getHttpCode() { - return 422; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/BadObjectSchemaKnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/BadObjectSchemaKnownException.java deleted file mode 100644 index 426d3f0aa73a..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/BadObjectSchemaKnownException.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.errors; - -public class BadObjectSchemaKnownException extends KnownException { - - public BadObjectSchemaKnownException(final String message) { - super(message); - } - - public BadObjectSchemaKnownException(final String message, final Throwable cause) { - super(message, cause); - } - - @Override - public int getHttpCode() { - return 422; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ConnectFailureKnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ConnectFailureKnownException.java deleted file mode 100644 index a94d4b41ead9..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ConnectFailureKnownException.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.errors; - -public class ConnectFailureKnownException extends KnownException { - - public ConnectFailureKnownException(final String message) { - super(message); - } - - public ConnectFailureKnownException(final String message, final Throwable cause) { - super(message, cause); - } - - @Override - public int getHttpCode() { - return 400; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundExceptionMapper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundExceptionMapper.java deleted file mode 100644 index 1025be5710bc..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundExceptionMapper.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.errors; - -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Produces; -import io.micronaut.http.server.exceptions.ExceptionHandler; -import jakarta.inject.Singleton; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Produces -@Singleton -@Requires(classes = IdNotFoundKnownException.class) -public class IdNotFoundExceptionMapper implements ExceptionHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(IdNotFoundExceptionMapper.class); - - @Override - public HttpResponse handle(final HttpRequest request, final IdNotFoundKnownException exception) { - final IdNotFoundKnownException idnf = new IdNotFoundKnownException("Id not found: " + exception.getMessage(), exception); - LOGGER.error("Not found exception", idnf.getNotFoundKnownExceptionInfo()); - - return HttpResponse.status(HttpStatus.NOT_FOUND) - .body(KnownException.infoFromThrowableWithMessage(exception, 
"Internal Server Error: " + exception.getMessage())) - .contentType(MediaType.APPLICATION_JSON); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java deleted file mode 100644 index 14360386cbf3..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.errors; - -import io.airbyte.api.model.generated.NotFoundKnownExceptionInfo; -import org.apache.logging.log4j.core.util.Throwables; - -public class IdNotFoundKnownException extends KnownException { - - String id; - - public IdNotFoundKnownException(final String message, final String id) { - super(message); - this.id = id; - } - - public IdNotFoundKnownException(final String message, final String id, final Throwable cause) { - super(message, cause); - this.id = id; - } - - public IdNotFoundKnownException(final String message, final Throwable cause) { - super(message, cause); - } - - @Override - public int getHttpCode() { - return 404; - } - - public String getId() { - return id; - } - - public NotFoundKnownExceptionInfo getNotFoundKnownExceptionInfo() { - final NotFoundKnownExceptionInfo exceptionInfo = new NotFoundKnownExceptionInfo() - .exceptionClassName(this.getClass().getName()) - .message(this.getMessage()) - .exceptionStack(Throwables.toStringList(this)); - if (this.getCause() != null) { - exceptionInfo.rootCauseExceptionClassName(this.getClass().getClass().getName()); - exceptionInfo.rootCauseExceptionStack(Throwables.toStringList(this.getCause())); - } - exceptionInfo.id(this.getId()); - return exceptionInfo; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InternalServerKnownException.java 
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InternalServerKnownException.java deleted file mode 100644 index 6f328721af4a..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InternalServerKnownException.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.errors; - -public class InternalServerKnownException extends KnownException { - - public InternalServerKnownException(final String message) { - super(message); - } - - public InternalServerKnownException(final String message, final Throwable cause) { - super(message, cause); - } - - @Override - public int getHttpCode() { - return 500; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidInputExceptionMapper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidInputExceptionMapper.java deleted file mode 100644 index 48306d05ab54..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidInputExceptionMapper.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.errors; - -import io.airbyte.api.model.generated.InvalidInputExceptionInfo; -import io.airbyte.api.model.generated.InvalidInputProperty; -import io.airbyte.commons.json.Jsons; -import java.util.ArrayList; -import java.util.List; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; -import org.apache.logging.log4j.core.util.Throwables; - -@Provider -public class InvalidInputExceptionMapper implements ExceptionMapper { - - public static InvalidInputExceptionInfo infoFromConstraints(final ConstraintViolationException cve) { - final InvalidInputExceptionInfo exceptionInfo = new InvalidInputExceptionInfo() - .exceptionClassName(cve.getClass().getName()) - .message("Some properties contained invalid input.") - .exceptionStack(Throwables.toStringList(cve)); - - final List props = new ArrayList(); - for (final ConstraintViolation cv : cve.getConstraintViolations()) { - props.add(new InvalidInputProperty() - .propertyPath(cv.getPropertyPath().toString()) - .message(cv.getMessage()) - .invalidValue(cv.getInvalidValue() != null ? 
cv.getInvalidValue().toString() : "null")); - } - exceptionInfo.validationErrors(props); - return exceptionInfo; - } - - @Override - public Response toResponse(final ConstraintViolationException e) { - return Response.status(Response.Status.BAD_REQUEST) - .entity(Jsons.serialize(InvalidInputExceptionMapper.infoFromConstraints(e))) - .type("application/json") - .build(); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidJsonExceptionMapper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidJsonExceptionMapper.java deleted file mode 100644 index 7cd7a297199f..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidJsonExceptionMapper.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.errors; - -import com.fasterxml.jackson.core.JsonParseException; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -@Provider -public class InvalidJsonExceptionMapper implements ExceptionMapper { - - @Override - public Response toResponse(final JsonParseException e) { - return Response.status(422) - .entity(KnownException.infoFromThrowableWithMessage(e, "Invalid json. " + e.getMessage() + " " + e.getOriginalMessage())) - .type("application/json") - .build(); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidJsonInputExceptionMapper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidJsonInputExceptionMapper.java deleted file mode 100644 index a0545fdf101f..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/InvalidJsonInputExceptionMapper.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.errors; - -import com.fasterxml.jackson.databind.JsonMappingException; -import io.airbyte.commons.json.Jsons; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -@Provider -public class InvalidJsonInputExceptionMapper implements ExceptionMapper { - - @Override - public Response toResponse(final JsonMappingException e) { - return Response.status(422) - .entity( - Jsons.serialize(KnownException.infoFromThrowableWithMessage(e, "Invalid json input. " + e.getMessage() + " " + e.getOriginalMessage()))) - .type("application/json") - .build(); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java deleted file mode 100644 index db35ccac7cd5..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.errors; - -import io.airbyte.api.model.generated.KnownExceptionInfo; -import org.apache.logging.log4j.core.util.Throwables; - -public abstract class KnownException extends RuntimeException { - - public KnownException(final String message) { - super(message); - } - - public KnownException(final String message, final Throwable cause) { - super(message, cause); - } - - abstract public int getHttpCode(); - - public KnownExceptionInfo getKnownExceptionInfo() { - return KnownException.infoFromThrowable(this); - } - - public static KnownExceptionInfo infoFromThrowableWithMessage(final Throwable t, final String message) { - final KnownExceptionInfo exceptionInfo = new KnownExceptionInfo() - .exceptionClassName(t.getClass().getName()) - .message(message) - .exceptionStack(Throwables.toStringList(t)); - if (t.getCause() != null) { - exceptionInfo.rootCauseExceptionClassName(t.getClass().getClass().getName()); - exceptionInfo.rootCauseExceptionStack(Throwables.toStringList(t.getCause())); - } - return exceptionInfo; - } - - public static KnownExceptionInfo infoFromThrowable(final Throwable t) { - return infoFromThrowableWithMessage(t, t.getMessage()); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownExceptionMapper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownExceptionMapper.java deleted file mode 100644 index 6bbce96bca7f..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownExceptionMapper.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.errors; - -import io.airbyte.commons.json.Jsons; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Produces; -import jakarta.inject.Singleton; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Produces -@Singleton -@Requires(classes = KnownException.class) -public class KnownExceptionMapper implements ExceptionMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(KnownExceptionMapper.class); - - @Override - public Response toResponse(final KnownException e) { - LOGGER.info("Known exception", e.getKnownExceptionInfo()); - return Response.status(e.getHttpCode()) - .entity(Jsons.serialize(e.getKnownExceptionInfo())) - .type("application/json") - .build(); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/NotFoundExceptionMapper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/NotFoundExceptionMapper.java deleted file mode 100644 index 152933ba9a43..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/NotFoundExceptionMapper.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.errors; - -import io.airbyte.commons.json.Jsons; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Provider -public class NotFoundExceptionMapper implements ExceptionMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(NotFoundExceptionMapper.class); - - @Override - public Response toResponse(final NotFoundException e) { - // Would like to send the id along but we don't have access to the http request anymore to fetch it - // from. 
TODO: Come back to this with issue #4189 - final IdNotFoundKnownException idnf = new IdNotFoundKnownException("Object not found. " + e.getMessage(), e); - LOGGER.error("Not found exception", idnf.getNotFoundKnownExceptionInfo()); - return Response.status(404) - .entity(Jsons.serialize(idnf.getNotFoundKnownExceptionInfo())) - .type("application/json") - .build(); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/UncaughtExceptionMapper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/UncaughtExceptionMapper.java deleted file mode 100644 index ff738e6bae48..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/UncaughtExceptionMapper.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.errors; - -import io.airbyte.api.model.generated.KnownExceptionInfo; -import io.airbyte.commons.json.Jsons; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Provider -public class UncaughtExceptionMapper implements ExceptionMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(UncaughtExceptionMapper.class); - - @Override - public Response toResponse(final Throwable e) { - LOGGER.error("Uncaught exception", e); - final KnownExceptionInfo exceptionInfo = KnownException.infoFromThrowableWithMessage(e, "Internal Server Error: " + e.getMessage()); - return Response.status(500) - .entity(Jsons.serialize(exceptionInfo)) - .type("application/json") - .build(); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/UnsupportedProtocolVersionException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/UnsupportedProtocolVersionException.java deleted file mode 100644 index 0d8a28921c83..000000000000 --- 
a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/UnsupportedProtocolVersionException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.errors; - -import io.airbyte.commons.version.Version; - -public class UnsupportedProtocolVersionException extends KnownException { - - public UnsupportedProtocolVersionException(final Version current, final Version minSupported, final Version maxSupported) { - this(current.serialize(), minSupported, maxSupported); - } - - public UnsupportedProtocolVersionException(final String current, final Version minSupported, final Version maxSupported) { - super(String.format("Airbyte Protocol Version %s is not supported. (Must be within [%s:%s])", - current, minSupported.serialize(), maxSupported.serialize())); - } - - @Override - public int getHttpCode() { - return 400; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ValueConflictKnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ValueConflictKnownException.java deleted file mode 100644 index d1ce98447c09..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ValueConflictKnownException.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.errors; - -public class ValueConflictKnownException extends KnownException { - - public ValueConflictKnownException(final String message) { - super(message); - } - - public ValueConflictKnownException(final String message, final Throwable cause) { - super(message, cause); - } - - @Override - public int getHttpCode() { - return 409; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java deleted file mode 100644 index 5f3dd815a54d..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import io.airbyte.api.model.generated.InternalOperationResult; -import io.airbyte.api.model.generated.SaveAttemptSyncConfigRequestBody; -import io.airbyte.api.model.generated.SaveStatsRequestBody; -import io.airbyte.api.model.generated.SetWorkflowInAttemptRequestBody; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import io.airbyte.persistence.job.JobPersistence; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Singleton -public class AttemptHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(AttemptHandler.class); - - private final JobPersistence jobPersistence; - - public AttemptHandler(final JobPersistence jobPersistence) { - this.jobPersistence = jobPersistence; - } - - public InternalOperationResult setWorkflowInAttempt(final SetWorkflowInAttemptRequestBody requestBody) { - try { - jobPersistence.setAttemptTemporalWorkflowInfo(requestBody.getJobId(), - 
requestBody.getAttemptNumber(), requestBody.getWorkflowId(), requestBody.getProcessingTaskQueue()); - } catch (final IOException ioe) { - LOGGER.error("IOException when setting temporal workflow in attempt;", ioe); - return new InternalOperationResult().succeeded(false); - } - return new InternalOperationResult().succeeded(true); - } - - public InternalOperationResult saveStats(final SaveStatsRequestBody requestBody) { - try { - final var stats = requestBody.getStats(); - final var streamStats = requestBody.getStreamStats().stream() - .map(s -> new StreamSyncStats() - .withStreamName(s.getStreamName()) - .withStreamNamespace(s.getStreamNamespace()) - .withStats(new SyncStats() - .withBytesEmitted(s.getStats().getBytesEmitted()) - .withRecordsEmitted(s.getStats().getRecordsEmitted()) - .withEstimatedBytes(s.getStats().getEstimatedBytes()) - .withEstimatedRecords(s.getStats().getEstimatedRecords()))) - .collect(Collectors.toList()); - - jobPersistence.writeStats(requestBody.getJobId(), requestBody.getAttemptNumber(), - stats.getEstimatedRecords(), stats.getEstimatedBytes(), stats.getRecordsEmitted(), stats.getBytesEmitted(), streamStats); - - } catch (final IOException ioe) { - LOGGER.error("IOException when setting temporal workflow in attempt;", ioe); - return new InternalOperationResult().succeeded(false); - } - - return new InternalOperationResult().succeeded(true); - } - - public InternalOperationResult saveSyncConfig(final SaveAttemptSyncConfigRequestBody requestBody) { - try { - jobPersistence.writeAttemptSyncConfig( - requestBody.getJobId(), - requestBody.getAttemptNumber(), - ApiPojoConverters.attemptSyncConfigToInternal(requestBody.getSyncConfig())); - } catch (final IOException ioe) { - LOGGER.error("IOException when saving AttemptSyncConfig for attempt;", ioe); - return new InternalOperationResult().succeeded(false); - } - return new InternalOperationResult().succeeded(true); - } - -} diff --git 
a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java deleted file mode 100644 index d159297329b9..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java +++ /dev/null @@ -1,609 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import com.google.common.collect.Lists; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.api.model.generated.AirbyteCatalog; -import io.airbyte.api.model.generated.AirbyteStreamConfiguration; -import io.airbyte.api.model.generated.CatalogDiff; -import io.airbyte.api.model.generated.ConnectionCreate; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionReadList; -import io.airbyte.api.model.generated.ConnectionSearch; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationSearch; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceSearch; -import io.airbyte.api.model.generated.StreamDescriptor; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.commons.server.converters.CatalogDiffConverters; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.commons.server.handlers.helpers.ConnectionMatcher; -import 
io.airbyte.commons.server.handlers.helpers.ConnectionScheduleHelper; -import io.airbyte.commons.server.handlers.helpers.DestinationMatcher; -import io.airbyte.commons.server.handlers.helpers.SourceMatcher; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.BasicSchedule; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.FieldSelectionData; -import io.airbyte.config.Geography; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.Schedule; -import io.airbyte.config.ScheduleData; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.ScheduleType; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.helpers.ScheduleHelpers; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.validation.json.JsonValidationException; -import io.airbyte.workers.helper.ConnectionHelper; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Singleton -public class ConnectionsHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConnectionsHandler.class); - - private final ConfigRepository configRepository; - private final Supplier 
uuidGenerator; - private final WorkspaceHelper workspaceHelper; - private final TrackingClient trackingClient; - private final EventRunner eventRunner; - private final ConnectionHelper connectionHelper; - - @VisibleForTesting - ConnectionsHandler(final ConfigRepository configRepository, - final Supplier uuidGenerator, - final WorkspaceHelper workspaceHelper, - final TrackingClient trackingClient, - final EventRunner eventRunner, - final ConnectionHelper connectionHelper) { - this.configRepository = configRepository; - this.uuidGenerator = uuidGenerator; - this.workspaceHelper = workspaceHelper; - this.trackingClient = trackingClient; - this.eventRunner = eventRunner; - this.connectionHelper = connectionHelper; - } - - @Deprecated(forRemoval = true) - public ConnectionsHandler(final ConfigRepository configRepository, - final WorkspaceHelper workspaceHelper, - final TrackingClient trackingClient, - final EventRunner eventRunner, - final ConnectionHelper connectionHelper) { - this(configRepository, - UUID::randomUUID, - workspaceHelper, - trackingClient, - eventRunner, - connectionHelper); - - } - - public ConnectionRead createConnection(final ConnectionCreate connectionCreate) - throws JsonValidationException, IOException, ConfigNotFoundException { - - // Validate source and destination - final SourceConnection sourceConnection = configRepository.getSourceConnection(connectionCreate.getSourceId()); - final DestinationConnection destinationConnection = configRepository.getDestinationConnection(connectionCreate.getDestinationId()); - - // Set this as default name if connectionCreate doesn't have it - final String defaultName = sourceConnection.getName() + " <> " + destinationConnection.getName(); - - final List operationIds = connectionCreate.getOperationIds() != null ? 
connectionCreate.getOperationIds() : Collections.emptyList(); - - ConnectionHelper.validateWorkspace(workspaceHelper, - connectionCreate.getSourceId(), - connectionCreate.getDestinationId(), - operationIds); - - final UUID connectionId = uuidGenerator.get(); - - // If not specified, default the NamespaceDefinition to 'source' - final NamespaceDefinitionType namespaceDefinitionType = - connectionCreate.getNamespaceDefinition() == null - ? NamespaceDefinitionType.SOURCE - : Enums.convertTo(connectionCreate.getNamespaceDefinition(), NamespaceDefinitionType.class); - - // persist sync - final StandardSync standardSync = new StandardSync() - .withConnectionId(connectionId) - .withName(connectionCreate.getName() != null ? connectionCreate.getName() : defaultName) - .withNamespaceDefinition(namespaceDefinitionType) - .withNamespaceFormat(connectionCreate.getNamespaceFormat()) - .withPrefix(connectionCreate.getPrefix()) - .withSourceId(connectionCreate.getSourceId()) - .withDestinationId(connectionCreate.getDestinationId()) - .withOperationIds(operationIds) - .withStatus(ApiPojoConverters.toPersistenceStatus(connectionCreate.getStatus())) - .withSourceCatalogId(connectionCreate.getSourceCatalogId()) - .withGeography(getGeographyFromConnectionCreateOrWorkspace(connectionCreate)) - .withBreakingChange(false) - .withNonBreakingChangesPreference( - ApiPojoConverters.toPersistenceNonBreakingChangesPreference(connectionCreate.getNonBreakingChangesPreference())); - if (connectionCreate.getResourceRequirements() != null) { - standardSync.withResourceRequirements(ApiPojoConverters.resourceRequirementsToInternal(connectionCreate.getResourceRequirements())); - } - - // TODO Undesirable behavior: sending a null configured catalog should not be valid? 
- if (connectionCreate.getSyncCatalog() != null) { - standardSync.withCatalog(CatalogConverter.toConfiguredProtocol(connectionCreate.getSyncCatalog())); - standardSync.withFieldSelectionData(CatalogConverter.getFieldSelectionData(connectionCreate.getSyncCatalog())); - } else { - standardSync.withCatalog(new ConfiguredAirbyteCatalog().withStreams(Collections.emptyList())); - standardSync.withFieldSelectionData(new FieldSelectionData()); - } - - if (connectionCreate.getSchedule() != null && connectionCreate.getScheduleType() != null) { - throw new JsonValidationException("supply old or new schedule schema but not both"); - } - - if (connectionCreate.getScheduleType() != null) { - ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(standardSync, connectionCreate.getScheduleType(), - connectionCreate.getScheduleData()); - } else { - populateSyncFromLegacySchedule(standardSync, connectionCreate); - } - - configRepository.writeStandardSync(standardSync); - - trackNewConnection(standardSync); - - try { - LOGGER.info("Starting a connection manager workflow"); - eventRunner.createConnectionManagerWorkflow(connectionId); - } catch (final Exception e) { - LOGGER.error("Start of the connection manager workflow failed", e); - configRepository.deleteStandardSync(standardSync.getConnectionId()); - throw e; - } - - return buildConnectionRead(connectionId); - } - - private Geography getGeographyFromConnectionCreateOrWorkspace(final ConnectionCreate connectionCreate) - throws JsonValidationException, ConfigNotFoundException, IOException { - - if (connectionCreate.getGeography() != null) { - return ApiPojoConverters.toPersistenceGeography(connectionCreate.getGeography()); - } - - // connectionCreate didn't specify a geography, so use the workspace default geography if one exists - final UUID workspaceId = workspaceHelper.getWorkspaceForSourceId(connectionCreate.getSourceId()); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, 
true); - - if (workspace.getDefaultGeography() != null) { - return workspace.getDefaultGeography(); - } - - // if the workspace doesn't have a default geography, default to 'auto' - return Geography.AUTO; - } - - private void populateSyncFromLegacySchedule(final StandardSync standardSync, final ConnectionCreate connectionCreate) { - if (connectionCreate.getSchedule() != null) { - final Schedule schedule = new Schedule() - .withTimeUnit(ApiPojoConverters.toPersistenceTimeUnit(connectionCreate.getSchedule().getTimeUnit())) - .withUnits(connectionCreate.getSchedule().getUnits()); - // Populate the legacy field. - // TODO(https://github.com/airbytehq/airbyte/issues/11432): remove. - standardSync - .withManual(false) - .withSchedule(schedule); - // Also write into the new field. This one will be consumed if populated. - standardSync - .withScheduleType(ScheduleType.BASIC_SCHEDULE); - standardSync.withScheduleData(new ScheduleData().withBasicSchedule( - new BasicSchedule().withTimeUnit(ApiPojoConverters.toBasicScheduleTimeUnit(connectionCreate.getSchedule().getTimeUnit())) - .withUnits(connectionCreate.getSchedule().getUnits()))); - } else { - standardSync.withManual(true); - standardSync.withScheduleType(ScheduleType.MANUAL); - } - } - - private void trackNewConnection(final StandardSync standardSync) { - try { - final UUID workspaceId = workspaceHelper.getWorkspaceForConnectionIdIgnoreExceptions(standardSync.getConnectionId()); - final Builder metadataBuilder = generateMetadata(standardSync); - trackingClient.track(workspaceId, "New Connection - Backend", metadataBuilder.build()); - } catch (final Exception e) { - LOGGER.error("failed while reporting usage.", e); - } - } - - private Builder generateMetadata(final StandardSync standardSync) { - final Builder metadata = ImmutableMap.builder(); - - final UUID connectionId = standardSync.getConnectionId(); - final StandardSourceDefinition sourceDefinition = configRepository - 
.getSourceDefinitionFromConnection(connectionId); - final StandardDestinationDefinition destinationDefinition = configRepository - .getDestinationDefinitionFromConnection(connectionId); - - metadata.put("connector_source", sourceDefinition.getName()); - metadata.put("connector_source_definition_id", sourceDefinition.getSourceDefinitionId()); - metadata.put("connector_destination", destinationDefinition.getName()); - metadata.put("connector_destination_definition_id", destinationDefinition.getDestinationDefinitionId()); - - final String frequencyString; - if (standardSync.getScheduleType() != null) { - frequencyString = getFrequencyStringFromScheduleType(standardSync.getScheduleType(), standardSync.getScheduleData()); - } else if (standardSync.getManual()) { - frequencyString = "manual"; - } else { - final long intervalInMinutes = TimeUnit.SECONDS.toMinutes(ScheduleHelpers.getIntervalInSecond(standardSync.getSchedule())); - frequencyString = intervalInMinutes + " min"; - } - metadata.put("frequency", frequencyString); - return metadata; - } - - public ConnectionRead updateConnection(final ConnectionUpdate connectionPatch) - throws ConfigNotFoundException, IOException, JsonValidationException { - - final UUID connectionId = connectionPatch.getConnectionId(); - - LOGGER.debug("Starting updateConnection for connectionId {}...", connectionId); - LOGGER.debug("incoming connectionPatch: {}", connectionPatch); - - final StandardSync sync = configRepository.getStandardSync(connectionId); - LOGGER.debug("initial StandardSync: {}", sync); - - validateConnectionPatch(workspaceHelper, sync, connectionPatch); - - final ConnectionRead initialConnectionRead = ApiPojoConverters.internalToConnectionRead(sync); - LOGGER.debug("initial ConnectionRead: {}", initialConnectionRead); - - applyPatchToStandardSync(sync, connectionPatch); - - LOGGER.debug("patched StandardSync before persisting: {}", sync); - configRepository.writeStandardSync(sync); - - eventRunner.update(connectionId); - - 
final ConnectionRead updatedRead = buildConnectionRead(connectionId); - LOGGER.debug("final connectionRead: {}", updatedRead); - - return updatedRead; - } - - /** - * Modifies the given StandardSync by applying changes from a partially-filled ConnectionUpdate - * patch. Any fields that are null in the patch will be left unchanged. - */ - private static void applyPatchToStandardSync(final StandardSync sync, final ConnectionUpdate patch) throws JsonValidationException { - // update the sync's schedule using the patch's scheduleType and scheduleData. validations occur in - // the helper to ensure both fields - // make sense together. - if (patch.getScheduleType() != null) { - ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(sync, patch.getScheduleType(), patch.getScheduleData()); - } - - // the rest of the fields are straightforward to patch. If present in the patch, set the field to - // the value - // in the patch. Otherwise, leave the field unchanged. - - if (patch.getSyncCatalog() != null) { - sync.setCatalog(CatalogConverter.toConfiguredProtocol(patch.getSyncCatalog())); - sync.withFieldSelectionData(CatalogConverter.getFieldSelectionData(patch.getSyncCatalog())); - } - - if (patch.getName() != null) { - sync.setName(patch.getName()); - } - - if (patch.getNamespaceDefinition() != null) { - sync.setNamespaceDefinition(Enums.convertTo(patch.getNamespaceDefinition(), NamespaceDefinitionType.class)); - } - - if (patch.getNamespaceFormat() != null) { - sync.setNamespaceFormat(patch.getNamespaceFormat()); - } - - if (patch.getPrefix() != null) { - sync.setPrefix(patch.getPrefix()); - } - - if (patch.getOperationIds() != null) { - sync.setOperationIds(patch.getOperationIds()); - } - - if (patch.getStatus() != null) { - sync.setStatus(ApiPojoConverters.toPersistenceStatus(patch.getStatus())); - } - - if (patch.getSourceCatalogId() != null) { - sync.setSourceCatalogId(patch.getSourceCatalogId()); - } - - if (patch.getResourceRequirements() != null) { - 
sync.setResourceRequirements(ApiPojoConverters.resourceRequirementsToInternal(patch.getResourceRequirements())); - } - - if (patch.getGeography() != null) { - sync.setGeography(ApiPojoConverters.toPersistenceGeography(patch.getGeography())); - } - - if (patch.getBreakingChange() != null) { - sync.setBreakingChange(patch.getBreakingChange()); - } - - if (patch.getNotifySchemaChanges() != null) { - sync.setNotifySchemaChanges(patch.getNotifySchemaChanges()); - } - - if (patch.getNonBreakingChangesPreference() != null) { - sync.setNonBreakingChangesPreference(ApiPojoConverters.toPersistenceNonBreakingChangesPreference(patch.getNonBreakingChangesPreference())); - } - } - - private void validateConnectionPatch(final WorkspaceHelper workspaceHelper, final StandardSync persistedSync, final ConnectionUpdate patch) { - // sanity check that we're updating the right connection - Preconditions.checkArgument(persistedSync.getConnectionId().equals(patch.getConnectionId())); - - // make sure all operationIds belong to the same workspace as the connection - ConnectionHelper.validateWorkspace( - workspaceHelper, persistedSync.getSourceId(), persistedSync.getDestinationId(), patch.getOperationIds()); - - // make sure the incoming schedule update is sensible. Note that schedule details are further - // validated in ConnectionScheduleHelper, this just - // sanity checks that fields are populated when they should be. - Preconditions.checkArgument( - patch.getSchedule() == null, - "ConnectionUpdate should only make changes to the schedule by setting scheduleType and scheduleData. 
'schedule' is no longer supported."); - - if (patch.getScheduleType() == null) { - Preconditions.checkArgument( - patch.getScheduleData() == null, - "ConnectionUpdate should not include any scheduleData without also specifying a valid scheduleType."); - } else { - switch (patch.getScheduleType()) { - case MANUAL -> Preconditions.checkArgument( - patch.getScheduleData() == null, - "ConnectionUpdate should not include any scheduleData when setting the Connection scheduleType to MANUAL."); - case BASIC -> Preconditions.checkArgument( - patch.getScheduleData() != null, - "ConnectionUpdate should include scheduleData when setting the Connection scheduleType to BASIC."); - case CRON -> Preconditions.checkArgument( - patch.getScheduleData() != null, - "ConnectionUpdate should include scheduleData when setting the Connection scheduleType to CRON."); - - // shouldn't be possible to reach this case - default -> throw new RuntimeException("Unrecognized scheduleType!"); - } - } - } - - public ConnectionReadList listConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - return listConnectionsForWorkspace(workspaceIdRequestBody, false); - } - - public ConnectionReadList listAllConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - return listConnectionsForWorkspace(workspaceIdRequestBody, true); - } - - public ConnectionReadList listConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody, final boolean includeDeleted) - throws JsonValidationException, IOException, ConfigNotFoundException { - final List connectionReads = Lists.newArrayList(); - - for (final StandardSync standardSync : configRepository.listWorkspaceStandardSyncs(workspaceIdRequestBody.getWorkspaceId(), includeDeleted)) { - connectionReads.add(ApiPojoConverters.internalToConnectionRead(standardSync)); - } 
- - return new ConnectionReadList().connections(connectionReads); - } - - public ConnectionReadList listConnectionsForSource(final UUID sourceId, final boolean includeDeleted) throws IOException { - final List connectionReads = Lists.newArrayList(); - for (final StandardSync standardSync : configRepository.listConnectionsBySource(sourceId, includeDeleted)) { - connectionReads.add(ApiPojoConverters.internalToConnectionRead(standardSync)); - } - return new ConnectionReadList().connections(connectionReads); - } - - public ConnectionReadList listConnections() throws JsonValidationException, ConfigNotFoundException, IOException { - final List connectionReads = Lists.newArrayList(); - - for (final StandardSync standardSync : configRepository.listStandardSyncs()) { - if (standardSync.getStatus() == StandardSync.Status.DEPRECATED) { - continue; - } - connectionReads.add(ApiPojoConverters.internalToConnectionRead(standardSync)); - } - - return new ConnectionReadList().connections(connectionReads); - } - - public ConnectionRead getConnection(final UUID connectionId) - throws JsonValidationException, IOException, ConfigNotFoundException { - return buildConnectionRead(connectionId); - } - - public CatalogDiff getDiff(final AirbyteCatalog oldCatalog, final AirbyteCatalog newCatalog, final ConfiguredAirbyteCatalog configuredCatalog) - throws JsonValidationException { - return new CatalogDiff().transforms(CatalogHelpers.getCatalogDiff( - CatalogHelpers.configuredCatalogToCatalog(CatalogConverter.toProtocolKeepAllStreams(oldCatalog)), - CatalogHelpers.configuredCatalogToCatalog(CatalogConverter.toProtocolKeepAllStreams(newCatalog)), configuredCatalog) - .stream() - .map(CatalogDiffConverters::streamTransformToApi) - .toList()); - } - - /** - * Returns the list of the streamDescriptor that have their config updated. 
- * - * @param oldCatalog the old catalog - * @param newCatalog the new catalog - * @return the list of StreamDescriptor that have their configuration changed - */ - public Set getConfigurationDiff(final AirbyteCatalog oldCatalog, final AirbyteCatalog newCatalog) { - final Map oldStreams = catalogToPerStreamConfiguration(oldCatalog); - final Map newStreams = catalogToPerStreamConfiguration(newCatalog); - - final Set streamWithDifferentConf = new HashSet<>(); - - newStreams.forEach(((streamDescriptor, airbyteStreamConfiguration) -> { - final AirbyteStreamConfiguration oldConfig = oldStreams.get(streamDescriptor); - - if (oldConfig != null && haveConfigChange(oldConfig, airbyteStreamConfiguration)) { - streamWithDifferentConf.add(streamDescriptor); - } - })); - - return streamWithDifferentConf; - } - - private boolean haveConfigChange(final AirbyteStreamConfiguration oldConfig, final AirbyteStreamConfiguration newConfig) { - final List oldCursors = oldConfig.getCursorField(); - final List newCursors = newConfig.getCursorField(); - final boolean hasCursorChanged = !(oldCursors.equals(newCursors)); - - final boolean hasSyncModeChanged = !oldConfig.getSyncMode().equals(newConfig.getSyncMode()); - - final boolean hasDestinationSyncModeChanged = !oldConfig.getDestinationSyncMode().equals(newConfig.getDestinationSyncMode()); - - final Set> convertedOldPrimaryKey = new HashSet<>(oldConfig.getPrimaryKey()); - final Set> convertedNewPrimaryKey = new HashSet<>(newConfig.getPrimaryKey()); - final boolean hasPrimaryKeyChanged = !(convertedOldPrimaryKey.equals(convertedNewPrimaryKey)); - - return hasCursorChanged || hasSyncModeChanged || hasDestinationSyncModeChanged || hasPrimaryKeyChanged; - } - - private Map catalogToPerStreamConfiguration(final AirbyteCatalog catalog) { - return catalog.getStreams().stream().collect(Collectors.toMap(stream -> new StreamDescriptor() - .name(stream.getStream().getName()) - .namespace(stream.getStream().getNamespace()), - stream -> 
stream.getConfig())); - } - - public Optional getConnectionAirbyteCatalog(final UUID connectionId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSync connection = configRepository.getStandardSync(connectionId); - if (connection.getSourceCatalogId() == null) { - return Optional.empty(); - } - final ActorCatalog catalog = configRepository.getActorCatalogById(connection.getSourceCatalogId()); - final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromSource(connection.getSourceId()); - final io.airbyte.protocol.models.AirbyteCatalog jsonCatalog = Jsons.object(catalog.getCatalog(), io.airbyte.protocol.models.AirbyteCatalog.class); - return Optional.of(CatalogConverter.toApi(jsonCatalog, sourceDefinition)); - } - - public ConnectionReadList searchConnections(final ConnectionSearch connectionSearch) - throws JsonValidationException, IOException, ConfigNotFoundException { - final List reads = Lists.newArrayList(); - for (final StandardSync standardSync : configRepository.listStandardSyncs()) { - if (standardSync.getStatus() != StandardSync.Status.DEPRECATED) { - final ConnectionRead connectionRead = ApiPojoConverters.internalToConnectionRead(standardSync); - if (matchSearch(connectionSearch, connectionRead)) { - reads.add(connectionRead); - } - } - } - - return new ConnectionReadList().connections(reads); - } - - public boolean matchSearch(final ConnectionSearch connectionSearch, final ConnectionRead connectionRead) - throws JsonValidationException, ConfigNotFoundException, IOException { - - final SourceConnection sourceConnection = configRepository.getSourceConnection(connectionRead.getSourceId()); - final StandardSourceDefinition sourceDefinition = - configRepository.getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); - final SourceRead sourceRead = SourceHandler.toSourceRead(sourceConnection, sourceDefinition); - - final DestinationConnection destinationConnection = 
configRepository.getDestinationConnection(connectionRead.getDestinationId()); - final StandardDestinationDefinition destinationDefinition = - configRepository.getStandardDestinationDefinition(destinationConnection.getDestinationDefinitionId()); - final DestinationRead destinationRead = DestinationHandler.toDestinationRead(destinationConnection, destinationDefinition); - - final ConnectionMatcher connectionMatcher = new ConnectionMatcher(connectionSearch); - final ConnectionRead connectionReadFromSearch = connectionMatcher.match(connectionRead); - - return (connectionReadFromSearch == null || connectionReadFromSearch.equals(connectionRead)) && - matchSearch(connectionSearch.getSource(), sourceRead) && - matchSearch(connectionSearch.getDestination(), destinationRead); - } - - // todo (cgardens) - make this static. requires removing one bad dependency in SourceHandlerTest - public boolean matchSearch(final SourceSearch sourceSearch, final SourceRead sourceRead) { - final SourceMatcher sourceMatcher = new SourceMatcher(sourceSearch); - final SourceRead sourceReadFromSearch = sourceMatcher.match(sourceRead); - - return (sourceReadFromSearch == null || sourceReadFromSearch.equals(sourceRead)); - } - - // todo (cgardens) - make this static. 
requires removing one bad dependency in - // DestinationHandlerTest - public boolean matchSearch(final DestinationSearch destinationSearch, final DestinationRead destinationRead) { - final DestinationMatcher destinationMatcher = new DestinationMatcher(destinationSearch); - final DestinationRead destinationReadFromSearch = destinationMatcher.match(destinationRead); - - return (destinationReadFromSearch == null || destinationReadFromSearch.equals(destinationRead)); - } - - public void deleteConnection(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { - connectionHelper.deleteConnection(connectionId); - eventRunner.forceDeleteConnection(connectionId); - } - - private ConnectionRead buildConnectionRead(final UUID connectionId) - throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardSync standardSync = configRepository.getStandardSync(connectionId); - return ApiPojoConverters.internalToConnectionRead(standardSync); - } - - private static String getFrequencyStringFromScheduleType(final ScheduleType scheduleType, final ScheduleData scheduleData) { - switch (scheduleType) { - case MANUAL -> { - return "manual"; - } - case BASIC_SCHEDULE -> { - return TimeUnit.SECONDS.toMinutes(ScheduleHelpers.getIntervalInSecond(scheduleData.getBasicSchedule())) + " min"; - } - case CRON -> { - // TODO(https://github.com/airbytehq/airbyte/issues/2170): consider something more detailed. 
- return "cron"; - } - default -> { - throw new RuntimeException("Unexpected schedule type"); - } - } - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandler.java deleted file mode 100644 index 91fa73011270..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandler.java +++ /dev/null @@ -1,334 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.api.model.generated.CustomDestinationDefinitionCreate; -import io.airbyte.api.model.generated.DestinationDefinitionCreate; -import io.airbyte.api.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionRead; -import io.airbyte.api.model.generated.DestinationDefinitionReadList; -import io.airbyte.api.model.generated.DestinationDefinitionUpdate; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionRead; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionReadList; -import io.airbyte.api.model.generated.ReleaseStage; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.server.ServerConstants; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.commons.server.converters.SpecFetcher; -import io.airbyte.commons.server.errors.IdNotFoundKnownException; -import io.airbyte.commons.server.errors.InternalServerKnownException; -import io.airbyte.commons.server.errors.UnsupportedProtocolVersionException; -import 
io.airbyte.commons.server.scheduler.SynchronousResponse; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.commons.server.services.AirbyteGithubStore; -import io.airbyte.commons.util.MoreLists; -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.ActorType; -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.time.LocalDate; -import java.util.List; -import java.util.Map.Entry; -import java.util.Objects; -import java.util.UUID; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -@SuppressWarnings("PMD.AvoidCatchingNPE") -@Singleton -public class DestinationDefinitionsHandler { - - private final ConfigRepository configRepository; - private final Supplier uuidSupplier; - private final SynchronousSchedulerClient schedulerSynchronousClient; - private final AirbyteGithubStore githubStore; - private final DestinationHandler destinationHandler; - private final AirbyteProtocolVersionRange protocolVersionRange; - - @VisibleForTesting - public DestinationDefinitionsHandler(final ConfigRepository configRepository, - final Supplier uuidSupplier, - final SynchronousSchedulerClient schedulerSynchronousClient, - final AirbyteGithubStore githubStore, - final DestinationHandler destinationHandler, - final AirbyteProtocolVersionRange protocolVersionRange) { - 
this.configRepository = configRepository; - this.uuidSupplier = uuidSupplier; - this.schedulerSynchronousClient = schedulerSynchronousClient; - this.githubStore = githubStore; - this.destinationHandler = destinationHandler; - this.protocolVersionRange = protocolVersionRange; - } - - // This should be deleted when cloud is migrated to micronaut - @Deprecated(forRemoval = true) - public DestinationDefinitionsHandler(final ConfigRepository configRepository, - final SynchronousSchedulerClient schedulerSynchronousClient, - final DestinationHandler destinationHandler) { - this.configRepository = configRepository; - this.uuidSupplier = UUID::randomUUID; - this.schedulerSynchronousClient = schedulerSynchronousClient; - this.githubStore = AirbyteGithubStore.production(); - this.destinationHandler = destinationHandler; - final Configs configs = new EnvConfigs(); - this.protocolVersionRange = new AirbyteProtocolVersionRange(configs.getAirbyteProtocolVersionMin(), configs.getAirbyteProtocolVersionMax()); - } - - @VisibleForTesting - static DestinationDefinitionRead buildDestinationDefinitionRead(final StandardDestinationDefinition standardDestinationDefinition) { - try { - - return new DestinationDefinitionRead() - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .name(standardDestinationDefinition.getName()) - .dockerRepository(standardDestinationDefinition.getDockerRepository()) - .dockerImageTag(standardDestinationDefinition.getDockerImageTag()) - .documentationUrl(new URI(standardDestinationDefinition.getDocumentationUrl())) - .icon(loadIcon(standardDestinationDefinition.getIcon())) - .protocolVersion(standardDestinationDefinition.getProtocolVersion()) - .releaseStage(getReleaseStage(standardDestinationDefinition)) - .releaseDate(getReleaseDate(standardDestinationDefinition)) - .supportsDbt(Objects.requireNonNullElse(standardDestinationDefinition.getSupportsDbt(), false)) - .normalizationConfig( - 
ApiPojoConverters.normalizationDestinationDefinitionConfigToApi(standardDestinationDefinition.getNormalizationConfig())) - .resourceRequirements(ApiPojoConverters.actorDefResourceReqsToApi(standardDestinationDefinition.getResourceRequirements())); - } catch (final URISyntaxException | NullPointerException e) { - throw new InternalServerKnownException("Unable to process retrieved latest destination definitions list", e); - } - } - - private static ReleaseStage getReleaseStage(final StandardDestinationDefinition standardDestinationDefinition) { - if (standardDestinationDefinition.getReleaseStage() == null) { - return null; - } - return ReleaseStage.fromValue(standardDestinationDefinition.getReleaseStage().value()); - } - - private static LocalDate getReleaseDate(final StandardDestinationDefinition standardDestinationDefinition) { - if (standardDestinationDefinition.getReleaseDate() == null || standardDestinationDefinition.getReleaseDate().isBlank()) { - return null; - } - - return LocalDate.parse(standardDestinationDefinition.getReleaseDate()); - } - - public DestinationDefinitionReadList listDestinationDefinitions() throws IOException, JsonValidationException { - return toDestinationDefinitionReadList(configRepository.listStandardDestinationDefinitions(false)); - } - - private static DestinationDefinitionReadList toDestinationDefinitionReadList(final List defs) { - final List reads = defs.stream() - .map(DestinationDefinitionsHandler::buildDestinationDefinitionRead) - .collect(Collectors.toList()); - return new DestinationDefinitionReadList().destinationDefinitions(reads); - } - - public DestinationDefinitionReadList listLatestDestinationDefinitions() { - return toDestinationDefinitionReadList(getLatestDestinations()); - } - - private List getLatestDestinations() { - try { - return githubStore.getLatestDestinations(); - } catch (final InterruptedException e) { - throw new InternalServerKnownException("Request to retrieve latest destination definitions failed", e); - 
} - } - - public DestinationDefinitionReadList listDestinationDefinitionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws IOException { - return toDestinationDefinitionReadList(MoreLists.concat( - configRepository.listPublicDestinationDefinitions(false), - configRepository.listGrantedDestinationDefinitions(workspaceIdRequestBody.getWorkspaceId(), false))); - } - - public PrivateDestinationDefinitionReadList listPrivateDestinationDefinitions(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws IOException { - final List> standardDestinationDefinitionBooleanMap = - configRepository.listGrantableDestinationDefinitions(workspaceIdRequestBody.getWorkspaceId(), false); - return toPrivateDestinationDefinitionReadList(standardDestinationDefinitionBooleanMap); - } - - private static PrivateDestinationDefinitionReadList toPrivateDestinationDefinitionReadList( - final List> defs) { - final List reads = defs.stream() - .map(entry -> new PrivateDestinationDefinitionRead() - .destinationDefinition(buildDestinationDefinitionRead(entry.getKey())) - .granted(entry.getValue())) - .collect(Collectors.toList()); - return new PrivateDestinationDefinitionReadList().destinationDefinitions(reads); - } - - public DestinationDefinitionRead getDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - return buildDestinationDefinitionRead( - configRepository.getStandardDestinationDefinition(destinationDefinitionIdRequestBody.getDestinationDefinitionId())); - } - - public DestinationDefinitionRead getDestinationDefinitionForWorkspace( - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) - throws ConfigNotFoundException, IOException, JsonValidationException { - final UUID definitionId = destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId(); - final UUID workspaceId = 
destinationDefinitionIdWithWorkspaceId.getWorkspaceId(); - if (!configRepository.workspaceCanUseDefinition(definitionId, workspaceId)) { - throw new IdNotFoundKnownException("Cannot find the requested definition with given id for this workspace", definitionId.toString()); - } - return getDestinationDefinition(new DestinationDefinitionIdRequestBody().destinationDefinitionId(definitionId)); - } - - public DestinationDefinitionRead createCustomDestinationDefinition(final CustomDestinationDefinitionCreate customDestinationDefinitionCreate) - throws IOException { - final StandardDestinationDefinition destinationDefinition = destinationDefinitionFromCreate( - customDestinationDefinitionCreate.getDestinationDefinition()) - .withPublic(false) - .withCustom(true); - if (!protocolVersionRange.isSupported(new Version(destinationDefinition.getProtocolVersion()))) { - throw new UnsupportedProtocolVersionException(destinationDefinition.getProtocolVersion(), protocolVersionRange.min(), - protocolVersionRange.max()); - } - configRepository.writeCustomDestinationDefinition(destinationDefinition, customDestinationDefinitionCreate.getWorkspaceId()); - - return buildDestinationDefinitionRead(destinationDefinition); - } - - private StandardDestinationDefinition destinationDefinitionFromCreate(final DestinationDefinitionCreate destinationDefCreate) throws IOException { - final ConnectorSpecification spec = getSpecForImage( - destinationDefCreate.getDockerRepository(), - destinationDefCreate.getDockerImageTag(), - // Only custom connectors can be created via handlers. 
- true); - - final Version airbyteProtocolVersion = AirbyteProtocolVersion.getWithDefault(spec.getProtocolVersion()); - - final UUID id = uuidSupplier.get(); - final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() - .withDestinationDefinitionId(id) - .withDockerRepository(destinationDefCreate.getDockerRepository()) - .withDockerImageTag(destinationDefCreate.getDockerImageTag()) - .withDocumentationUrl(destinationDefCreate.getDocumentationUrl().toString()) - .withName(destinationDefCreate.getName()) - .withIcon(destinationDefCreate.getIcon()) - .withSpec(spec) - .withProtocolVersion(airbyteProtocolVersion.serialize()) - .withTombstone(false) - .withReleaseStage(StandardDestinationDefinition.ReleaseStage.CUSTOM) - .withResourceRequirements(ApiPojoConverters.actorDefResourceReqsToInternal(destinationDefCreate.getResourceRequirements())); - return destinationDefinition; - } - - public DestinationDefinitionRead updateDestinationDefinition(final DestinationDefinitionUpdate destinationDefinitionUpdate) - throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardDestinationDefinition currentDestination = configRepository - .getStandardDestinationDefinition(destinationDefinitionUpdate.getDestinationDefinitionId()); - - // specs are re-fetched from the container if the image tag has changed, or if the tag is "dev", - // to allow for easier iteration of dev images - final boolean specNeedsUpdate = !currentDestination.getDockerImageTag().equals(destinationDefinitionUpdate.getDockerImageTag()) - || ServerConstants.DEV_IMAGE_TAG.equals(destinationDefinitionUpdate.getDockerImageTag()); - final ConnectorSpecification spec = specNeedsUpdate - ? 
getSpecForImage(currentDestination.getDockerRepository(), destinationDefinitionUpdate.getDockerImageTag(), currentDestination.getCustom()) - : currentDestination.getSpec(); - final ActorDefinitionResourceRequirements updatedResourceReqs = destinationDefinitionUpdate.getResourceRequirements() != null - ? ApiPojoConverters.actorDefResourceReqsToInternal(destinationDefinitionUpdate.getResourceRequirements()) - : currentDestination.getResourceRequirements(); - - final Version airbyteProtocolVersion = AirbyteProtocolVersion.getWithDefault(spec.getProtocolVersion()); - if (!protocolVersionRange.isSupported(airbyteProtocolVersion)) { - throw new UnsupportedProtocolVersionException(airbyteProtocolVersion, protocolVersionRange.min(), protocolVersionRange.max()); - } - - final StandardDestinationDefinition newDestination = new StandardDestinationDefinition() - .withDestinationDefinitionId(currentDestination.getDestinationDefinitionId()) - .withDockerImageTag(destinationDefinitionUpdate.getDockerImageTag()) - .withDockerRepository(currentDestination.getDockerRepository()) - .withName(currentDestination.getName()) - .withDocumentationUrl(currentDestination.getDocumentationUrl()) - .withIcon(currentDestination.getIcon()) - .withNormalizationConfig(currentDestination.getNormalizationConfig()) - .withSupportsDbt(currentDestination.getSupportsDbt()) - .withSpec(spec) - .withProtocolVersion(airbyteProtocolVersion.serialize()) - .withTombstone(currentDestination.getTombstone()) - .withPublic(currentDestination.getPublic()) - .withCustom(currentDestination.getCustom()) - .withReleaseStage(currentDestination.getReleaseStage()) - .withReleaseDate(currentDestination.getReleaseDate()) - .withResourceRequirements(updatedResourceReqs); - - configRepository.writeStandardDestinationDefinition(newDestination); - configRepository.clearUnsupportedProtocolVersionFlag(newDestination.getDestinationDefinitionId(), ActorType.DESTINATION, protocolVersionRange); - return 
buildDestinationDefinitionRead(newDestination); - } - - public void deleteDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) - throws JsonValidationException, ConfigNotFoundException, IOException { - // "delete" all destinations associated with the destination definition as well. This will cascade - // to connections that depend on any deleted - // destinations. Delete destinations first in case a failure occurs mid-operation. - - final StandardDestinationDefinition persistedDestinationDefinition = - configRepository.getStandardDestinationDefinition(destinationDefinitionIdRequestBody.getDestinationDefinitionId()); - - for (final DestinationRead destinationRead : destinationHandler.listDestinationsForDestinationDefinition(destinationDefinitionIdRequestBody) - .getDestinations()) { - destinationHandler.deleteDestination(destinationRead); - } - - persistedDestinationDefinition.withTombstone(true); - configRepository.writeStandardDestinationDefinition(persistedDestinationDefinition); - } - - private ConnectorSpecification getSpecForImage(final String dockerRepository, final String imageTag, final boolean isCustomConnector) - throws IOException { - final String imageName = dockerRepository + ":" + imageTag; - final SynchronousResponse getSpecResponse = schedulerSynchronousClient.createGetSpecJob(imageName, isCustomConnector); - return SpecFetcher.getSpecFromJob(getSpecResponse); - } - - public static String loadIcon(final String name) { - try { - return name == null ? 
null : MoreResources.readResource("icons/" + name); - } catch (final Exception e) { - return null; - } - } - - public PrivateDestinationDefinitionRead grantDestinationDefinitionToWorkspace( - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardDestinationDefinition standardDestinationDefinition = - configRepository.getStandardDestinationDefinition(destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId()); - configRepository.writeActorDefinitionWorkspaceGrant( - destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId(), - destinationDefinitionIdWithWorkspaceId.getWorkspaceId()); - return new PrivateDestinationDefinitionRead() - .destinationDefinition(buildDestinationDefinitionRead(standardDestinationDefinition)) - .granted(true); - } - - public void revokeDestinationDefinitionFromWorkspace(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) - throws IOException { - configRepository.deleteActorDefinitionWorkspaceGrant( - destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId(), - destinationDefinitionIdWithWorkspaceId.getWorkspaceId()); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java deleted file mode 100644 index 13471b10397c..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java +++ /dev/null @@ -1,329 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.DestinationCloneConfiguration; -import io.airbyte.api.model.generated.DestinationCloneRequestBody; -import io.airbyte.api.model.generated.DestinationCreate; -import io.airbyte.api.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationReadList; -import io.airbyte.api.model.generated.DestinationSearch; -import io.airbyte.api.model.generated.DestinationSnippetRead; -import io.airbyte.api.model.generated.DestinationUpdate; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.converters.ConfigurationUpdate; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; -import io.airbyte.persistence.job.factory.OAuthConfigSupplier; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Inject; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.List; -import java.util.UUID; -import java.util.function.Supplier; - -@Singleton -public class DestinationHandler { - - private final ConnectionsHandler 
connectionsHandler; - private final Supplier uuidGenerator; - private final ConfigRepository configRepository; - private final SecretsRepositoryReader secretsRepositoryReader; - private final SecretsRepositoryWriter secretsRepositoryWriter; - private final JsonSchemaValidator validator; - private final ConfigurationUpdate configurationUpdate; - private final JsonSecretsProcessor secretsProcessor; - private final OAuthConfigSupplier oAuthConfigSupplier; - - @VisibleForTesting - DestinationHandler(final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final JsonSchemaValidator integrationSchemaValidation, - final ConnectionsHandler connectionsHandler, - final Supplier uuidGenerator, - final JsonSecretsProcessor secretsProcessor, - final ConfigurationUpdate configurationUpdate, - final OAuthConfigSupplier oAuthConfigSupplier) { - this.configRepository = configRepository; - this.secretsRepositoryReader = secretsRepositoryReader; - this.secretsRepositoryWriter = secretsRepositoryWriter; - validator = integrationSchemaValidation; - this.connectionsHandler = connectionsHandler; - this.uuidGenerator = uuidGenerator; - this.configurationUpdate = configurationUpdate; - this.secretsProcessor = secretsProcessor; - this.oAuthConfigSupplier = oAuthConfigSupplier; - } - - @Inject - public DestinationHandler(final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final JsonSchemaValidator integrationSchemaValidation, - final ConnectionsHandler connectionsHandler, - final OAuthConfigSupplier oAuthConfigSupplier) { - this( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - integrationSchemaValidation, - connectionsHandler, - UUID::randomUUID, - JsonSecretsProcessor.builder() - .copySecrets(true) - .build(), - new ConfigurationUpdate(configRepository, 
secretsRepositoryReader), - oAuthConfigSupplier); - } - - public DestinationRead createDestination(final DestinationCreate destinationCreate) - throws ConfigNotFoundException, IOException, JsonValidationException { - // validate configuration - final ConnectorSpecification spec = getSpec(destinationCreate.getDestinationDefinitionId()); - validateDestination(spec, destinationCreate.getConnectionConfiguration()); - - // persist - final UUID destinationId = uuidGenerator.get(); - persistDestinationConnection( - destinationCreate.getName() != null ? destinationCreate.getName() : "default", - destinationCreate.getDestinationDefinitionId(), - destinationCreate.getWorkspaceId(), - destinationId, - destinationCreate.getConnectionConfiguration(), - false); - - // read configuration from db - return buildDestinationRead(configRepository.getDestinationConnection(destinationId), spec); - } - - public void deleteDestination(final DestinationIdRequestBody destinationIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - // get existing implementation - final DestinationRead destination = buildDestinationRead(destinationIdRequestBody.getDestinationId()); - - deleteDestination(destination); - } - - public void deleteDestination(final DestinationRead destination) - throws JsonValidationException, IOException, ConfigNotFoundException { - // disable all connections associated with this destination - // Delete connections first in case it fails in the middle, destination will still be visible - final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(destination.getWorkspaceId()); - for (final ConnectionRead connectionRead : connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody).getConnections()) { - if (!connectionRead.getDestinationId().equals(destination.getDestinationId())) { - continue; - } - - connectionsHandler.deleteConnection(connectionRead.getConnectionId()); - } - - final var fullConfig 
= secretsRepositoryReader.getDestinationConnectionWithSecrets(destination.getDestinationId()).getConfiguration(); - - // persist - persistDestinationConnection( - destination.getName(), - destination.getDestinationDefinitionId(), - destination.getWorkspaceId(), - destination.getDestinationId(), - fullConfig, - true); - } - - public DestinationRead updateDestination(final DestinationUpdate destinationUpdate) - throws ConfigNotFoundException, IOException, JsonValidationException { - // get existing implementation - final DestinationConnection updatedDestination = configurationUpdate - .destination(destinationUpdate.getDestinationId(), destinationUpdate.getName(), destinationUpdate.getConnectionConfiguration()); - - final ConnectorSpecification spec = getSpec(updatedDestination.getDestinationDefinitionId()); - - // validate configuration - validateDestination(spec, updatedDestination.getConfiguration()); - - // persist - persistDestinationConnection( - updatedDestination.getName(), - updatedDestination.getDestinationDefinitionId(), - updatedDestination.getWorkspaceId(), - updatedDestination.getDestinationId(), - updatedDestination.getConfiguration(), - updatedDestination.getTombstone()); - - // read configuration from db - return buildDestinationRead( - configRepository.getDestinationConnection(destinationUpdate.getDestinationId()), spec); - } - - public DestinationRead getDestination(final DestinationIdRequestBody destinationIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - return buildDestinationRead(destinationIdRequestBody.getDestinationId()); - } - - public DestinationRead cloneDestination(final DestinationCloneRequestBody destinationCloneRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - // read destination configuration from db - final DestinationRead destinationToClone = buildDestinationReadWithSecrets(destinationCloneRequestBody.getDestinationCloneId()); - final 
DestinationCloneConfiguration destinationCloneConfiguration = destinationCloneRequestBody.getDestinationConfiguration(); - - final String copyText = " (Copy)"; - final String destinationName = destinationToClone.getName() + copyText; - - final DestinationCreate destinationCreate = new DestinationCreate() - .name(destinationName) - .destinationDefinitionId(destinationToClone.getDestinationDefinitionId()) - .connectionConfiguration(destinationToClone.getConnectionConfiguration()) - .workspaceId(destinationToClone.getWorkspaceId()); - - if (destinationCloneConfiguration != null) { - if (destinationCloneConfiguration.getName() != null) { - destinationCreate.name(destinationCloneConfiguration.getName()); - } - - if (destinationCloneConfiguration.getConnectionConfiguration() != null) { - destinationCreate.connectionConfiguration(destinationCloneConfiguration.getConnectionConfiguration()); - } - } - - return createDestination(destinationCreate); - } - - public DestinationReadList listDestinationsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - - final List reads = Lists.newArrayList(); - for (final DestinationConnection dci : configRepository.listWorkspaceDestinationConnection(workspaceIdRequestBody.getWorkspaceId())) { - reads.add(buildDestinationRead(dci)); - } - return new DestinationReadList().destinations(reads); - } - - public DestinationReadList listDestinationsForDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - final List reads = Lists.newArrayList(); - - for (final DestinationConnection destinationConnection : configRepository - .listDestinationsForDefinition(destinationDefinitionIdRequestBody.getDestinationDefinitionId())) { - reads.add(buildDestinationRead(destinationConnection)); - } - - return new DestinationReadList().destinations(reads); - } - 
- public DestinationReadList searchDestinations(final DestinationSearch destinationSearch) - throws ConfigNotFoundException, IOException, JsonValidationException { - final List reads = Lists.newArrayList(); - - for (final DestinationConnection dci : configRepository.listDestinationConnection()) { - if (!dci.getTombstone()) { - final DestinationRead destinationRead = buildDestinationRead(dci); - if (connectionsHandler.matchSearch(destinationSearch, destinationRead)) { - reads.add(destinationRead); - } - } - } - - return new DestinationReadList().destinations(reads); - } - - private void validateDestination(final ConnectorSpecification spec, final JsonNode configuration) throws JsonValidationException { - validator.ensure(spec.getConnectionSpecification(), configuration); - } - - public ConnectorSpecification getSpec(final UUID destinationDefinitionId) - throws JsonValidationException, IOException, ConfigNotFoundException { - return configRepository.getStandardDestinationDefinition(destinationDefinitionId).getSpec(); - } - - private void persistDestinationConnection(final String name, - final UUID destinationDefinitionId, - final UUID workspaceId, - final UUID destinationId, - final JsonNode configurationJson, - final boolean tombstone) - throws JsonValidationException, IOException, ConfigNotFoundException { - final JsonNode oAuthMaskedConfigurationJson = - oAuthConfigSupplier.maskDestinationOAuthParameters(destinationDefinitionId, workspaceId, configurationJson); - final DestinationConnection destinationConnection = new DestinationConnection() - .withName(name) - .withDestinationDefinitionId(destinationDefinitionId) - .withWorkspaceId(workspaceId) - .withDestinationId(destinationId) - .withConfiguration(oAuthMaskedConfigurationJson) - .withTombstone(tombstone); - secretsRepositoryWriter.writeDestinationConnection(destinationConnection, getSpec(destinationDefinitionId)); - } - - private DestinationRead buildDestinationRead(final UUID destinationId) throws 
JsonValidationException, IOException, ConfigNotFoundException { - return buildDestinationRead(configRepository.getDestinationConnection(destinationId)); - } - - private DestinationRead buildDestinationRead(final DestinationConnection destinationConnection) - throws JsonValidationException, IOException, ConfigNotFoundException { - final ConnectorSpecification spec = getSpec(destinationConnection.getDestinationDefinitionId()); - return buildDestinationRead(destinationConnection, spec); - } - - private DestinationRead buildDestinationRead(final DestinationConnection destinationConnection, final ConnectorSpecification spec) - throws ConfigNotFoundException, IOException, JsonValidationException { - - // remove secrets from config before returning the read - final DestinationConnection dci = Jsons.clone(destinationConnection); - dci.setConfiguration(secretsProcessor.prepareSecretsForOutput(dci.getConfiguration(), spec.getConnectionSpecification())); - - final StandardDestinationDefinition standardDestinationDefinition = - configRepository.getStandardDestinationDefinition(dci.getDestinationDefinitionId()); - return toDestinationRead(dci, standardDestinationDefinition); - } - - private DestinationRead buildDestinationReadWithSecrets(final UUID destinationId) - throws ConfigNotFoundException, IOException, JsonValidationException { - - // remove secrets from config before returning the read - final DestinationConnection dci = Jsons.clone(secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationId)); - final StandardDestinationDefinition standardDestinationDefinition = - configRepository.getStandardDestinationDefinition(dci.getDestinationDefinitionId()); - return toDestinationRead(dci, standardDestinationDefinition); - } - - protected static DestinationRead toDestinationRead(final DestinationConnection destinationConnection, - final StandardDestinationDefinition standardDestinationDefinition) { - return new DestinationRead() - 
.destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .destinationId(destinationConnection.getDestinationId()) - .workspaceId(destinationConnection.getWorkspaceId()) - .destinationDefinitionId(destinationConnection.getDestinationDefinitionId()) - .connectionConfiguration(destinationConnection.getConfiguration()) - .name(destinationConnection.getName()) - .destinationName(standardDestinationDefinition.getName()) - .icon(DestinationDefinitionsHandler.loadIcon(standardDestinationDefinition.getIcon())); - } - - protected static DestinationSnippetRead toDestinationSnippetRead(final DestinationConnection destinationConnection, - final StandardDestinationDefinition standardDestinationDefinition) { - return new DestinationSnippetRead() - .destinationId(destinationConnection.getDestinationId()) - .name(destinationConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .destinationName(standardDestinationDefinition.getName()) - .icon(DestinationDefinitionsHandler.loadIcon(standardDestinationDefinition.getIcon())); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/HealthCheckHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/HealthCheckHandler.java deleted file mode 100644 index fc618c2dcdb5..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/HealthCheckHandler.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import io.airbyte.api.model.generated.HealthCheckRead; -import io.airbyte.config.persistence.ConfigRepository; -import jakarta.inject.Named; -import jakarta.inject.Singleton; - -@Singleton -public class HealthCheckHandler { - - private final ConfigRepository repository; - - public HealthCheckHandler(@Named("configRepository") final ConfigRepository repository) { - this.repository = repository; - } - - public HealthCheckRead health() { - return new HealthCheckRead().available(repository.healthCheck()); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java deleted file mode 100644 index e3a7d01665ef..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java +++ /dev/null @@ -1,305 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import com.google.common.base.Preconditions; -import io.airbyte.api.model.generated.AttemptInfoRead; -import io.airbyte.api.model.generated.AttemptNormalizationStatusReadList; -import io.airbyte.api.model.generated.AttemptRead; -import io.airbyte.api.model.generated.AttemptStats; -import io.airbyte.api.model.generated.AttemptStreamStats; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.model.generated.DestinationDefinitionRead; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.JobDebugInfoRead; -import io.airbyte.api.model.generated.JobDebugRead; -import io.airbyte.api.model.generated.JobIdRequestBody; -import io.airbyte.api.model.generated.JobInfoLightRead; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.JobListRequestBody; -import io.airbyte.api.model.generated.JobOptionalRead; -import io.airbyte.api.model.generated.JobRead; -import io.airbyte.api.model.generated.JobReadList; -import io.airbyte.api.model.generated.JobWithAttemptsRead; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceDefinitionRead; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.converters.JobConverter; -import io.airbyte.commons.server.converters.WorkflowStateConverter; -import io.airbyte.commons.temporal.TemporalClient; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import 
io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.JobPersistence.JobAttemptPair; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - -@Singleton -@Slf4j -public class JobHistoryHandler { - - private final ConnectionsHandler connectionsHandler; - private final SourceHandler sourceHandler; - private final DestinationHandler destinationHandler; - private final SourceDefinitionsHandler sourceDefinitionsHandler; - private final DestinationDefinitionsHandler destinationDefinitionsHandler; - public static final int DEFAULT_PAGE_SIZE = 200; - private final JobPersistence jobPersistence; - private final JobConverter jobConverter; - private final WorkflowStateConverter workflowStateConverter; - private final AirbyteVersion airbyteVersion; - private final TemporalClient temporalClient; - - public JobHistoryHandler(final JobPersistence jobPersistence, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - final ConnectionsHandler connectionsHandler, - final SourceHandler sourceHandler, - final SourceDefinitionsHandler sourceDefinitionsHandler, - final DestinationHandler destinationHandler, - final DestinationDefinitionsHandler destinationDefinitionsHandler, - final AirbyteVersion airbyteVersion, - final TemporalClient temporalClient) { - jobConverter = new JobConverter(workerEnvironment, logConfigs); - workflowStateConverter = new WorkflowStateConverter(); - this.jobPersistence = jobPersistence; - this.connectionsHandler = 
connectionsHandler; - this.sourceHandler = sourceHandler; - this.sourceDefinitionsHandler = sourceDefinitionsHandler; - this.destinationHandler = destinationHandler; - this.destinationDefinitionsHandler = destinationDefinitionsHandler; - this.airbyteVersion = airbyteVersion; - this.temporalClient = temporalClient; - } - - @Deprecated(forRemoval = true) - public JobHistoryHandler(final JobPersistence jobPersistence, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - final ConnectionsHandler connectionsHandler, - final SourceHandler sourceHandler, - final SourceDefinitionsHandler sourceDefinitionsHandler, - final DestinationHandler destinationHandler, - final DestinationDefinitionsHandler destinationDefinitionsHandler, - final AirbyteVersion airbyteVersion) { - this(jobPersistence, workerEnvironment, logConfigs, connectionsHandler, sourceHandler, sourceDefinitionsHandler, destinationHandler, - destinationDefinitionsHandler, airbyteVersion, null); - } - - @SuppressWarnings("UnstableApiUsage") - public JobReadList listJobsFor(final JobListRequestBody request) throws IOException { - Preconditions.checkNotNull(request.getConfigTypes(), "configType cannot be null."); - Preconditions.checkState(!request.getConfigTypes().isEmpty(), "Must include at least one configType."); - - final Set configTypes = request.getConfigTypes() - .stream() - .map(type -> Enums.convertTo(type, JobConfig.ConfigType.class)) - .collect(Collectors.toSet()); - final String configId = request.getConfigId(); - - final int pageSize = (request.getPagination() != null && request.getPagination().getPageSize() != null) ? 
request.getPagination().getPageSize() - : DEFAULT_PAGE_SIZE; - final List jobs; - - if (request.getIncludingJobId() != null) { - jobs = jobPersistence.listJobsIncludingId(configTypes, configId, request.getIncludingJobId(), pageSize); - } else { - jobs = jobPersistence.listJobs(configTypes, configId, pageSize, - (request.getPagination() != null && request.getPagination().getRowOffset() != null) ? request.getPagination().getRowOffset() : 0); - } - - final List jobReads = jobs.stream().map(JobConverter::getJobWithAttemptsRead).collect(Collectors.toList()); - final var jobIds = jobReads.stream().map(r -> r.getJob().getId()).toList(); - final Map stats = jobPersistence.getAttemptStats(jobIds); - for (final JobWithAttemptsRead jwar : jobReads) { - for (final AttemptRead a : jwar.getAttempts()) { - final var stat = stats.get(new JobAttemptPair(jwar.getJob().getId(), a.getId().intValue())); - if (stat == null) { - log.error("Missing stats for job {} attempt {}", jwar.getJob().getId(), a.getId().intValue()); - continue; - } - - hydrateWithStats(a, stat); - } - } - - final Long totalJobCount = jobPersistence.getJobCount(configTypes, configId); - return new JobReadList().jobs(jobReads).totalJobCount(totalJobCount); - } - - /** - * Retrieve stats for a given job id and attempt number and hydrate the api model with the retrieved - * information. - * - * @param jobId the job the attempt belongs to. Used as an index to retrieve stats. - * @param a the attempt to hydrate stats for. - */ - private void hydrateWithStats(final AttemptRead a, final JobPersistence.AttemptStats attemptStats) { - a.setTotalStats(new AttemptStats()); - - final var combinedStats = attemptStats.combinedStats(); - if (combinedStats == null) { - // If overall stats are missing, assume stream stats are also missing, since overall stats are - // easier to produce than stream stats. Exit early. 
- return; - } - - a.getTotalStats() - .estimatedBytes(combinedStats.getEstimatedBytes()) - .estimatedRecords(combinedStats.getEstimatedRecords()) - .bytesEmitted(combinedStats.getBytesEmitted()) - .recordsEmitted(combinedStats.getRecordsEmitted()) - .recordsCommitted(combinedStats.getRecordsCommitted()); - - final var streamStats = attemptStats.perStreamStats().stream().map(s -> new AttemptStreamStats() - .streamName(s.getStreamName()) - .streamNamespace(s.getStreamNamespace()) - .stats(new AttemptStats() - .bytesEmitted(s.getStats().getBytesEmitted()) - .recordsEmitted(s.getStats().getRecordsEmitted()) - .recordsCommitted(s.getStats().getRecordsCommitted()) - .estimatedBytes(s.getStats().getEstimatedBytes()) - .estimatedRecords(s.getStats().getEstimatedRecords()))) - .collect(Collectors.toList()); - a.setStreamStats(streamStats); - } - - public JobInfoRead getJobInfo(final JobIdRequestBody jobIdRequestBody) throws IOException { - final Job job = jobPersistence.getJob(jobIdRequestBody.getId()); - return jobConverter.getJobInfoRead(job); - } - - public JobInfoLightRead getJobInfoLight(final JobIdRequestBody jobIdRequestBody) throws IOException { - final Job job = jobPersistence.getJob(jobIdRequestBody.getId()); - return jobConverter.getJobInfoLightRead(job); - } - - public JobOptionalRead getLastReplicationJob(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException { - Optional job = jobPersistence.getLastReplicationJob(connectionIdRequestBody.getConnectionId()); - if (job.isEmpty()) { - return new JobOptionalRead(); - } else { - return jobConverter.getJobOptionalRead(job.get()); - } - - } - - public JobDebugInfoRead getJobDebugInfo(final JobIdRequestBody jobIdRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - final Job job = jobPersistence.getJob(jobIdRequestBody.getId()); - final JobInfoRead jobinfoRead = jobConverter.getJobInfoRead(job); - - for (final AttemptInfoRead a : jobinfoRead.getAttempts()) { - final 
int attemptNumber = a.getAttempt().getId().intValue(); - final var attemptStats = jobPersistence.getAttemptStats(job.getId(), attemptNumber); - hydrateWithStats(a.getAttempt(), attemptStats); - } - - final JobDebugInfoRead jobDebugInfoRead = buildJobDebugInfoRead(jobinfoRead); - if (temporalClient != null) { - final UUID connectionId = UUID.fromString(job.getScope()); - temporalClient.getWorkflowState(connectionId) - .map(workflowStateConverter::getWorkflowStateRead) - .ifPresent(jobDebugInfoRead::setWorkflowState); - } - - return jobDebugInfoRead; - } - - public Optional getLatestRunningSyncJob(final UUID connectionId) throws IOException { - final List nonTerminalSyncJobsForConnection = jobPersistence.listJobsForConnectionWithStatuses( - connectionId, - Collections.singleton(ConfigType.SYNC), - JobStatus.NON_TERMINAL_STATUSES); - - // there *should* only be a single running sync job for a connection, but - // jobPersistence.listJobsForConnectionWithStatuses orders by created_at desc so - // .findFirst will always return what we want. 
- return nonTerminalSyncJobsForConnection.stream().map(JobConverter::getJobRead).findFirst(); - } - - public Optional getLatestSyncJob(final UUID connectionId) throws IOException { - return jobPersistence.getLastSyncJob(connectionId).map(JobConverter::getJobRead); - } - - public List getLatestSyncJobsForConnections(final List connectionIds) throws IOException { - return jobPersistence.getLastSyncJobForConnections(connectionIds).stream() - .map(JobConverter::getJobRead) - .collect(Collectors.toList()); - } - - public AttemptNormalizationStatusReadList getAttemptNormalizationStatuses(final JobIdRequestBody jobIdRequestBody) throws IOException { - return new AttemptNormalizationStatusReadList() - .attemptNormalizationStatuses(jobPersistence.getAttemptNormalizationStatusesForJob(jobIdRequestBody.getId()).stream() - .map(JobConverter::convertAttemptNormalizationStatus).collect(Collectors.toList())); - } - - public List getRunningSyncJobForConnections(final List connectionIds) throws IOException { - return jobPersistence.getRunningSyncJobForConnections(connectionIds).stream() - .map(JobConverter::getJobRead) - .collect(Collectors.toList()); - } - - private SourceRead getSourceRead(final ConnectionRead connectionRead) throws JsonValidationException, IOException, ConfigNotFoundException { - final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody().sourceId(connectionRead.getSourceId()); - return sourceHandler.getSource(sourceIdRequestBody); - } - - private DestinationRead getDestinationRead(final ConnectionRead connectionRead) - throws JsonValidationException, IOException, ConfigNotFoundException { - final DestinationIdRequestBody destinationIdRequestBody = new DestinationIdRequestBody().destinationId(connectionRead.getDestinationId()); - return destinationHandler.getDestination(destinationIdRequestBody); - } - - private SourceDefinitionRead getSourceDefinitionRead(final SourceRead sourceRead) - throws JsonValidationException, IOException, 
ConfigNotFoundException { - final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody = - new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceRead.getSourceDefinitionId()); - return sourceDefinitionsHandler.getSourceDefinition(sourceDefinitionIdRequestBody); - } - - private DestinationDefinitionRead getDestinationDefinitionRead(final DestinationRead destinationRead) - throws JsonValidationException, IOException, ConfigNotFoundException { - final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody = - new DestinationDefinitionIdRequestBody().destinationDefinitionId(destinationRead.getDestinationDefinitionId()); - return destinationDefinitionsHandler.getDestinationDefinition(destinationDefinitionIdRequestBody); - } - - private JobDebugInfoRead buildJobDebugInfoRead(final JobInfoRead jobInfoRead) - throws ConfigNotFoundException, IOException, JsonValidationException { - final String configId = jobInfoRead.getJob().getConfigId(); - final ConnectionRead connection = connectionsHandler.getConnection(UUID.fromString(configId)); - final SourceRead source = getSourceRead(connection); - final DestinationRead destination = getDestinationRead(connection); - final SourceDefinitionRead sourceDefinitionRead = getSourceDefinitionRead(source); - final DestinationDefinitionRead destinationDefinitionRead = getDestinationDefinitionRead(destination); - final JobDebugRead jobDebugRead = JobConverter.getDebugJobInfoRead(jobInfoRead, sourceDefinitionRead, destinationDefinitionRead, airbyteVersion); - - return new JobDebugInfoRead() - .attempts(jobInfoRead.getAttempts()) - .job(jobDebugRead); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/LogsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/LogsHandler.java deleted file mode 100644 index 2aa8ea5161e5..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/LogsHandler.java +++ /dev/null 
@@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import io.airbyte.api.model.generated.LogsRequestBody; -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import jakarta.inject.Inject; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.File; -import java.nio.file.Path; - -/** - * This handler is only responsible for server and scheduler logs. Jobs logs paths are determined by - * the submitJob function in the JobSubmitter class in the airbyte-server module. - */ -@Singleton -public class LogsHandler { - - private final Path workspaceRoot; - private final WorkerEnvironment workerEnvironment; - private final LogConfigs logConfigs; - - @Deprecated(forRemoval = true) - public LogsHandler(final Configs configs) { - this(configs.getWorkspaceRoot(), configs.getWorkerEnvironment(), configs.getLogConfigs()); - } - - @Inject - public LogsHandler(@Named("workspaceRoot") final Path workspaceRoot, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs) { - this.workspaceRoot = workspaceRoot; - this.workerEnvironment = workerEnvironment; - this.logConfigs = logConfigs; - } - - public File getLogs(final LogsRequestBody logsRequestBody) { - switch (logsRequestBody.getLogType()) { - case SERVER -> { - return LogClientSingleton.getInstance().getServerLogFile(workspaceRoot, workerEnvironment, logConfigs); - } - case SCHEDULER -> { - return LogClientSingleton.getInstance().getSchedulerLogFile(workspaceRoot, workerEnvironment, logConfigs); - } - default -> throw new IllegalStateException("Unexpected value: " + logsRequestBody.getLogType()); - } - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java 
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java deleted file mode 100644 index adbea35e36c1..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java +++ /dev/null @@ -1,364 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DESTINATION_DEFINITION_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.SOURCE_DEFINITION_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.WORKSPACE_ID_KEY; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.api.model.generated.CompleteDestinationOAuthRequest; -import io.airbyte.api.model.generated.CompleteSourceOauthRequest; -import io.airbyte.api.model.generated.DestinationOauthConsentRequest; -import io.airbyte.api.model.generated.OAuthConsentRead; -import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody; -import io.airbyte.api.model.generated.SetInstancewideSourceOauthParamsRequestBody; -import io.airbyte.api.model.generated.SourceOauthConsentRequest; -import io.airbyte.commons.constants.AirbyteSecretConstants; -import io.airbyte.commons.json.JsonPaths; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.handlers.helpers.OAuthPathExtractor; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import 
io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.oauth.OAuthImplementationFactory; -import io.airbyte.persistence.job.factory.OAuthConfigSupplier; -import io.airbyte.persistence.job.tracker.TrackingMetadata; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Singleton -public class OAuthHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(OAuthHandler.class); - private static final String ERROR_MESSAGE = "failed while reporting usage."; - - private final ConfigRepository configRepository; - private final OAuthImplementationFactory oAuthImplementationFactory; - private final TrackingClient trackingClient; - private final SecretsRepositoryReader secretsRepositoryReader; - - public OAuthHandler(final ConfigRepository configRepository, - final HttpClient httpClient, - final TrackingClient trackingClient, - final SecretsRepositoryReader secretsRepositoryReader) { - this.configRepository = configRepository; - this.oAuthImplementationFactory = new OAuthImplementationFactory(configRepository, httpClient); - this.trackingClient = trackingClient; - this.secretsRepositoryReader = secretsRepositoryReader; - } - - public OAuthConsentRead getSourceOAuthConsent(final SourceOauthConsentRequest sourceOauthConsentRequest) - throws JsonValidationException, ConfigNotFoundException, IOException { - final Map traceTags = Map.of(WORKSPACE_ID_KEY, sourceOauthConsentRequest.getWorkspaceId(), 
SOURCE_DEFINITION_ID_KEY, - sourceOauthConsentRequest.getSourceDefinitionId()); - ApmTraceUtils.addTagsToTrace(traceTags); - ApmTraceUtils.addTagsToRootSpan(traceTags); - final StandardSourceDefinition sourceDefinition = - configRepository.getStandardSourceDefinition(sourceOauthConsentRequest.getSourceDefinitionId()); - final OAuthFlowImplementation oAuthFlowImplementation = oAuthImplementationFactory.create(sourceDefinition); - final ConnectorSpecification spec = sourceDefinition.getSpec(); - final Map metadata = generateSourceMetadata(sourceOauthConsentRequest.getSourceDefinitionId()); - final OAuthConsentRead result; - if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { - final JsonNode oAuthInputConfigurationForConsent; - - if (sourceOauthConsentRequest.getSourceId() == null) { - oAuthInputConfigurationForConsent = sourceOauthConsentRequest.getoAuthInputConfiguration(); - } else { - final SourceConnection hydratedSourceConnection = - secretsRepositoryReader.getSourceConnectionWithSecrets(sourceOauthConsentRequest.getSourceId()); - - oAuthInputConfigurationForConsent = getOAuthInputConfigurationForConsent(spec, - hydratedSourceConnection.getConfiguration(), - sourceOauthConsentRequest.getoAuthInputConfiguration()); - } - - result = new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getSourceConsentUrl( - sourceOauthConsentRequest.getWorkspaceId(), - sourceOauthConsentRequest.getSourceDefinitionId(), - sourceOauthConsentRequest.getRedirectUrl(), - oAuthInputConfigurationForConsent, - spec.getAdvancedAuth().getOauthConfigSpecification())); - } else { - result = new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getSourceConsentUrl( - sourceOauthConsentRequest.getWorkspaceId(), - sourceOauthConsentRequest.getSourceDefinitionId(), - sourceOauthConsentRequest.getRedirectUrl(), Jsons.emptyObject(), null)); - } - try { - trackingClient.track(sourceOauthConsentRequest.getWorkspaceId(), "Get Oauth Consent URL - Backend", metadata); - } catch (final 
Exception e) { - LOGGER.error(ERROR_MESSAGE, e); - } - return result; - } - - public OAuthConsentRead getDestinationOAuthConsent(final DestinationOauthConsentRequest destinationOauthConsentRequest) - throws JsonValidationException, ConfigNotFoundException, IOException { - final Map traceTags = Map.of(WORKSPACE_ID_KEY, destinationOauthConsentRequest.getWorkspaceId(), DESTINATION_DEFINITION_ID_KEY, - destinationOauthConsentRequest.getDestinationDefinitionId()); - ApmTraceUtils.addTagsToTrace(traceTags); - ApmTraceUtils.addTagsToRootSpan(traceTags); - - final StandardDestinationDefinition destinationDefinition = - configRepository.getStandardDestinationDefinition(destinationOauthConsentRequest.getDestinationDefinitionId()); - final OAuthFlowImplementation oAuthFlowImplementation = oAuthImplementationFactory.create(destinationDefinition); - final ConnectorSpecification spec = destinationDefinition.getSpec(); - final Map metadata = generateDestinationMetadata(destinationOauthConsentRequest.getDestinationDefinitionId()); - final OAuthConsentRead result; - if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { - final JsonNode oAuthInputConfigurationForConsent; - - if (destinationOauthConsentRequest.getDestinationId() == null) { - oAuthInputConfigurationForConsent = destinationOauthConsentRequest.getoAuthInputConfiguration(); - } else { - final DestinationConnection hydratedSourceConnection = - secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationOauthConsentRequest.getDestinationId()); - - oAuthInputConfigurationForConsent = getOAuthInputConfigurationForConsent(spec, - hydratedSourceConnection.getConfiguration(), - destinationOauthConsentRequest.getoAuthInputConfiguration()); - - } - - result = new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getDestinationConsentUrl( - destinationOauthConsentRequest.getWorkspaceId(), - destinationOauthConsentRequest.getDestinationDefinitionId(), - destinationOauthConsentRequest.getRedirectUrl(), - 
oAuthInputConfigurationForConsent, - spec.getAdvancedAuth().getOauthConfigSpecification())); - } else { - result = new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getDestinationConsentUrl( - destinationOauthConsentRequest.getWorkspaceId(), - destinationOauthConsentRequest.getDestinationDefinitionId(), - destinationOauthConsentRequest.getRedirectUrl(), Jsons.emptyObject(), null)); - } - try { - trackingClient.track(destinationOauthConsentRequest.getWorkspaceId(), "Get Oauth Consent URL - Backend", metadata); - } catch (final Exception e) { - LOGGER.error(ERROR_MESSAGE, e); - } - return result; - } - - public Map completeSourceOAuth(final CompleteSourceOauthRequest completeSourceOauthRequest) - throws JsonValidationException, ConfigNotFoundException, IOException { - final Map traceTags = Map.of(WORKSPACE_ID_KEY, completeSourceOauthRequest.getWorkspaceId(), SOURCE_DEFINITION_ID_KEY, - completeSourceOauthRequest.getSourceDefinitionId()); - ApmTraceUtils.addTagsToTrace(traceTags); - ApmTraceUtils.addTagsToRootSpan(traceTags); - - final StandardSourceDefinition sourceDefinition = - configRepository.getStandardSourceDefinition(completeSourceOauthRequest.getSourceDefinitionId()); - final OAuthFlowImplementation oAuthFlowImplementation = oAuthImplementationFactory.create(sourceDefinition); - final ConnectorSpecification spec = sourceDefinition.getSpec(); - final Map metadata = generateSourceMetadata(completeSourceOauthRequest.getSourceDefinitionId()); - final Map result; - if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { - final JsonNode oAuthInputConfigurationForConsent; - - if (completeSourceOauthRequest.getSourceId() == null) { - oAuthInputConfigurationForConsent = completeSourceOauthRequest.getoAuthInputConfiguration(); - } else { - final SourceConnection hydratedSourceConnection = - secretsRepositoryReader.getSourceConnectionWithSecrets(completeSourceOauthRequest.getSourceId()); - - oAuthInputConfigurationForConsent = 
getOAuthInputConfigurationForConsent(spec, - hydratedSourceConnection.getConfiguration(), - completeSourceOauthRequest.getoAuthInputConfiguration()); - } - - result = oAuthFlowImplementation.completeSourceOAuth( - completeSourceOauthRequest.getWorkspaceId(), - completeSourceOauthRequest.getSourceDefinitionId(), - completeSourceOauthRequest.getQueryParams(), - completeSourceOauthRequest.getRedirectUrl(), - oAuthInputConfigurationForConsent, - spec.getAdvancedAuth().getOauthConfigSpecification()); - } else { - // deprecated but this path is kept for connectors that don't define OAuth Spec yet - result = oAuthFlowImplementation.completeSourceOAuth( - completeSourceOauthRequest.getWorkspaceId(), - completeSourceOauthRequest.getSourceDefinitionId(), - completeSourceOauthRequest.getQueryParams(), - completeSourceOauthRequest.getRedirectUrl()); - } - try { - trackingClient.track(completeSourceOauthRequest.getWorkspaceId(), "Complete OAuth Flow - Backend", metadata); - } catch (final Exception e) { - LOGGER.error(ERROR_MESSAGE, e); - } - return result; - } - - public Map completeDestinationOAuth(final CompleteDestinationOAuthRequest completeDestinationOAuthRequest) - throws JsonValidationException, ConfigNotFoundException, IOException { - final Map traceTags = Map.of(WORKSPACE_ID_KEY, completeDestinationOAuthRequest.getWorkspaceId(), DESTINATION_DEFINITION_ID_KEY, - completeDestinationOAuthRequest.getDestinationDefinitionId()); - ApmTraceUtils.addTagsToTrace(traceTags); - ApmTraceUtils.addTagsToRootSpan(traceTags); - - final StandardDestinationDefinition destinationDefinition = - configRepository.getStandardDestinationDefinition(completeDestinationOAuthRequest.getDestinationDefinitionId()); - final OAuthFlowImplementation oAuthFlowImplementation = oAuthImplementationFactory.create(destinationDefinition); - final ConnectorSpecification spec = destinationDefinition.getSpec(); - final Map metadata = 
generateDestinationMetadata(completeDestinationOAuthRequest.getDestinationDefinitionId()); - final Map result; - if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { - final JsonNode oAuthInputConfigurationForConsent; - - if (completeDestinationOAuthRequest.getDestinationId() == null) { - oAuthInputConfigurationForConsent = completeDestinationOAuthRequest.getoAuthInputConfiguration(); - } else { - final DestinationConnection hydratedSourceConnection = - secretsRepositoryReader.getDestinationConnectionWithSecrets(completeDestinationOAuthRequest.getDestinationId()); - - oAuthInputConfigurationForConsent = getOAuthInputConfigurationForConsent(spec, - hydratedSourceConnection.getConfiguration(), - completeDestinationOAuthRequest.getoAuthInputConfiguration()); - - } - - result = oAuthFlowImplementation.completeDestinationOAuth( - completeDestinationOAuthRequest.getWorkspaceId(), - completeDestinationOAuthRequest.getDestinationDefinitionId(), - completeDestinationOAuthRequest.getQueryParams(), - completeDestinationOAuthRequest.getRedirectUrl(), - oAuthInputConfigurationForConsent, - spec.getAdvancedAuth().getOauthConfigSpecification()); - } else { - // deprecated but this path is kept for connectors that don't define OAuth Spec yet - result = oAuthFlowImplementation.completeDestinationOAuth( - completeDestinationOAuthRequest.getWorkspaceId(), - completeDestinationOAuthRequest.getDestinationDefinitionId(), - completeDestinationOAuthRequest.getQueryParams(), - completeDestinationOAuthRequest.getRedirectUrl()); - } - try { - trackingClient.track(completeDestinationOAuthRequest.getWorkspaceId(), "Complete OAuth Flow - Backend", metadata); - } catch (final Exception e) { - LOGGER.error(ERROR_MESSAGE, e); - } - return result; - } - - public void setSourceInstancewideOauthParams(final SetInstancewideSourceOauthParamsRequestBody requestBody) - throws JsonValidationException, IOException { - final SourceOAuthParameter param = configRepository - 
.getSourceOAuthParamByDefinitionIdOptional(null, requestBody.getSourceDefinitionId()) - .orElseGet(() -> new SourceOAuthParameter().withOauthParameterId(UUID.randomUUID())) - .withConfiguration(Jsons.jsonNode(requestBody.getParams())) - .withSourceDefinitionId(requestBody.getSourceDefinitionId()); - // TODO validate requestBody.getParams() against - // spec.getAdvancedAuth().getOauthConfigSpecification().getCompleteOauthServerInputSpecification() - configRepository.writeSourceOAuthParam(param); - } - - public void setDestinationInstancewideOauthParams(final SetInstancewideDestinationOauthParamsRequestBody requestBody) - throws JsonValidationException, IOException { - final DestinationOAuthParameter param = configRepository - .getDestinationOAuthParamByDefinitionIdOptional(null, requestBody.getDestinationDefinitionId()) - .orElseGet(() -> new DestinationOAuthParameter().withOauthParameterId(UUID.randomUUID())) - .withConfiguration(Jsons.jsonNode(requestBody.getParams())) - .withDestinationDefinitionId(requestBody.getDestinationDefinitionId()); - // TODO validate requestBody.getParams() against - // spec.getAdvancedAuth().getOauthConfigSpecification().getCompleteOauthServerInputSpecification() - configRepository.writeDestinationOAuthParam(param); - } - - private JsonNode getOAuthInputConfigurationForConsent(final ConnectorSpecification spec, - final JsonNode hydratedSourceConnectionConfiguration, - final JsonNode oAuthInputConfiguration) { - final Map fieldsToGet = - buildJsonPathFromOAuthFlowInitParameters(OAuthPathExtractor.extractOauthConfigurationPaths( - spec.getAdvancedAuth().getOauthConfigSpecification().getOauthUserInputFromConnectorConfigSpecification())); - - final JsonNode oAuthInputConfigurationFromDB = getOAuthInputConfiguration(hydratedSourceConnectionConfiguration, fieldsToGet); - - return getOauthFromDBIfNeeded(oAuthInputConfigurationFromDB, - oAuthInputConfiguration); - } - - private Map generateSourceMetadata(final UUID sourceDefinitionId) - throws 
JsonValidationException, ConfigNotFoundException, IOException { - final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); - return TrackingMetadata.generateSourceDefinitionMetadata(sourceDefinition); - } - - private Map generateDestinationMetadata(final UUID destinationDefinitionId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition(destinationDefinitionId); - return TrackingMetadata.generateDestinationDefinitionMetadata(destinationDefinition); - } - - @VisibleForTesting - Map buildJsonPathFromOAuthFlowInitParameters(final Map> oAuthFlowInitParameters) { - return oAuthFlowInitParameters.entrySet().stream() - .map(entry -> Map.entry(entry.getKey(), "$." + String.join(".", entry.getValue()))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } - - @VisibleForTesting - JsonNode getOauthFromDBIfNeeded(final JsonNode oAuthInputConfigurationFromDB, final JsonNode oAuthInputConfigurationFromInput) { - final ObjectNode result = (ObjectNode) Jsons.emptyObject(); - - oAuthInputConfigurationFromInput.fields().forEachRemaining(entry -> { - final String k = entry.getKey(); - final JsonNode v = entry.getValue(); - - // Note: This does not currently handle replacing masked secrets within nested objects. 
- if (AirbyteSecretConstants.SECRETS_MASK.equals(v.textValue())) { - if (oAuthInputConfigurationFromDB.has(k)) { - result.set(k, oAuthInputConfigurationFromDB.get(k)); - } else { - LOGGER.warn("Missing the key {} in the config store in DB", k); - } - } else { - result.set(k, v); - } - }); - - return result; - } - - @VisibleForTesting - JsonNode getOAuthInputConfiguration(final JsonNode hydratedSourceConnectionConfiguration, final Map pathsToGet) { - final Map result = new HashMap<>(); - pathsToGet.forEach((k, v) -> { - final Optional configValue = JsonPaths.getSingleValue(hydratedSourceConnectionConfiguration, v); - if (configValue.isPresent()) { - result.put(k, configValue.get()); - } else { - LOGGER.warn("Missing the key {} from the config stored in DB", k); - } - }); - - return Jsons.jsonNode(result); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OpenApiConfigHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OpenApiConfigHandler.java deleted file mode 100644 index 32a3fa70accd..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OpenApiConfigHandler.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import io.airbyte.commons.resources.MoreResources; -import jakarta.inject.Singleton; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; - -@Singleton -public class OpenApiConfigHandler { - - private static final File TMP_FILE; - - static { - try { - TMP_FILE = File.createTempFile("airbyte", "openapiconfig"); - TMP_FILE.deleteOnExit(); - Files.writeString(TMP_FILE.toPath(), MoreResources.readResource("config.yaml")); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public File getFile() { - return TMP_FILE; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OperationsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OperationsHandler.java deleted file mode 100644 index 21b793e69fc1..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OperationsHandler.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.CheckOperationRead; -import io.airbyte.api.model.generated.CheckOperationRead.StatusEnum; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.OperationCreate; -import io.airbyte.api.model.generated.OperationIdRequestBody; -import io.airbyte.api.model.generated.OperationRead; -import io.airbyte.api.model.generated.OperationReadList; -import io.airbyte.api.model.generated.OperationUpdate; -import io.airbyte.api.model.generated.OperatorConfiguration; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.converters.OperationsConverter; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Inject; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.UUID; -import java.util.function.Supplier; - -@Singleton -public class OperationsHandler { - - private final ConfigRepository configRepository; - private final Supplier uuidGenerator; - - @Inject - public OperationsHandler(final ConfigRepository configRepository) { - this(configRepository, UUID::randomUUID); - } - - @VisibleForTesting - OperationsHandler(final ConfigRepository configRepository, final Supplier uuidGenerator) { - this.configRepository = configRepository; - this.uuidGenerator = uuidGenerator; - } - - public CheckOperationRead checkOperation(final OperatorConfiguration 
operationCheck) { - try { - validateOperation(operationCheck); - } catch (final IllegalArgumentException e) { - return new CheckOperationRead().status(StatusEnum.FAILED) - .message(e.getMessage()); - } - return new CheckOperationRead().status(StatusEnum.SUCCEEDED); - } - - public OperationRead createOperation(final OperationCreate operationCreate) - throws JsonValidationException, IOException, ConfigNotFoundException { - final UUID operationId = uuidGenerator.get(); - final StandardSyncOperation standardSyncOperation = toStandardSyncOperation(operationCreate) - .withOperationId(operationId); - return persistOperation(standardSyncOperation); - } - - private StandardSyncOperation toStandardSyncOperation(final OperationCreate operationCreate) { - final StandardSyncOperation standardSyncOperation = new StandardSyncOperation() - .withWorkspaceId(operationCreate.getWorkspaceId()) - .withName(operationCreate.getName()) - .withOperatorType(Enums.convertTo(operationCreate.getOperatorConfiguration().getOperatorType(), OperatorType.class)) - .withTombstone(false); - OperationsConverter.populateOperatorConfigFromApi(operationCreate.getOperatorConfiguration(), standardSyncOperation); - return standardSyncOperation; - } - - private void validateOperation(final OperatorConfiguration operatorConfiguration) { - if ((io.airbyte.api.model.generated.OperatorType.NORMALIZATION).equals(operatorConfiguration.getOperatorType())) { - Preconditions.checkArgument(operatorConfiguration.getNormalization() != null); - } - if ((io.airbyte.api.model.generated.OperatorType.DBT).equals(operatorConfiguration.getOperatorType())) { - Preconditions.checkArgument(operatorConfiguration.getDbt() != null); - } - if (io.airbyte.api.model.generated.OperatorType.WEBHOOK.equals(operatorConfiguration.getOperatorType())) { - Preconditions.checkArgument(operatorConfiguration.getWebhook() != null); - } - } - - public OperationRead updateOperation(final OperationUpdate operationUpdate) - throws 
ConfigNotFoundException, IOException, JsonValidationException { - final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationUpdate.getOperationId()); - return persistOperation(updateOperation(operationUpdate, standardSyncOperation)); - } - - private OperationRead persistOperation(final StandardSyncOperation standardSyncOperation) - throws ConfigNotFoundException, IOException, JsonValidationException { - configRepository.writeStandardSyncOperation(standardSyncOperation); - return buildOperationRead(standardSyncOperation.getOperationId()); - } - - public static StandardSyncOperation updateOperation(final OperationUpdate operationUpdate, final StandardSyncOperation standardSyncOperation) { - standardSyncOperation - .withName(operationUpdate.getName()); - OperationsConverter.populateOperatorConfigFromApi(operationUpdate.getOperatorConfiguration(), standardSyncOperation); - return standardSyncOperation; - } - - public OperationReadList listOperationsForConnection(final ConnectionIdRequestBody connectionIdRequestBody) - throws JsonValidationException, ConfigNotFoundException, IOException { - final List operationReads = Lists.newArrayList(); - final StandardSync standardSync = configRepository.getStandardSync(connectionIdRequestBody.getConnectionId()); - for (final UUID operationId : standardSync.getOperationIds()) { - final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationId); - if (standardSyncOperation.getTombstone() != null && standardSyncOperation.getTombstone()) { - continue; - } - operationReads.add(buildOperationRead(standardSyncOperation)); - } - return new OperationReadList().operations(operationReads); - } - - public OperationRead getOperation(final OperationIdRequestBody operationIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - return buildOperationRead(operationIdRequestBody.getOperationId()); - } - - public void 
deleteOperationsForConnection(final ConnectionIdRequestBody connectionIdRequestBody) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSync standardSync = configRepository.getStandardSync(connectionIdRequestBody.getConnectionId()); - deleteOperationsForConnection(standardSync, standardSync.getOperationIds()); - } - - public void deleteOperationsForConnection(final UUID connectionId, final List deleteOperationIds) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSync standardSync = configRepository.getStandardSync(connectionId); - deleteOperationsForConnection(standardSync, deleteOperationIds); - } - - public void deleteOperationsForConnection(final StandardSync standardSync, final List deleteOperationIds) - throws JsonValidationException, ConfigNotFoundException, IOException { - final List operationIds = new ArrayList<>(standardSync.getOperationIds()); - for (final UUID operationId : deleteOperationIds) { - operationIds.remove(operationId); - boolean sharedOperation = false; - for (final StandardSync sync : configRepository.listStandardSyncsUsingOperation(operationId)) { - // Check if other connections are using the same operation - if (!sync.getConnectionId().equals(standardSync.getConnectionId())) { - sharedOperation = true; - break; - } - } - if (!sharedOperation) { - removeOperation(operationId); - } - } - - configRepository.updateConnectionOperationIds(standardSync.getConnectionId(), new HashSet<>(operationIds)); - } - - public void deleteOperation(final OperationIdRequestBody operationIdRequestBody) - throws IOException { - final UUID operationId = operationIdRequestBody.getOperationId(); - configRepository.deleteStandardSyncOperation(operationId); - } - - private void removeOperation(final UUID operationId) throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSyncOperation standardSyncOperation = 
configRepository.getStandardSyncOperation(operationId); - if (standardSyncOperation != null) { - standardSyncOperation.withTombstone(true); - persistOperation(standardSyncOperation); - } else { - throw new ConfigNotFoundException(ConfigSchema.STANDARD_SYNC_OPERATION, operationId.toString()); - } - } - - private OperationRead buildOperationRead(final UUID operationId) - throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationId); - if (standardSyncOperation != null) { - return buildOperationRead(standardSyncOperation); - } else { - throw new ConfigNotFoundException(ConfigSchema.STANDARD_SYNC_OPERATION, operationId.toString()); - } - } - - private static OperationRead buildOperationRead(final StandardSyncOperation standardSyncOperation) { - return OperationsConverter.operationReadFromPersistedOperation(standardSyncOperation); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java deleted file mode 100644 index baf39694a53b..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java +++ /dev/null @@ -1,549 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Charsets; -import com.google.common.collect.ImmutableSet; -import com.google.common.hash.HashFunction; -import com.google.common.hash.Hashing; -import io.airbyte.api.model.generated.AdvancedAuth; -import io.airbyte.api.model.generated.AuthSpecification; -import io.airbyte.api.model.generated.CatalogDiff; -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.CheckConnectionRead.StatusEnum; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionReadList; -import io.airbyte.api.model.generated.ConnectionStatus; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.DestinationCoreConfig; -import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationSyncMode; -import io.airbyte.api.model.generated.DestinationUpdate; -import io.airbyte.api.model.generated.FieldTransform; -import io.airbyte.api.model.generated.JobConfigType; -import io.airbyte.api.model.generated.JobIdRequestBody; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.LogRead; -import io.airbyte.api.model.generated.NonBreakingChangesPreference; -import io.airbyte.api.model.generated.SourceCoreConfig; -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionSpecificationRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody; -import 
io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceUpdate; -import io.airbyte.api.model.generated.StreamTransform; -import io.airbyte.api.model.generated.StreamTransform.TransformTypeEnum; -import io.airbyte.api.model.generated.SynchronousJobRead; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.converters.ConfigurationUpdate; -import io.airbyte.commons.server.converters.JobConverter; -import io.airbyte.commons.server.converters.OauthModelConverter; -import io.airbyte.commons.server.errors.ValueConflictKnownException; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.commons.server.scheduler.SynchronousJobMetadata; -import io.airbyte.commons.server.scheduler.SynchronousResponse; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.commons.temporal.ErrorCode; -import io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardCheckConnectionOutput; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WebUrlHelper; -import 
io.airbyte.persistence.job.models.Job; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Optional; -import java.util.UUID; -import javax.validation.constraints.NotNull; -import lombok.extern.slf4j.Slf4j; - -@Singleton -@Slf4j -public class SchedulerHandler { - - private static final HashFunction HASH_FUNCTION = Hashing.md5(); - - private static final ImmutableSet VALUE_CONFLICT_EXCEPTION_ERROR_CODE_SET = - ImmutableSet.of(ErrorCode.WORKFLOW_DELETED, ErrorCode.WORKFLOW_RUNNING); - - private final ConnectionsHandler connectionsHandler; - private final ConfigRepository configRepository; - private final SecretsRepositoryWriter secretsRepositoryWriter; - private final SynchronousSchedulerClient synchronousSchedulerClient; - private final ConfigurationUpdate configurationUpdate; - private final JsonSchemaValidator jsonSchemaValidator; - private final JobPersistence jobPersistence; - private final JobConverter jobConverter; - private final EventRunner eventRunner; - private final FeatureFlags envVariableFeatureFlags; - private final WebUrlHelper webUrlHelper; - - // TODO: Convert to be fully using micronaut - public SchedulerHandler(final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final SynchronousSchedulerClient synchronousSchedulerClient, - final JobPersistence jobPersistence, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - final EventRunner eventRunner, - final ConnectionsHandler connectionsHandler, - final FeatureFlags envVariableFeatureFlags, - final WebUrlHelper webUrlHelper) { - this( - configRepository, - secretsRepositoryWriter, - 
synchronousSchedulerClient, - new ConfigurationUpdate(configRepository, secretsRepositoryReader), - new JsonSchemaValidator(), - jobPersistence, - eventRunner, - new JobConverter(workerEnvironment, logConfigs), - connectionsHandler, - envVariableFeatureFlags, - webUrlHelper); - } - - @VisibleForTesting - SchedulerHandler(final ConfigRepository configRepository, - final SecretsRepositoryWriter secretsRepositoryWriter, - final SynchronousSchedulerClient synchronousSchedulerClient, - final ConfigurationUpdate configurationUpdate, - final JsonSchemaValidator jsonSchemaValidator, - final JobPersistence jobPersistence, - final EventRunner eventRunner, - final JobConverter jobConverter, - final ConnectionsHandler connectionsHandler, - final FeatureFlags envVariableFeatureFlags, - final WebUrlHelper webUrlHelper) { - this.configRepository = configRepository; - this.secretsRepositoryWriter = secretsRepositoryWriter; - this.synchronousSchedulerClient = synchronousSchedulerClient; - this.configurationUpdate = configurationUpdate; - this.jsonSchemaValidator = jsonSchemaValidator; - this.jobPersistence = jobPersistence; - this.eventRunner = eventRunner; - this.jobConverter = jobConverter; - this.connectionsHandler = connectionsHandler; - this.envVariableFeatureFlags = envVariableFeatureFlags; - this.webUrlHelper = webUrlHelper; - } - - public CheckConnectionRead checkSourceConnectionFromSourceId(final SourceIdRequestBody sourceIdRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - final SourceConnection source = configRepository.getSourceConnection(sourceIdRequestBody.getSourceId()); - final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()); - final String imageName = sourceDef.getDockerRepository() + ":" + sourceDef.getDockerImageTag(); - final boolean isCustomConnector = sourceDef.getCustom(); - final Version protocolVersion = new Version(sourceDef.getProtocolVersion()); - - 
return reportConnectionStatus(synchronousSchedulerClient.createSourceCheckConnectionJob(source, imageName, protocolVersion, isCustomConnector)); - } - - public CheckConnectionRead checkSourceConnectionFromSourceCreate(final SourceCoreConfig sourceConfig) - throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(sourceConfig.getSourceDefinitionId()); - final var partialConfig = secretsRepositoryWriter.statefulSplitEphemeralSecrets( - sourceConfig.getConnectionConfiguration(), - sourceDef.getSpec()); - - // todo (cgardens) - narrow the struct passed to the client. we are not setting fields that are - // technically declared as required. - final SourceConnection source = new SourceConnection() - .withSourceId(sourceConfig.getSourceId()) - .withSourceDefinitionId(sourceConfig.getSourceDefinitionId()) - .withConfiguration(partialConfig) - .withWorkspaceId(sourceConfig.getWorkspaceId()); - - final Version protocolVersion = new Version(sourceDef.getProtocolVersion()); - - final String imageName = sourceDef.getDockerRepository() + ":" + sourceDef.getDockerImageTag(); - final boolean isCustomConnector = sourceDef.getCustom(); - return reportConnectionStatus(synchronousSchedulerClient.createSourceCheckConnectionJob(source, imageName, protocolVersion, isCustomConnector)); - } - - public CheckConnectionRead checkSourceConnectionFromSourceIdForUpdate(final SourceUpdate sourceUpdate) - throws ConfigNotFoundException, IOException, JsonValidationException { - final SourceConnection updatedSource = - configurationUpdate.source(sourceUpdate.getSourceId(), sourceUpdate.getName(), sourceUpdate.getConnectionConfiguration()); - - final ConnectorSpecification spec = getSpecFromSourceDefinitionId(updatedSource.getSourceDefinitionId()); - jsonSchemaValidator.ensure(spec.getConnectionSpecification(), updatedSource.getConfiguration()); - - final SourceCoreConfig sourceCoreConfig = new 
SourceCoreConfig() - .sourceId(updatedSource.getSourceId()) - .connectionConfiguration(updatedSource.getConfiguration()) - .sourceDefinitionId(updatedSource.getSourceDefinitionId()) - .workspaceId(updatedSource.getWorkspaceId()); - - return checkSourceConnectionFromSourceCreate(sourceCoreConfig); - } - - public CheckConnectionRead checkDestinationConnectionFromDestinationId(final DestinationIdRequestBody destinationIdRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - final DestinationConnection destination = configRepository.getDestinationConnection(destinationIdRequestBody.getDestinationId()); - final StandardDestinationDefinition destinationDef = configRepository.getStandardDestinationDefinition(destination.getDestinationDefinitionId()); - final String imageName = destinationDef.getDockerRepository() + ":" + destinationDef.getDockerImageTag(); - final boolean isCustomConnector = destinationDef.getCustom(); - final Version protocolVersion = new Version(destinationDef.getProtocolVersion()); - return reportConnectionStatus( - synchronousSchedulerClient.createDestinationCheckConnectionJob(destination, imageName, protocolVersion, isCustomConnector)); - } - - public CheckConnectionRead checkDestinationConnectionFromDestinationCreate(final DestinationCoreConfig destinationConfig) - throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardDestinationDefinition destDef = configRepository.getStandardDestinationDefinition(destinationConfig.getDestinationDefinitionId()); - final var partialConfig = secretsRepositoryWriter.statefulSplitEphemeralSecrets( - destinationConfig.getConnectionConfiguration(), - destDef.getSpec()); - final boolean isCustomConnector = destDef.getCustom(); - - // todo (cgardens) - narrow the struct passed to the client. we are not setting fields that are - // technically declared as required. 
- final DestinationConnection destination = new DestinationConnection() - .withDestinationId(destinationConfig.getDestinationId()) - .withDestinationDefinitionId(destinationConfig.getDestinationDefinitionId()) - .withConfiguration(partialConfig) - .withWorkspaceId(destinationConfig.getWorkspaceId()); - final String imageName = destDef.getDockerRepository() + ":" + destDef.getDockerImageTag(); - final Version protocolVersion = new Version(destDef.getProtocolVersion()); - return reportConnectionStatus( - synchronousSchedulerClient.createDestinationCheckConnectionJob(destination, imageName, protocolVersion, isCustomConnector)); - } - - public CheckConnectionRead checkDestinationConnectionFromDestinationIdForUpdate(final DestinationUpdate destinationUpdate) - throws JsonValidationException, IOException, ConfigNotFoundException { - final DestinationConnection updatedDestination = configurationUpdate - .destination(destinationUpdate.getDestinationId(), destinationUpdate.getName(), destinationUpdate.getConnectionConfiguration()); - - final ConnectorSpecification spec = getSpecFromDestinationDefinitionId(updatedDestination.getDestinationDefinitionId()); - jsonSchemaValidator.ensure(spec.getConnectionSpecification(), updatedDestination.getConfiguration()); - - final DestinationCoreConfig destinationCoreConfig = new DestinationCoreConfig() - .destinationId(updatedDestination.getDestinationId()) - .connectionConfiguration(updatedDestination.getConfiguration()) - .destinationDefinitionId(updatedDestination.getDestinationDefinitionId()) - .workspaceId(updatedDestination.getWorkspaceId()); - - return checkDestinationConnectionFromDestinationCreate(destinationCoreConfig); - } - - public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceId(final SourceDiscoverSchemaRequestBody discoverSchemaRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - final SourceConnection source = 
configRepository.getSourceConnection(discoverSchemaRequestBody.getSourceId()); - final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()); - final String imageName = sourceDef.getDockerRepository() + ":" + sourceDef.getDockerImageTag(); - final boolean isCustomConnector = sourceDef.getCustom(); - - final String configHash = HASH_FUNCTION.hashBytes(Jsons.serialize(source.getConfiguration()).getBytes( - Charsets.UTF_8)).toString(); - final String connectorVersion = sourceDef.getDockerImageTag(); - final Optional currentCatalog = - configRepository.getActorCatalog(discoverSchemaRequestBody.getSourceId(), connectorVersion, configHash); - final boolean bustActorCatalogCache = discoverSchemaRequestBody.getDisableCache() != null && discoverSchemaRequestBody.getDisableCache(); - if (currentCatalog.isEmpty() || bustActorCatalogCache) { - final SynchronousResponse persistedCatalogId = - synchronousSchedulerClient.createDiscoverSchemaJob( - source, - imageName, - connectorVersion, - new Version(sourceDef.getProtocolVersion()), - isCustomConnector); - final SourceDiscoverSchemaRead discoveredSchema = retrieveDiscoveredSchema(persistedCatalogId, sourceDef); - - if (persistedCatalogId.isSuccess() && discoverSchemaRequestBody.getConnectionId() != null) { - // modify discoveredSchema object to add CatalogDiff, containsBreakingChange, and connectionStatus - generateCatalogDiffsAndDisableConnectionsIfNeeded(discoveredSchema, discoverSchemaRequestBody, source.getWorkspaceId()); - } - - return discoveredSchema; - } - final AirbyteCatalog airbyteCatalog = Jsons.object(currentCatalog.get().getCatalog(), AirbyteCatalog.class); - final SynchronousJobRead emptyJob = new SynchronousJobRead() - .configId("NoConfiguration") - .configType(JobConfigType.DISCOVER_SCHEMA) - .id(UUID.randomUUID()) - .createdAt(0L) - .endedAt(0L) - .logs(new LogRead().logLines(new ArrayList<>())) - .succeeded(true); - return new 
SourceDiscoverSchemaRead() - .catalog(CatalogConverter.toApi(airbyteCatalog, sourceDef)) - .jobInfo(emptyJob) - .catalogId(currentCatalog.get().getId()); - } - - public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceCreate(final SourceCoreConfig sourceCreate) - throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(sourceCreate.getSourceDefinitionId()); - final var partialConfig = secretsRepositoryWriter.statefulSplitEphemeralSecrets( - sourceCreate.getConnectionConfiguration(), - sourceDef.getSpec()); - - final String imageName = sourceDef.getDockerRepository() + ":" + sourceDef.getDockerImageTag(); - final boolean isCustomConnector = sourceDef.getCustom(); - // todo (cgardens) - narrow the struct passed to the client. we are not setting fields that are - // technically declared as required. - final SourceConnection source = new SourceConnection() - .withSourceDefinitionId(sourceCreate.getSourceDefinitionId()) - .withConfiguration(partialConfig) - .withWorkspaceId(sourceCreate.getWorkspaceId()); - final SynchronousResponse response = synchronousSchedulerClient.createDiscoverSchemaJob( - source, - imageName, - sourceDef.getDockerImageTag(), - new Version( - sourceDef.getProtocolVersion()), - isCustomConnector); - return retrieveDiscoveredSchema(response, sourceDef); - } - - private SourceDiscoverSchemaRead retrieveDiscoveredSchema(final SynchronousResponse response, final StandardSourceDefinition sourceDef) - throws ConfigNotFoundException, IOException { - final SourceDiscoverSchemaRead sourceDiscoverSchemaRead = new SourceDiscoverSchemaRead() - .jobInfo(jobConverter.getSynchronousJobRead(response)); - - if (response.isSuccess()) { - final ActorCatalog catalog = configRepository.getActorCatalogById(response.getOutput()); - final AirbyteCatalog persistenceCatalog = Jsons.object(catalog.getCatalog(), - io.airbyte.protocol.models.AirbyteCatalog.class); 
- sourceDiscoverSchemaRead.catalog(CatalogConverter.toApi(persistenceCatalog, sourceDef)); - sourceDiscoverSchemaRead.catalogId(response.getOutput()); - } - - return sourceDiscoverSchemaRead; - } - - public SourceDefinitionSpecificationRead getSourceDefinitionSpecification(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) - throws ConfigNotFoundException, IOException, JsonValidationException { - final UUID sourceDefinitionId = sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId(); - final StandardSourceDefinition source = configRepository.getStandardSourceDefinition(sourceDefinitionId); - final ConnectorSpecification spec = source.getSpec(); - - final SourceDefinitionSpecificationRead specRead = new SourceDefinitionSpecificationRead() - .jobInfo(jobConverter.getSynchronousJobRead(SynchronousJobMetadata.mock(ConfigType.GET_SPEC))) - .connectionSpecification(spec.getConnectionSpecification()) - .sourceDefinitionId(sourceDefinitionId); - - if (spec.getDocumentationUrl() != null) { - specRead.documentationUrl(spec.getDocumentationUrl().toString()); - } - - final Optional authSpec = OauthModelConverter.getAuthSpec(spec); - authSpec.ifPresent(specRead::setAuthSpecification); - - final Optional advancedAuth = OauthModelConverter.getAdvancedAuth(spec); - advancedAuth.ifPresent(specRead::setAdvancedAuth); - - return specRead; - } - - public DestinationDefinitionSpecificationRead getDestinationSpecification( - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) - throws ConfigNotFoundException, IOException, JsonValidationException { - final UUID destinationDefinitionId = destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId(); - final StandardDestinationDefinition destination = configRepository.getStandardDestinationDefinition(destinationDefinitionId); - final ConnectorSpecification spec = destination.getSpec(); - - final DestinationDefinitionSpecificationRead specRead = new 
DestinationDefinitionSpecificationRead() - .jobInfo(jobConverter.getSynchronousJobRead(SynchronousJobMetadata.mock(ConfigType.GET_SPEC))) - .supportedDestinationSyncModes(Enums.convertListTo(spec.getSupportedDestinationSyncModes(), DestinationSyncMode.class)) - .connectionSpecification(spec.getConnectionSpecification()) - .documentationUrl(spec.getDocumentationUrl().toString()) - .destinationDefinitionId(destinationDefinitionId); - - final Optional authSpec = OauthModelConverter.getAuthSpec(spec); - authSpec.ifPresent(specRead::setAuthSpecification); - - final Optional advancedAuth = OauthModelConverter.getAdvancedAuth(spec); - advancedAuth.ifPresent(specRead::setAdvancedAuth); - - return specRead; - } - - public JobInfoRead syncConnection(final ConnectionIdRequestBody connectionIdRequestBody) - throws IOException, JsonValidationException, ConfigNotFoundException { - return submitManualSyncToWorker(connectionIdRequestBody.getConnectionId()); - } - - public JobInfoRead resetConnection(final ConnectionIdRequestBody connectionIdRequestBody) - throws IOException, JsonValidationException, ConfigNotFoundException { - return submitResetConnectionToWorker(connectionIdRequestBody.getConnectionId()); - } - - public JobInfoRead cancelJob(final JobIdRequestBody jobIdRequestBody) throws IOException { - return submitCancellationToWorker(jobIdRequestBody.getId()); - } - - // Find all connections that use the source from the SourceDiscoverSchemaRequestBody. For each one, - // determine whether 1. the source schema change resulted in a broken connection or 2. the user - // wants the connection disabled when non-breaking changes are detected. If so, disable that - // connection. Modify the current discoveredSchema object to add a CatalogDiff, - // containsBreakingChange parameter, and connectionStatus parameter. 
- private void generateCatalogDiffsAndDisableConnectionsIfNeeded(final SourceDiscoverSchemaRead discoveredSchema, - final SourceDiscoverSchemaRequestBody discoverSchemaRequestBody, - final UUID workspaceId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final ConnectionReadList connectionsForSource = connectionsHandler.listConnectionsForSource(discoverSchemaRequestBody.getSourceId(), false); - for (final ConnectionRead connectionRead : connectionsForSource.getConnections()) { - final Optional catalogUsedToMakeConfiguredCatalog = connectionsHandler - .getConnectionAirbyteCatalog(connectionRead.getConnectionId()); - final io.airbyte.api.model.generated.@NotNull AirbyteCatalog currentAirbyteCatalog = - connectionRead.getSyncCatalog(); - final CatalogDiff diff = - connectionsHandler.getDiff(catalogUsedToMakeConfiguredCatalog.orElse(currentAirbyteCatalog), discoveredSchema.getCatalog(), - CatalogConverter.toConfiguredProtocol(currentAirbyteCatalog)); - final boolean containsBreakingChange = containsBreakingChange(diff); - final ConnectionUpdate updateObject = - new ConnectionUpdate().breakingChange(containsBreakingChange).connectionId(connectionRead.getConnectionId()); - final ConnectionStatus connectionStatus; - if (shouldDisableConnection(containsBreakingChange, connectionRead.getNonBreakingChangesPreference(), diff)) { - connectionStatus = ConnectionStatus.INACTIVE; - } else { - connectionStatus = connectionRead.getStatus(); - } - updateObject.status(connectionStatus); - connectionsHandler.updateConnection(updateObject); - if (shouldNotifySchemaChange(diff, connectionRead, discoverSchemaRequestBody)) { - final String url = webUrlHelper.getConnectionUrl(workspaceId, connectionRead.getConnectionId()); - eventRunner.sendSchemaChangeNotification(connectionRead.getConnectionId(), url); - } - if (connectionRead.getConnectionId().equals(discoverSchemaRequestBody.getConnectionId())) { - 
discoveredSchema.catalogDiff(diff).breakingChange(containsBreakingChange).connectionStatus(connectionStatus); - } - } - } - - private boolean shouldNotifySchemaChange(final CatalogDiff diff, - final ConnectionRead connectionRead, - final SourceDiscoverSchemaRequestBody requestBody) { - return !diff.getTransforms().isEmpty() && connectionRead.getNotifySchemaChanges() && requestBody.getNotifySchemaChange() != null - && requestBody.getNotifySchemaChange(); - } - - private boolean shouldDisableConnection(final boolean containsBreakingChange, - final NonBreakingChangesPreference preference, - final CatalogDiff diff) { - if (!envVariableFeatureFlags.autoDetectSchema()) { - return false; - } - - return containsBreakingChange || (preference == NonBreakingChangesPreference.DISABLE && !diff.getTransforms().isEmpty()); - } - - private CheckConnectionRead reportConnectionStatus(final SynchronousResponse response) { - final CheckConnectionRead checkConnectionRead = new CheckConnectionRead() - .jobInfo(jobConverter.getSynchronousJobRead(response)); - - if (response.getOutput() != null) { - checkConnectionRead - .status(Enums.convertTo(response.getOutput().getStatus(), StatusEnum.class)) - .message(response.getOutput().getMessage()); - } else { - checkConnectionRead - .status(StatusEnum.FAILED) - .message("Check Connection Failed!"); - } - - return checkConnectionRead; - } - - private ConnectorSpecification getSpecFromSourceDefinitionId(final UUID sourceDefId) - throws IOException, JsonValidationException, ConfigNotFoundException { - final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(sourceDefId); - return sourceDef.getSpec(); - } - - private ConnectorSpecification getSpecFromDestinationDefinitionId(final UUID destDefId) - throws IOException, JsonValidationException, ConfigNotFoundException { - final StandardDestinationDefinition destinationDef = configRepository.getStandardDestinationDefinition(destDefId); - return destinationDef.getSpec(); - 
} - - private JobInfoRead submitCancellationToWorker(final Long jobId) throws IOException { - final Job job = jobPersistence.getJob(jobId); - - final ManualOperationResult cancellationResult = eventRunner.startNewCancellation(UUID.fromString(job.getScope())); - if (cancellationResult.getFailingReason().isPresent()) { - throw new IllegalStateException(cancellationResult.getFailingReason().get()); - } - - // query same job ID again to get updated job info after cancellation - return jobConverter.getJobInfoRead(jobPersistence.getJob(jobId)); - } - - private JobInfoRead submitManualSyncToWorker(final UUID connectionId) - throws IOException, IllegalStateException, JsonValidationException, ConfigNotFoundException { - // get standard sync to validate connection id before submitting sync to temporal - configRepository.getStandardSync(connectionId); - final ManualOperationResult manualSyncResult = eventRunner.startNewManualSync(connectionId); - - return readJobFromResult(manualSyncResult); - } - - private JobInfoRead submitResetConnectionToWorker(final UUID connectionId) throws IOException, IllegalStateException, ConfigNotFoundException { - final ManualOperationResult resetConnectionResult = eventRunner.resetConnection( - connectionId, - configRepository.getAllStreamsForConnection(connectionId), - false); - - return readJobFromResult(resetConnectionResult); - } - - private JobInfoRead readJobFromResult(final ManualOperationResult manualOperationResult) throws IOException, IllegalStateException { - if (manualOperationResult.getFailingReason().isPresent()) { - if (VALUE_CONFLICT_EXCEPTION_ERROR_CODE_SET.contains(manualOperationResult.getErrorCode().get())) { - throw new ValueConflictKnownException(manualOperationResult.getFailingReason().get()); - } else { - throw new IllegalStateException(manualOperationResult.getFailingReason().get()); - } - } - - final Job job = jobPersistence.getJob(manualOperationResult.getJobId().get()); - - return jobConverter.getJobInfoRead(job); - } 
- - private boolean containsBreakingChange(final CatalogDiff diff) { - for (final StreamTransform streamTransform : diff.getTransforms()) { - if (streamTransform.getTransformType() != TransformTypeEnum.UPDATE_STREAM) { - continue; - } - - final boolean anyBreakingFieldTransforms = streamTransform.getUpdateStream().stream().anyMatch(FieldTransform::getBreaking); - if (anyBreakingFieldTransforms) { - return true; - } - } - - return false; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandler.java deleted file mode 100644 index c2ad9aa4189c..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandler.java +++ /dev/null @@ -1,335 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.api.model.generated.CustomSourceDefinitionCreate; -import io.airbyte.api.model.generated.PrivateSourceDefinitionRead; -import io.airbyte.api.model.generated.PrivateSourceDefinitionReadList; -import io.airbyte.api.model.generated.ReleaseStage; -import io.airbyte.api.model.generated.SourceDefinitionCreate; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionRead; -import io.airbyte.api.model.generated.SourceDefinitionRead.SourceTypeEnum; -import io.airbyte.api.model.generated.SourceDefinitionReadList; -import io.airbyte.api.model.generated.SourceDefinitionUpdate; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.server.ServerConstants; -import 
io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.commons.server.converters.SpecFetcher; -import io.airbyte.commons.server.errors.IdNotFoundKnownException; -import io.airbyte.commons.server.errors.InternalServerKnownException; -import io.airbyte.commons.server.errors.UnsupportedProtocolVersionException; -import io.airbyte.commons.server.scheduler.SynchronousResponse; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.commons.server.services.AirbyteGithubStore; -import io.airbyte.commons.util.MoreLists; -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.ActorType; -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Inject; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.time.LocalDate; -import java.util.List; -import java.util.Map.Entry; -import java.util.UUID; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -@SuppressWarnings("PMD.AvoidCatchingNPE") -@Singleton -public class SourceDefinitionsHandler { - - private final ConfigRepository configRepository; - private final Supplier uuidSupplier; - private final AirbyteGithubStore githubStore; - private final SynchronousSchedulerClient schedulerSynchronousClient; - private final SourceHandler sourceHandler; - private final AirbyteProtocolVersionRange protocolVersionRange; - - @Inject - public 
SourceDefinitionsHandler(final ConfigRepository configRepository, - final Supplier uuidSupplier, - final SynchronousSchedulerClient schedulerSynchronousClient, - final AirbyteGithubStore githubStore, - final SourceHandler sourceHandler, - final AirbyteProtocolVersionRange protocolVersionRange) { - this.configRepository = configRepository; - this.uuidSupplier = uuidSupplier; - this.schedulerSynchronousClient = schedulerSynchronousClient; - this.githubStore = githubStore; - this.sourceHandler = sourceHandler; - this.protocolVersionRange = protocolVersionRange; - } - - // This should be deleted when cloud is migrated to micronaut - @Deprecated(forRemoval = true) - public SourceDefinitionsHandler(final ConfigRepository configRepository, - final SynchronousSchedulerClient schedulerSynchronousClient, - final SourceHandler sourceHandler) { - this.configRepository = configRepository; - this.uuidSupplier = UUID::randomUUID; - this.schedulerSynchronousClient = schedulerSynchronousClient; - this.githubStore = AirbyteGithubStore.production(); - this.sourceHandler = sourceHandler; - final Configs configs = new EnvConfigs(); - this.protocolVersionRange = new AirbyteProtocolVersionRange(configs.getAirbyteProtocolVersionMin(), configs.getAirbyteProtocolVersionMax()); - } - - @VisibleForTesting - static SourceDefinitionRead buildSourceDefinitionRead(final StandardSourceDefinition standardSourceDefinition) { - try { - return new SourceDefinitionRead() - .sourceDefinitionId(standardSourceDefinition.getSourceDefinitionId()) - .name(standardSourceDefinition.getName()) - .sourceType(getSourceType(standardSourceDefinition)) - .dockerRepository(standardSourceDefinition.getDockerRepository()) - .dockerImageTag(standardSourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(standardSourceDefinition.getDocumentationUrl())) - .icon(loadIcon(standardSourceDefinition.getIcon())) - .protocolVersion(standardSourceDefinition.getProtocolVersion()) - 
.releaseStage(getReleaseStage(standardSourceDefinition)) - .releaseDate(getReleaseDate(standardSourceDefinition)) - .resourceRequirements(ApiPojoConverters.actorDefResourceReqsToApi(standardSourceDefinition.getResourceRequirements())); - - } catch (final URISyntaxException | NullPointerException e) { - throw new InternalServerKnownException("Unable to process retrieved latest source definitions list", e); - } - } - - private static SourceTypeEnum getSourceType(final StandardSourceDefinition standardSourceDefinition) { - if (standardSourceDefinition.getSourceType() == null) { - return null; - } - return SourceTypeEnum.fromValue(standardSourceDefinition.getSourceType().value()); - } - - private static ReleaseStage getReleaseStage(final StandardSourceDefinition standardSourceDefinition) { - if (standardSourceDefinition.getReleaseStage() == null) { - return null; - } - return ReleaseStage.fromValue(standardSourceDefinition.getReleaseStage().value()); - } - - private static LocalDate getReleaseDate(final StandardSourceDefinition standardSourceDefinition) { - if (standardSourceDefinition.getReleaseDate() == null || standardSourceDefinition.getReleaseDate().isBlank()) { - return null; - } - - return LocalDate.parse(standardSourceDefinition.getReleaseDate()); - } - - public SourceDefinitionReadList listSourceDefinitions() throws IOException, JsonValidationException { - return toSourceDefinitionReadList(configRepository.listStandardSourceDefinitions(false)); - } - - private static SourceDefinitionReadList toSourceDefinitionReadList(final List defs) { - final List reads = defs.stream() - .map(SourceDefinitionsHandler::buildSourceDefinitionRead) - .collect(Collectors.toList()); - return new SourceDefinitionReadList().sourceDefinitions(reads); - } - - public SourceDefinitionReadList listLatestSourceDefinitions() { - return toSourceDefinitionReadList(getLatestSources()); - } - - private List getLatestSources() { - try { - return githubStore.getLatestSources(); - } catch (final 
InterruptedException e) { - throw new InternalServerKnownException("Request to retrieve latest destination definitions failed", e); - } - } - - public SourceDefinitionReadList listSourceDefinitionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws IOException { - return toSourceDefinitionReadList(MoreLists.concat( - configRepository.listPublicSourceDefinitions(false), - configRepository.listGrantedSourceDefinitions(workspaceIdRequestBody.getWorkspaceId(), false))); - } - - public PrivateSourceDefinitionReadList listPrivateSourceDefinitions(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws IOException { - final List> standardSourceDefinitionBooleanMap = - configRepository.listGrantableSourceDefinitions(workspaceIdRequestBody.getWorkspaceId(), false); - return toPrivateSourceDefinitionReadList(standardSourceDefinitionBooleanMap); - } - - private static PrivateSourceDefinitionReadList toPrivateSourceDefinitionReadList(final List> defs) { - final List reads = defs.stream() - .map(entry -> new PrivateSourceDefinitionRead() - .sourceDefinition(buildSourceDefinitionRead(entry.getKey())) - .granted(entry.getValue())) - .collect(Collectors.toList()); - return new PrivateSourceDefinitionReadList().sourceDefinitions(reads); - } - - public SourceDefinitionRead getSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - return buildSourceDefinitionRead(configRepository.getStandardSourceDefinition(sourceDefinitionIdRequestBody.getSourceDefinitionId())); - } - - public SourceDefinitionRead getSourceDefinitionForWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) - throws ConfigNotFoundException, IOException, JsonValidationException { - final UUID definitionId = sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId(); - final UUID workspaceId = sourceDefinitionIdWithWorkspaceId.getWorkspaceId(); - if 
(!configRepository.workspaceCanUseDefinition(definitionId, workspaceId)) { - throw new IdNotFoundKnownException("Cannot find the requested definition with given id for this workspace", definitionId.toString()); - } - return getSourceDefinition(new SourceDefinitionIdRequestBody().sourceDefinitionId(definitionId)); - } - - public SourceDefinitionRead createCustomSourceDefinition(final CustomSourceDefinitionCreate customSourceDefinitionCreate) - throws IOException { - final StandardSourceDefinition sourceDefinition = sourceDefinitionFromCreate(customSourceDefinitionCreate.getSourceDefinition()) - .withPublic(false) - .withCustom(true); - if (!protocolVersionRange.isSupported(new Version(sourceDefinition.getProtocolVersion()))) { - throw new UnsupportedProtocolVersionException(sourceDefinition.getProtocolVersion(), protocolVersionRange.min(), protocolVersionRange.max()); - } - configRepository.writeCustomSourceDefinition(sourceDefinition, customSourceDefinitionCreate.getWorkspaceId()); - - return buildSourceDefinitionRead(sourceDefinition); - } - - private StandardSourceDefinition sourceDefinitionFromCreate(final SourceDefinitionCreate sourceDefinitionCreate) - throws IOException { - final ConnectorSpecification spec = - getSpecForImage( - sourceDefinitionCreate.getDockerRepository(), - sourceDefinitionCreate.getDockerImageTag(), - // Only custom connectors can be created via handlers. 
- true); - - final Version airbyteProtocolVersion = AirbyteProtocolVersion.getWithDefault(spec.getProtocolVersion()); - - final UUID id = uuidSupplier.get(); - return new StandardSourceDefinition() - .withSourceDefinitionId(id) - .withDockerRepository(sourceDefinitionCreate.getDockerRepository()) - .withDockerImageTag(sourceDefinitionCreate.getDockerImageTag()) - .withDocumentationUrl(sourceDefinitionCreate.getDocumentationUrl().toString()) - .withName(sourceDefinitionCreate.getName()) - .withIcon(sourceDefinitionCreate.getIcon()) - .withSpec(spec) - .withProtocolVersion(airbyteProtocolVersion.serialize()) - .withTombstone(false) - .withReleaseStage(StandardSourceDefinition.ReleaseStage.CUSTOM) - .withResourceRequirements(ApiPojoConverters.actorDefResourceReqsToInternal(sourceDefinitionCreate.getResourceRequirements())); - } - - public SourceDefinitionRead updateSourceDefinition(final SourceDefinitionUpdate sourceDefinitionUpdate) - throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardSourceDefinition currentSourceDefinition = - configRepository.getStandardSourceDefinition(sourceDefinitionUpdate.getSourceDefinitionId()); - - // specs are re-fetched from the container if the image tag has changed, or if the tag is "dev", - // to allow for easier iteration of dev images - final boolean specNeedsUpdate = !currentSourceDefinition.getDockerImageTag().equals(sourceDefinitionUpdate.getDockerImageTag()) - || ServerConstants.DEV_IMAGE_TAG.equals(sourceDefinitionUpdate.getDockerImageTag()); - final ConnectorSpecification spec = specNeedsUpdate - ? getSpecForImage(currentSourceDefinition.getDockerRepository(), sourceDefinitionUpdate.getDockerImageTag(), - currentSourceDefinition.getCustom()) - : currentSourceDefinition.getSpec(); - final ActorDefinitionResourceRequirements updatedResourceReqs = sourceDefinitionUpdate.getResourceRequirements() != null - ? 
ApiPojoConverters.actorDefResourceReqsToInternal(sourceDefinitionUpdate.getResourceRequirements()) - : currentSourceDefinition.getResourceRequirements(); - - final Version airbyteProtocolVersion = AirbyteProtocolVersion.getWithDefault(spec.getProtocolVersion()); - if (!protocolVersionRange.isSupported(airbyteProtocolVersion)) { - throw new UnsupportedProtocolVersionException(airbyteProtocolVersion, protocolVersionRange.min(), protocolVersionRange.max()); - } - - final StandardSourceDefinition newSource = new StandardSourceDefinition() - .withSourceDefinitionId(currentSourceDefinition.getSourceDefinitionId()) - .withDockerImageTag(sourceDefinitionUpdate.getDockerImageTag()) - .withDockerRepository(currentSourceDefinition.getDockerRepository()) - .withDocumentationUrl(currentSourceDefinition.getDocumentationUrl()) - .withName(currentSourceDefinition.getName()) - .withIcon(currentSourceDefinition.getIcon()) - .withSpec(spec) - .withProtocolVersion(airbyteProtocolVersion.serialize()) - .withTombstone(currentSourceDefinition.getTombstone()) - .withPublic(currentSourceDefinition.getPublic()) - .withCustom(currentSourceDefinition.getCustom()) - .withReleaseStage(currentSourceDefinition.getReleaseStage()) - .withReleaseDate(currentSourceDefinition.getReleaseDate()) - .withResourceRequirements(updatedResourceReqs); - - configRepository.writeStandardSourceDefinition(newSource); - configRepository.clearUnsupportedProtocolVersionFlag(newSource.getSourceDefinitionId(), ActorType.SOURCE, protocolVersionRange); - - return buildSourceDefinitionRead(newSource); - } - - public void deleteSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - // "delete" all sources associated with the source definition as well. This will cascade to - // connections that depend on any deleted sources. - // Delete sources first in case a failure occurs mid-operation. 
- - final StandardSourceDefinition persistedSourceDefinition = - configRepository.getStandardSourceDefinition(sourceDefinitionIdRequestBody.getSourceDefinitionId()); - - for (final SourceRead sourceRead : sourceHandler.listSourcesForSourceDefinition(sourceDefinitionIdRequestBody).getSources()) { - sourceHandler.deleteSource(sourceRead); - } - - persistedSourceDefinition.withTombstone(true); - configRepository.writeStandardSourceDefinition(persistedSourceDefinition); - } - - private ConnectorSpecification getSpecForImage(final String dockerRepository, final String imageTag, final boolean isCustomConnector) - throws IOException { - final String imageName = dockerRepository + ":" + imageTag; - final SynchronousResponse getSpecResponse = schedulerSynchronousClient.createGetSpecJob(imageName, isCustomConnector); - return SpecFetcher.getSpecFromJob(getSpecResponse); - } - - public static String loadIcon(final String name) { - try { - return name == null ? null : MoreResources.readResource("icons/" + name); - } catch (final Exception e) { - return null; - } - } - - public PrivateSourceDefinitionRead grantSourceDefinitionToWorkspace( - final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSourceDefinition standardSourceDefinition = - configRepository.getStandardSourceDefinition(sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId()); - configRepository.writeActorDefinitionWorkspaceGrant( - sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId(), - sourceDefinitionIdWithWorkspaceId.getWorkspaceId()); - return new PrivateSourceDefinitionRead() - .sourceDefinition(buildSourceDefinitionRead(standardSourceDefinition)) - .granted(true); - } - - public void revokeSourceDefinitionFromWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) - throws IOException { - configRepository.deleteActorDefinitionWorkspaceGrant( - 
sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId(), - sourceDefinitionIdWithWorkspaceId.getWorkspaceId()); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java deleted file mode 100644 index 102b4cf2ac8e..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java +++ /dev/null @@ -1,370 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.ActorCatalogWithUpdatedAt; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.DiscoverCatalogResult; -import io.airbyte.api.model.generated.SourceCloneConfiguration; -import io.airbyte.api.model.generated.SourceCloneRequestBody; -import io.airbyte.api.model.generated.SourceCreate; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceDiscoverSchemaWriteRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceReadList; -import io.airbyte.api.model.generated.SourceSearch; -import io.airbyte.api.model.generated.SourceSnippetRead; -import io.airbyte.api.model.generated.SourceUpdate; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.server.converters.ConfigurationUpdate; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import 
io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; -import io.airbyte.persistence.job.factory.OAuthConfigSupplier; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Inject; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Supplier; - -@Singleton -public class SourceHandler { - - private final Supplier uuidGenerator; - private final ConfigRepository configRepository; - private final SecretsRepositoryReader secretsRepositoryReader; - private final SecretsRepositoryWriter secretsRepositoryWriter; - private final JsonSchemaValidator validator; - private final ConnectionsHandler connectionsHandler; - private final ConfigurationUpdate configurationUpdate; - private final JsonSecretsProcessor secretsProcessor; - private final OAuthConfigSupplier oAuthConfigSupplier; - - @Inject - SourceHandler(final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final JsonSchemaValidator integrationSchemaValidation, - final ConnectionsHandler connectionsHandler, - final Supplier uuidGenerator, - final JsonSecretsProcessor secretsProcessor, - final ConfigurationUpdate configurationUpdate, - final OAuthConfigSupplier oAuthConfigSupplier) { - this.configRepository = configRepository; - this.secretsRepositoryReader = secretsRepositoryReader; - this.secretsRepositoryWriter = secretsRepositoryWriter; - validator = integrationSchemaValidation; - this.connectionsHandler = connectionsHandler; - this.uuidGenerator = uuidGenerator; - 
this.configurationUpdate = configurationUpdate; - this.secretsProcessor = secretsProcessor; - this.oAuthConfigSupplier = oAuthConfigSupplier; - } - - public SourceHandler(final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final JsonSchemaValidator integrationSchemaValidation, - final ConnectionsHandler connectionsHandler, - final OAuthConfigSupplier oAuthConfigSupplier) { - this( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - integrationSchemaValidation, - connectionsHandler, - UUID::randomUUID, - JsonSecretsProcessor.builder() - .copySecrets(true) - .build(), - new ConfigurationUpdate(configRepository, secretsRepositoryReader), - oAuthConfigSupplier); - } - - public SourceRead createSource(final SourceCreate sourceCreate) - throws ConfigNotFoundException, IOException, JsonValidationException { - // validate configuration - final ConnectorSpecification spec = getSpecFromSourceDefinitionId( - sourceCreate.getSourceDefinitionId()); - validateSource(spec, sourceCreate.getConnectionConfiguration()); - - // persist - final UUID sourceId = uuidGenerator.get(); - persistSourceConnection( - sourceCreate.getName() != null ? 
sourceCreate.getName() : "default", - sourceCreate.getSourceDefinitionId(), - sourceCreate.getWorkspaceId(), - sourceId, - false, - sourceCreate.getConnectionConfiguration(), - spec); - - // read configuration from db - return buildSourceRead(configRepository.getSourceConnection(sourceId), spec); - } - - public SourceRead updateSource(final SourceUpdate sourceUpdate) - throws ConfigNotFoundException, IOException, JsonValidationException { - - final UUID sourceId = sourceUpdate.getSourceId(); - final SourceConnection updatedSource = configurationUpdate - .source(sourceId, sourceUpdate.getName(), - sourceUpdate.getConnectionConfiguration()); - final ConnectorSpecification spec = getSpecFromSourceId(sourceId); - validateSource(spec, sourceUpdate.getConnectionConfiguration()); - - // persist - persistSourceConnection( - updatedSource.getName(), - updatedSource.getSourceDefinitionId(), - updatedSource.getWorkspaceId(), - updatedSource.getSourceId(), - updatedSource.getTombstone(), - updatedSource.getConfiguration(), - spec); - - // read configuration from db - return buildSourceRead(configRepository.getSourceConnection(sourceId), spec); - } - - public SourceRead getSource(final SourceIdRequestBody sourceIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - return buildSourceRead(sourceIdRequestBody.getSourceId()); - } - - public ActorCatalogWithUpdatedAt getMostRecentSourceActorCatalogWithUpdatedAt(final SourceIdRequestBody sourceIdRequestBody) - throws IOException { - Optional actorCatalog = - configRepository.getMostRecentSourceActorCatalog(sourceIdRequestBody.getSourceId()); - if (actorCatalog.isEmpty()) { - return new ActorCatalogWithUpdatedAt(); - } else { - return new ActorCatalogWithUpdatedAt().updatedAt(actorCatalog.get().getUpdatedAt()).catalog(actorCatalog.get().getCatalog()); - } - } - - public SourceRead cloneSource(final SourceCloneRequestBody sourceCloneRequestBody) - throws JsonValidationException, IOException, 
ConfigNotFoundException { - // read source configuration from db - final SourceRead sourceToClone = buildSourceReadWithSecrets(sourceCloneRequestBody.getSourceCloneId()); - final SourceCloneConfiguration sourceCloneConfiguration = sourceCloneRequestBody.getSourceConfiguration(); - - final String copyText = " (Copy)"; - final String sourceName = sourceToClone.getName() + copyText; - - final SourceCreate sourceCreate = new SourceCreate() - .name(sourceName) - .sourceDefinitionId(sourceToClone.getSourceDefinitionId()) - .connectionConfiguration(sourceToClone.getConnectionConfiguration()) - .workspaceId(sourceToClone.getWorkspaceId()); - - if (sourceCloneConfiguration != null) { - if (sourceCloneConfiguration.getName() != null) { - sourceCreate.name(sourceCloneConfiguration.getName()); - } - - if (sourceCloneConfiguration.getConnectionConfiguration() != null) { - sourceCreate.connectionConfiguration(sourceCloneConfiguration.getConnectionConfiguration()); - } - } - - return createSource(sourceCreate); - } - - public SourceReadList listSourcesForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - - final List sourceConnections = configRepository.listWorkspaceSourceConnection(workspaceIdRequestBody.getWorkspaceId()); - - final List reads = Lists.newArrayList(); - for (final SourceConnection sc : sourceConnections) { - reads.add(buildSourceRead(sc)); - } - - return new SourceReadList().sources(reads); - } - - public SourceReadList listSourcesForSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - - final List reads = Lists.newArrayList(); - for (final SourceConnection sourceConnection : configRepository.listSourcesForDefinition(sourceDefinitionIdRequestBody.getSourceDefinitionId())) { - reads.add(buildSourceRead(sourceConnection)); - } - - return new SourceReadList().sources(reads); - 
} - - public SourceReadList searchSources(final SourceSearch sourceSearch) - throws ConfigNotFoundException, IOException, JsonValidationException { - final List reads = Lists.newArrayList(); - - for (final SourceConnection sci : configRepository.listSourceConnection()) { - if (!sci.getTombstone()) { - final SourceRead sourceRead = buildSourceRead(sci); - if (connectionsHandler.matchSearch(sourceSearch, sourceRead)) { - reads.add(sourceRead); - } - } - } - - return new SourceReadList().sources(reads); - } - - public void deleteSource(final SourceIdRequestBody sourceIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - // get existing source - final SourceRead source = buildSourceRead(sourceIdRequestBody.getSourceId()); - deleteSource(source); - } - - public void deleteSource(final SourceRead source) - throws JsonValidationException, IOException, ConfigNotFoundException { - // "delete" all connections associated with source as well. - // Delete connections first in case it fails in the middle, source will still be visible - final var workspaceIdRequestBody = new WorkspaceIdRequestBody() - .workspaceId(source.getWorkspaceId()); - - final List uuidsToDelete = connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody) - .getConnections().stream() - .filter(con -> con.getSourceId().equals(source.getSourceId())) - .map(ConnectionRead::getConnectionId) - .toList(); - - for (final UUID uuidToDelete : uuidsToDelete) { - connectionsHandler.deleteConnection(uuidToDelete); - } - - final var spec = getSpecFromSourceId(source.getSourceId()); - final var fullConfig = secretsRepositoryReader.getSourceConnectionWithSecrets(source.getSourceId()).getConfiguration(); - - // persist - persistSourceConnection( - source.getName(), - source.getSourceDefinitionId(), - source.getWorkspaceId(), - source.getSourceId(), - true, - fullConfig, - spec); - } - - public DiscoverCatalogResult writeDiscoverCatalogResult(final 
SourceDiscoverSchemaWriteRequestBody request) - throws JsonValidationException, IOException { - final AirbyteCatalog persistenceCatalog = CatalogConverter.toProtocol(request.getCatalog()); - UUID catalogId = configRepository.writeActorCatalogFetchEvent( - persistenceCatalog, - request.getSourceId(), - request.getConnectorVersion(), - request.getConfigurationHash()); - return new DiscoverCatalogResult().catalogId(catalogId); - } - - private SourceRead buildSourceRead(final UUID sourceId) - throws ConfigNotFoundException, IOException, JsonValidationException { - // read configuration from db - final SourceConnection sourceConnection = configRepository.getSourceConnection(sourceId); - return buildSourceRead(sourceConnection); - } - - private SourceRead buildSourceRead(final SourceConnection sourceConnection) - throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardSourceDefinition sourceDef = configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId()); - final ConnectorSpecification spec = sourceDef.getSpec(); - return buildSourceRead(sourceConnection, spec); - } - - private SourceRead buildSourceRead(final SourceConnection sourceConnection, final ConnectorSpecification spec) - throws ConfigNotFoundException, IOException, JsonValidationException { - // read configuration from db - final StandardSourceDefinition standardSourceDefinition = configRepository - .getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); - final JsonNode sanitizedConfig = secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), spec.getConnectionSpecification()); - sourceConnection.setConfiguration(sanitizedConfig); - return toSourceRead(sourceConnection, standardSourceDefinition); - } - - private SourceRead buildSourceReadWithSecrets(final UUID sourceId) - throws ConfigNotFoundException, IOException, JsonValidationException { - // read configuration from db - final SourceConnection sourceConnection = 
secretsRepositoryReader.getSourceConnectionWithSecrets(sourceId); - final StandardSourceDefinition standardSourceDefinition = configRepository - .getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); - return toSourceRead(sourceConnection, standardSourceDefinition); - } - - private void validateSource(final ConnectorSpecification spec, final JsonNode implementationJson) - throws JsonValidationException { - validator.ensure(spec.getConnectionSpecification(), implementationJson); - } - - private ConnectorSpecification getSpecFromSourceId(final UUID sourceId) - throws IOException, JsonValidationException, ConfigNotFoundException { - final SourceConnection source = configRepository.getSourceConnection(sourceId); - return getSpecFromSourceDefinitionId(source.getSourceDefinitionId()); - } - - private ConnectorSpecification getSpecFromSourceDefinitionId(final UUID sourceDefId) - throws IOException, JsonValidationException, ConfigNotFoundException { - final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(sourceDefId); - return sourceDef.getSpec(); - } - - private void persistSourceConnection(final String name, - final UUID sourceDefinitionId, - final UUID workspaceId, - final UUID sourceId, - final boolean tombstone, - final JsonNode configurationJson, - final ConnectorSpecification spec) - throws JsonValidationException, IOException { - final JsonNode oAuthMaskedConfigurationJson = oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionId, workspaceId, configurationJson); - final SourceConnection sourceConnection = new SourceConnection() - .withName(name) - .withSourceDefinitionId(sourceDefinitionId) - .withWorkspaceId(workspaceId) - .withSourceId(sourceId) - .withTombstone(tombstone) - .withConfiguration(oAuthMaskedConfigurationJson); - secretsRepositoryWriter.writeSourceConnection(sourceConnection, spec); - } - - protected static SourceRead toSourceRead(final SourceConnection sourceConnection, - final 
StandardSourceDefinition standardSourceDefinition) { - return new SourceRead() - .sourceDefinitionId(standardSourceDefinition.getSourceDefinitionId()) - .sourceName(standardSourceDefinition.getName()) - .sourceId(sourceConnection.getSourceId()) - .workspaceId(sourceConnection.getWorkspaceId()) - .sourceDefinitionId(sourceConnection.getSourceDefinitionId()) - .connectionConfiguration(sourceConnection.getConfiguration()) - .name(sourceConnection.getName()) - .icon(SourceDefinitionsHandler.loadIcon(standardSourceDefinition.getIcon())); - } - - protected static SourceSnippetRead toSourceSnippetRead(final SourceConnection source, final StandardSourceDefinition sourceDefinition) { - return new SourceSnippetRead() - .sourceId(source.getSourceId()) - .name(source.getName()) - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .sourceName(sourceDefinition.getName()) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/StateHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/StateHandler.java deleted file mode 100644 index 7ca6709634aa..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/StateHandler.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateCreateOrUpdate; -import io.airbyte.config.StateWrapper; -import io.airbyte.config.persistence.StatePersistence; -import io.airbyte.workers.helper.StateConverter; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.Optional; -import java.util.UUID; - -@Singleton -public class StateHandler { - - private final StatePersistence statePersistence; - - public StateHandler(final StatePersistence statePersistence) { - this.statePersistence = statePersistence; - } - - public ConnectionState getState(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException { - final UUID connectionId = connectionIdRequestBody.getConnectionId(); - final Optional currentState = statePersistence.getCurrentState(connectionId); - return StateConverter.toApi(connectionId, currentState.orElse(null)); - } - - public ConnectionState createOrUpdateState(final ConnectionStateCreateOrUpdate connectionStateCreateOrUpdate) throws IOException { - final UUID connectionId = connectionStateCreateOrUpdate.getConnectionId(); - - final StateWrapper convertedCreateOrUpdate = StateConverter.toInternal(connectionStateCreateOrUpdate.getConnectionState()); - statePersistence.updateOrCreateState(connectionId, convertedCreateOrUpdate); - final Optional newInternalState = statePersistence.getCurrentState(connectionId); - - return StateConverter.toApi(connectionId, newInternalState.orElse(null)); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandler.java deleted file mode 100644 index daedd09fa59d..000000000000 --- 
a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandler.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import io.airbyte.api.model.generated.WebBackendCheckUpdatesRead; -import io.airbyte.commons.server.services.AirbyteGithubStore; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigRepository; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.UUID; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - -/** - * The web backend is an abstraction that allows the frontend to structure data in such a way that - * it is easier for a react frontend to consume. It should NOT have direct access to the database. - * It should operate exclusively by calling other endpoints that are exposed in the API. - **/ -@Slf4j -@Singleton -public class WebBackendCheckUpdatesHandler { - - private static final int NO_CHANGES_FOUND = 0; - - // todo (cgardens) - this handler should NOT have access to the db. only access via handler. 
- @Deprecated - final ConfigRepository configRepositoryDoNotUse; - final AirbyteGithubStore githubStore; - - public WebBackendCheckUpdatesHandler(final ConfigRepository configRepositoryDoNotUse, final AirbyteGithubStore githubStore) { - this.configRepositoryDoNotUse = configRepositoryDoNotUse; - this.githubStore = githubStore; - } - - public WebBackendCheckUpdatesRead checkUpdates() { - - final int destinationDiffCount = getDestinationDiffCount(); - final int sourceDiffCount = getSourceDiffCount(); - - return new WebBackendCheckUpdatesRead() - .destinationDefinitions(destinationDiffCount) - .sourceDefinitions(sourceDiffCount); - } - - private int getDestinationDiffCount() { - final List> currentActorDefToDockerImageTag; - final Map newActorDefToDockerImageTag; - - try { - currentActorDefToDockerImageTag = configRepositoryDoNotUse.listStandardDestinationDefinitions(false) - .stream() - .map(def -> Map.entry(def.getDestinationDefinitionId(), def.getDockerImageTag())) - .toList(); - } catch (final IOException e) { - log.error("Failed to get current list of standard destination definitions", e); - return NO_CHANGES_FOUND; - } - - try { - newActorDefToDockerImageTag = githubStore.getLatestDestinations() - .stream() - .collect(Collectors.toMap(StandardDestinationDefinition::getDestinationDefinitionId, StandardDestinationDefinition::getDockerImageTag)); - } catch (final InterruptedException e) { - log.error("Failed to get latest list of standard destination definitions", e); - return NO_CHANGES_FOUND; - } - - return getDiffCount(currentActorDefToDockerImageTag, newActorDefToDockerImageTag); - } - - private int getSourceDiffCount() { - final List> currentActorDefToDockerImageTag; - final Map newActorDefToDockerImageTag; - - try { - currentActorDefToDockerImageTag = configRepositoryDoNotUse.listStandardSourceDefinitions(false) - .stream() - .map(def -> Map.entry(def.getSourceDefinitionId(), def.getDockerImageTag())) - .toList(); - } catch (final IOException e) { - 
log.error("Failed to get current list of standard source definitions", e); - return NO_CHANGES_FOUND; - } - - try { - newActorDefToDockerImageTag = githubStore.getLatestSources() - .stream() - .collect(Collectors.toMap(StandardSourceDefinition::getSourceDefinitionId, StandardSourceDefinition::getDockerImageTag)); - } catch (final InterruptedException e) { - log.error("Failed to get latest list of standard source definitions", e); - return NO_CHANGES_FOUND; - } - - return getDiffCount(currentActorDefToDockerImageTag, newActorDefToDockerImageTag); - } - - private int getDiffCount(final List> initialSet, final Map newSet) { - int diffCount = 0; - for (final Entry kvp : initialSet) { - final String newDockerImageTag = newSet.get(kvp.getKey()); - if (newDockerImageTag != null && !kvp.getValue().equals(newDockerImageTag)) { - ++diffCount; - } - } - return diffCount; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java deleted file mode 100644 index b863e56cdedb..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java +++ /dev/null @@ -1,794 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static java.util.stream.Collectors.toMap; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.AirbyteCatalog; -import io.airbyte.api.model.generated.AirbyteStream; -import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration; -import io.airbyte.api.model.generated.AirbyteStreamConfiguration; -import io.airbyte.api.model.generated.CatalogDiff; -import io.airbyte.api.model.generated.ConnectionCreate; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationSnippetRead; -import io.airbyte.api.model.generated.FieldTransform; -import io.airbyte.api.model.generated.JobRead; -import io.airbyte.api.model.generated.OperationCreate; -import io.airbyte.api.model.generated.OperationReadList; -import io.airbyte.api.model.generated.OperationUpdate; -import io.airbyte.api.model.generated.SchemaChange; -import io.airbyte.api.model.generated.SelectedFieldInfo; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceSnippetRead; -import io.airbyte.api.model.generated.StreamDescriptor; -import io.airbyte.api.model.generated.StreamTransform; -import io.airbyte.api.model.generated.StreamTransform.TransformTypeEnum; -import io.airbyte.api.model.generated.WebBackendConnectionCreate; -import io.airbyte.api.model.generated.WebBackendConnectionListItem; -import 
io.airbyte.api.model.generated.WebBackendConnectionListRequestBody; -import io.airbyte.api.model.generated.WebBackendConnectionRead; -import io.airbyte.api.model.generated.WebBackendConnectionReadList; -import io.airbyte.api.model.generated.WebBackendConnectionRequestBody; -import io.airbyte.api.model.generated.WebBackendConnectionUpdate; -import io.airbyte.api.model.generated.WebBackendOperationCreateOrUpdate; -import io.airbyte.api.model.generated.WebBackendWorkspaceState; -import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.MoreBooleans; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.ActorCatalogFetchEvent; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.validation.json.JsonValidationException; -import io.airbyte.workers.helper.ProtocolConverters; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * The web backend is an abstraction that allows the frontend to structure data in such a way that - * it is easier for a react frontend to consume. It should NOT have direct access to the database. 
- * It should operate exclusively by calling other endpoints that are exposed in the API. - **/ -@Singleton -public class WebBackendConnectionsHandler { - - private final ConnectionsHandler connectionsHandler; - private final StateHandler stateHandler; - private final SourceHandler sourceHandler; - private final DestinationHandler destinationHandler; - private final JobHistoryHandler jobHistoryHandler; - private final SchedulerHandler schedulerHandler; - private final OperationsHandler operationsHandler; - private final EventRunner eventRunner; - // todo (cgardens) - this handler should NOT have access to the db. only access via handler. - @Deprecated - private final ConfigRepository configRepositoryDoNotUse; - - public WebBackendConnectionsHandler(final ConnectionsHandler connectionsHandler, - final StateHandler stateHandler, - final SourceHandler sourceHandler, - final DestinationHandler destinationHandler, - final JobHistoryHandler jobHistoryHandler, - final SchedulerHandler schedulerHandler, - final OperationsHandler operationsHandler, - final EventRunner eventRunner, - final ConfigRepository configRepositoryDoNotUse) { - this.connectionsHandler = connectionsHandler; - this.stateHandler = stateHandler; - this.sourceHandler = sourceHandler; - this.destinationHandler = destinationHandler; - this.jobHistoryHandler = jobHistoryHandler; - this.schedulerHandler = schedulerHandler; - this.operationsHandler = operationsHandler; - this.eventRunner = eventRunner; - this.configRepositoryDoNotUse = configRepositoryDoNotUse; - } - - public WebBackendWorkspaceStateResult getWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) throws IOException { - final var workspaceId = webBackendWorkspaceState.getWorkspaceId(); - final var connectionCount = configRepositoryDoNotUse.countConnectionsForWorkspace(workspaceId); - final var destinationCount = configRepositoryDoNotUse.countDestinationsForWorkspace(workspaceId); - final var sourceCount = 
configRepositoryDoNotUse.countSourcesForWorkspace(workspaceId); - - return new WebBackendWorkspaceStateResult() - .hasConnections(connectionCount > 0) - .hasDestinations(destinationCount > 0) - .hasSources(sourceCount > 0); - } - - public ConnectionStateType getStateType(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException { - return Enums.convertTo(stateHandler.getState(connectionIdRequestBody).getStateType(), ConnectionStateType.class); - } - - public WebBackendConnectionReadList webBackendListConnectionsForWorkspace(final WebBackendConnectionListRequestBody webBackendConnectionListRequestBody) - throws IOException { - - final StandardSyncQuery query = new StandardSyncQuery( - webBackendConnectionListRequestBody.getWorkspaceId(), - webBackendConnectionListRequestBody.getSourceId(), - webBackendConnectionListRequestBody.getDestinationId(), - // passing 'false' so that deleted connections are not included - false); - - final List standardSyncs = configRepositoryDoNotUse.listWorkspaceStandardSyncs(query); - final List sourceIds = standardSyncs.stream().map(StandardSync::getSourceId).toList(); - final List destinationIds = standardSyncs.stream().map(StandardSync::getDestinationId).toList(); - final List connectionIds = standardSyncs.stream().map(StandardSync::getConnectionId).toList(); - - // Fetching all the related objects we need for the final output - final Map sourceReadById = getSourceSnippetReadById(sourceIds); - final Map destinationReadById = getDestinationSnippetReadById(destinationIds); - final Map latestJobByConnectionId = getLatestJobByConnectionId(connectionIds); - // This call could be removed, running jobs should be a subset of latest jobs, need to expose the - // right status filtering for this. 
- final Map runningJobByConnectionId = getRunningJobByConnectionId(connectionIds); - final Map newestFetchEventsByActorId = - configRepositoryDoNotUse.getMostRecentActorCatalogFetchEventForSources(sourceIds); - - final List connectionItems = Lists.newArrayList(); - - for (final StandardSync standardSync : standardSyncs) { - connectionItems.add( - buildWebBackendConnectionListItem( - standardSync, - sourceReadById, - destinationReadById, - latestJobByConnectionId, - runningJobByConnectionId, - Optional.ofNullable(newestFetchEventsByActorId.get(standardSync.getSourceId())))); - } - - return new WebBackendConnectionReadList().connections(connectionItems); - } - - private Map getLatestJobByConnectionId(final List connectionIds) throws IOException { - return jobHistoryHandler.getLatestSyncJobsForConnections(connectionIds).stream() - .collect(Collectors.toMap(j -> UUID.fromString(j.getConfigId()), Function.identity())); - } - - private Map getRunningJobByConnectionId(final List connectionIds) throws IOException { - return jobHistoryHandler.getRunningSyncJobForConnections(connectionIds).stream() - .collect(Collectors.toMap(j -> UUID.fromString(j.getConfigId()), Function.identity())); - } - - private Map getSourceSnippetReadById(final List sourceIds) throws IOException { - return configRepositoryDoNotUse.getSourceAndDefinitionsFromSourceIds(sourceIds) - .stream() - .map(sourceAndDefinition -> SourceHandler.toSourceSnippetRead(sourceAndDefinition.source(), sourceAndDefinition.definition())) - .collect(Collectors.toMap(SourceSnippetRead::getSourceId, Function.identity())); - } - - private Map getDestinationSnippetReadById(final List destinationIds) throws IOException { - return configRepositoryDoNotUse.getDestinationAndDefinitionsFromDestinationIds(destinationIds) - .stream() - .map(destinationAndDefinition -> DestinationHandler.toDestinationSnippetRead(destinationAndDefinition.destination(), - destinationAndDefinition.definition())) - 
.collect(Collectors.toMap(DestinationSnippetRead::getDestinationId, Function.identity())); - } - - private WebBackendConnectionRead buildWebBackendConnectionRead(final ConnectionRead connectionRead, final Optional currentSourceCatalogId) - throws ConfigNotFoundException, IOException, JsonValidationException { - final SourceRead source = getSourceRead(connectionRead.getSourceId()); - final DestinationRead destination = getDestinationRead(connectionRead.getDestinationId()); - final OperationReadList operations = getOperationReadList(connectionRead); - final Optional latestSyncJob = jobHistoryHandler.getLatestSyncJob(connectionRead.getConnectionId()); - final Optional latestRunningSyncJob = jobHistoryHandler.getLatestRunningSyncJob(connectionRead.getConnectionId()); - - final WebBackendConnectionRead webBackendConnectionRead = getWebBackendConnectionRead(connectionRead, source, destination, operations) - .catalogId(connectionRead.getSourceCatalogId()); - - webBackendConnectionRead.setIsSyncing(latestRunningSyncJob.isPresent()); - - latestSyncJob.ifPresent(job -> { - webBackendConnectionRead.setLatestSyncJobCreatedAt(job.getCreatedAt()); - webBackendConnectionRead.setLatestSyncJobStatus(job.getStatus()); - }); - - final Optional mostRecentFetchEvent = - configRepositoryDoNotUse.getMostRecentActorCatalogFetchEventForSource(connectionRead.getSourceId()); - - final SchemaChange schemaChange = getSchemaChange(connectionRead, currentSourceCatalogId, mostRecentFetchEvent); - - webBackendConnectionRead.setSchemaChange(schemaChange); - - return webBackendConnectionRead; - } - - static private WebBackendConnectionListItem buildWebBackendConnectionListItem( - final StandardSync standardSync, - final Map sourceReadById, - final Map destinationReadById, - final Map latestJobByConnectionId, - final Map runningJobByConnectionId, - final Optional latestFetchEvent) { - - final SourceSnippetRead source = sourceReadById.get(standardSync.getSourceId()); - final DestinationSnippetRead 
destination = destinationReadById.get(standardSync.getDestinationId()); - final Optional latestSyncJob = Optional.ofNullable(latestJobByConnectionId.get(standardSync.getConnectionId())); - final Optional latestRunningSyncJob = Optional.ofNullable(runningJobByConnectionId.get(standardSync.getConnectionId())); - final ConnectionRead connectionRead = ApiPojoConverters.internalToConnectionRead(standardSync); - final Optional currentCatalogId = connectionRead == null ? Optional.empty() : Optional.ofNullable(connectionRead.getSourceCatalogId()); - - final SchemaChange schemaChange = getSchemaChange(connectionRead, currentCatalogId, latestFetchEvent); - - final WebBackendConnectionListItem listItem = new WebBackendConnectionListItem() - .connectionId(standardSync.getConnectionId()) - .status(ApiPojoConverters.toApiStatus(standardSync.getStatus())) - .name(standardSync.getName()) - .scheduleType(ApiPojoConverters.toApiConnectionScheduleType(standardSync)) - .scheduleData(ApiPojoConverters.toApiConnectionScheduleData(standardSync)) - .source(source) - .destination(destination) - .isSyncing(latestRunningSyncJob.isPresent()) - .schemaChange(schemaChange); - - latestSyncJob.ifPresent(job -> { - listItem.setLatestSyncJobCreatedAt(job.getCreatedAt()); - listItem.setLatestSyncJobStatus(job.getStatus()); - }); - - return listItem; - } - - /* - * A breakingChange boolean is stored on the connectionRead object and corresponds to the boolean - * breakingChange field on the connection table. If there is not a breaking change, we still have to - * check whether there is a non-breaking schema change by fetching the most recent - * ActorCatalogFetchEvent. A new ActorCatalogFetchEvent is stored each time there is a source schema - * refresh, so if the most recent ActorCatalogFetchEvent has a different actor catalog than the - * existing actor catalog, there is a schema change. 
- */ - @VisibleForTesting - static SchemaChange getSchemaChange( - final ConnectionRead connectionRead, - final Optional currentSourceCatalogId, - final Optional mostRecentFetchEvent) { - if (connectionRead == null || currentSourceCatalogId.isEmpty()) { - return SchemaChange.NO_CHANGE; - } - - if (connectionRead.getBreakingChange() != null && connectionRead.getBreakingChange()) { - return SchemaChange.BREAKING; - } - - if (mostRecentFetchEvent.isPresent() && !mostRecentFetchEvent.map(ActorCatalogFetchEvent::getActorCatalogId).equals(currentSourceCatalogId)) { - return SchemaChange.NON_BREAKING; - } - - return SchemaChange.NO_CHANGE; - } - - private SourceRead getSourceRead(final UUID sourceId) throws JsonValidationException, IOException, ConfigNotFoundException { - final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody().sourceId(sourceId); - return sourceHandler.getSource(sourceIdRequestBody); - } - - private DestinationRead getDestinationRead(final UUID destinationId) - throws JsonValidationException, IOException, ConfigNotFoundException { - final DestinationIdRequestBody destinationIdRequestBody = new DestinationIdRequestBody().destinationId(destinationId); - return destinationHandler.getDestination(destinationIdRequestBody); - } - - private OperationReadList getOperationReadList(final ConnectionRead connectionRead) - throws JsonValidationException, IOException, ConfigNotFoundException { - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(connectionRead.getConnectionId()); - return operationsHandler.listOperationsForConnection(connectionIdRequestBody); - } - - private static WebBackendConnectionRead getWebBackendConnectionRead(final ConnectionRead connectionRead, - final SourceRead source, - final DestinationRead destination, - final OperationReadList operations) { - return new WebBackendConnectionRead() - .connectionId(connectionRead.getConnectionId()) - .sourceId(connectionRead.getSourceId()) - 
.destinationId(connectionRead.getDestinationId()) - .operationIds(connectionRead.getOperationIds()) - .name(connectionRead.getName()) - .namespaceDefinition(connectionRead.getNamespaceDefinition()) - .namespaceFormat(connectionRead.getNamespaceFormat()) - .prefix(connectionRead.getPrefix()) - .syncCatalog(connectionRead.getSyncCatalog()) - .status(connectionRead.getStatus()) - .schedule(connectionRead.getSchedule()) - .scheduleType(connectionRead.getScheduleType()) - .scheduleData(connectionRead.getScheduleData()) - .source(source) - .destination(destination) - .operations(operations.getOperations()) - .resourceRequirements(connectionRead.getResourceRequirements()) - .geography(connectionRead.getGeography()) - .notifySchemaChanges(connectionRead.getNotifySchemaChanges()) - .nonBreakingChangesPreference(connectionRead.getNonBreakingChangesPreference()); - } - - // todo (cgardens) - This logic is a headache to follow it stems from the internal data model not - // tracking selected streams in any reasonable way. We should update that. - public WebBackendConnectionRead webBackendGetConnection(final WebBackendConnectionRequestBody webBackendConnectionRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody() - .connectionId(webBackendConnectionRequestBody.getConnectionId()); - - final ConnectionRead connection = connectionsHandler.getConnection(connectionIdRequestBody.getConnectionId()); - - /* - * This variable contains all configuration but will be missing streams that were not selected. - */ - final AirbyteCatalog configuredCatalog = connection.getSyncCatalog(); - /* - * This catalog represents the full catalog that was used to create the configured catalog. It will - * have all streams that were present at the time. It will have no configuration set. 
- */ - final Optional catalogUsedToMakeConfiguredCatalog = connectionsHandler - .getConnectionAirbyteCatalog(webBackendConnectionRequestBody.getConnectionId()); - - /* - * This catalog represents the full catalog that exists now for the source. It will have no - * configuration set. - */ - final Optional refreshedCatalog; - if (MoreBooleans.isTruthy(webBackendConnectionRequestBody.getWithRefreshedCatalog())) { - refreshedCatalog = getRefreshedSchema(connection.getSourceId(), connection.getConnectionId()); - } else { - refreshedCatalog = Optional.empty(); - } - - final CatalogDiff diff; - final AirbyteCatalog syncCatalog; - final Optional currentSourceCatalogId = Optional.ofNullable(connection.getSourceCatalogId()); - if (refreshedCatalog.isPresent()) { - connection.sourceCatalogId(refreshedCatalog.get().getCatalogId()); - /* - * constructs a full picture of all existing configured + all new / updated streams in the newest - * catalog. - * - * Diffing the catalog used to make the configured catalog gives us the clearest diff between the - * schema when the configured catalog was made and now. In the case where we do not have the - * original catalog used to make the configured catalog, we make due, but using the configured - * catalog itself. The drawback is that any stream that was not selected in the configured catalog - * but was present at time of configuration will appear in the diff as an added stream which is - * confusing. We need to figure out why source_catalog_id is not always populated in the db. 
- */ - syncCatalog = updateSchemaWithRefreshedDiscoveredCatalog(configuredCatalog, catalogUsedToMakeConfiguredCatalog.orElse(configuredCatalog), - refreshedCatalog.get().getCatalog()); - - diff = refreshedCatalog.get().getCatalogDiff(); - connection.setBreakingChange(refreshedCatalog.get().getBreakingChange()); - connection.setStatus(refreshedCatalog.get().getConnectionStatus()); - } else if (catalogUsedToMakeConfiguredCatalog.isPresent()) { - // reconstructs a full picture of the full schema at the time the catalog was configured. - syncCatalog = updateSchemaWithOriginalDiscoveredCatalog(configuredCatalog, catalogUsedToMakeConfiguredCatalog.get()); - // diff not relevant if there was no refresh. - diff = null; - } else { - // fallback. over time this should be rarely used because source_catalog_id should always be set. - syncCatalog = configuredCatalog; - // diff not relevant if there was no refresh. - diff = null; - } - - connection.setSyncCatalog(syncCatalog); - return buildWebBackendConnectionRead(connection, currentSourceCatalogId).catalogDiff(diff); - } - - private AirbyteCatalog updateSchemaWithOriginalDiscoveredCatalog(final AirbyteCatalog configuredCatalog, - final AirbyteCatalog originalDiscoveredCatalog) { - // We pass the original discovered catalog in as the "new" discovered catalog. 
- return updateSchemaWithRefreshedDiscoveredCatalog(configuredCatalog, originalDiscoveredCatalog, originalDiscoveredCatalog); - } - - private Optional getRefreshedSchema(final UUID sourceId, final UUID connectionId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceDiscoverSchemaRequestBody discoverSchemaReadReq = new SourceDiscoverSchemaRequestBody() - .sourceId(sourceId) - .disableCache(true) - .connectionId(connectionId) - .notifySchemaChange(false); - final SourceDiscoverSchemaRead schemaRead = schedulerHandler.discoverSchemaForSourceFromSourceId(discoverSchemaReadReq); - return Optional.ofNullable(schemaRead); - } - - /** - * Applies existing configurations to a newly discovered catalog. For example, if the users stream - * is in the old and new catalog, any configuration that was previously set for users, we add to the - * new catalog. - * - * @param originalConfigured fully configured, original catalog - * @param originalDiscovered the original discovered catalog used to make the original configured - * catalog - * @param discovered newly discovered catalog, no configurations set - * @return merged catalog, most up-to-date schema with most up-to-date configurations from old - * catalog - */ - @VisibleForTesting - protected static AirbyteCatalog updateSchemaWithRefreshedDiscoveredCatalog(final AirbyteCatalog originalConfigured, - final AirbyteCatalog originalDiscovered, - final AirbyteCatalog discovered) { - /* - * We can't directly use s.getStream() as the key, because it contains a bunch of other fields, so - * we just define a quick-and-dirty record class. 
- */ - final Map streamDescriptorToOriginalStream = originalConfigured.getStreams() - .stream() - .collect(toMap(s -> new Stream(s.getStream().getName(), s.getStream().getNamespace()), s -> s)); - final Map streamDescriptorToOriginalDiscoveredStream = originalDiscovered.getStreams() - .stream() - .collect(toMap(s -> new Stream(s.getStream().getName(), s.getStream().getNamespace()), s -> s)); - - final List streams = new ArrayList<>(); - - for (final AirbyteStreamAndConfiguration discoveredStream : discovered.getStreams()) { - final AirbyteStream stream = discoveredStream.getStream(); - final AirbyteStreamAndConfiguration originalConfiguredStream = streamDescriptorToOriginalStream.get( - new Stream(stream.getName(), stream.getNamespace())); - final AirbyteStreamAndConfiguration originalDiscoveredStream = streamDescriptorToOriginalDiscoveredStream.get( - new Stream(stream.getName(), stream.getNamespace())); - final AirbyteStreamConfiguration outputStreamConfig; - - if (originalConfiguredStream != null) { - final AirbyteStreamConfiguration originalStreamConfig = originalConfiguredStream.getConfig(); - final AirbyteStreamConfiguration discoveredStreamConfig = discoveredStream.getConfig(); - outputStreamConfig = new AirbyteStreamConfiguration(); - - if (stream.getSupportedSyncModes().contains(originalStreamConfig.getSyncMode())) { - outputStreamConfig.setSyncMode(originalStreamConfig.getSyncMode()); - } else { - outputStreamConfig.setSyncMode(discoveredStreamConfig.getSyncMode()); - } - - if (originalStreamConfig.getCursorField().size() > 0) { - outputStreamConfig.setCursorField(originalStreamConfig.getCursorField()); - } else { - outputStreamConfig.setCursorField(discoveredStreamConfig.getCursorField()); - } - - outputStreamConfig.setDestinationSyncMode(originalStreamConfig.getDestinationSyncMode()); - if (originalStreamConfig.getPrimaryKey().size() > 0) { - outputStreamConfig.setPrimaryKey(originalStreamConfig.getPrimaryKey()); - } else { - 
outputStreamConfig.setPrimaryKey(discoveredStreamConfig.getPrimaryKey()); - } - - outputStreamConfig.setAliasName(originalStreamConfig.getAliasName()); - outputStreamConfig.setSelected(originalConfiguredStream.getConfig().getSelected()); - - outputStreamConfig.setFieldSelectionEnabled(originalStreamConfig.getFieldSelectionEnabled()); - if (outputStreamConfig.getFieldSelectionEnabled()) { - // TODO(mfsiega-airbyte): support nested fields. - // If field selection is enabled, populate the selected fields. - final Set originallyDiscovered = new HashSet<>(); - final Set refreshDiscovered = new HashSet<>(); - // NOTE: by only taking the first element of the path, we're restricting to top-level fields. - final Set originallySelected = new HashSet<>( - originalConfiguredStream.getConfig().getSelectedFields().stream().map((field) -> field.getFieldPath().get(0)).toList()); - originalDiscoveredStream.getStream().getJsonSchema().findPath("properties").fieldNames() - .forEachRemaining((name) -> originallyDiscovered.add(name)); - stream.getJsonSchema().findPath("properties").fieldNames().forEachRemaining((name) -> refreshDiscovered.add(name)); - // We include a selected field if it: - // (is in the newly discovered schema) AND (it was either originally selected OR not in the - // originally discovered schema at all) - // NOTE: this implies that the default behaviour for newly-discovered columns is to add them. 
- for (final String discoveredField : refreshDiscovered) { - if (originallySelected.contains(discoveredField) || !originallyDiscovered.contains(discoveredField)) { - outputStreamConfig.addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem(discoveredField)); - } - } - } - - } else { - outputStreamConfig = discoveredStream.getConfig(); - outputStreamConfig.setSelected(false); - } - final AirbyteStreamAndConfiguration outputStream = new AirbyteStreamAndConfiguration() - .stream(Jsons.clone(stream)) - .config(outputStreamConfig); - streams.add(outputStream); - } - return new AirbyteCatalog().streams(streams); - } - - public WebBackendConnectionRead webBackendCreateConnection(final WebBackendConnectionCreate webBackendConnectionCreate) - throws ConfigNotFoundException, IOException, JsonValidationException { - final List operationIds = createOperations(webBackendConnectionCreate); - - final ConnectionCreate connectionCreate = toConnectionCreate(webBackendConnectionCreate, operationIds); - final Optional currentSourceCatalogId = Optional.ofNullable(connectionCreate.getSourceCatalogId()); - return buildWebBackendConnectionRead(connectionsHandler.createConnection(connectionCreate), currentSourceCatalogId); - } - - /** - * Given a WebBackendConnectionUpdate, patch the connection by applying any non-null properties from - * the patch to the connection. - * - * As a convenience to the front-end, this endpoint also creates new operations present in the - * request, and bundles those newly-created operationIds into the connection update. 
- */ - public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConnectionUpdate webBackendConnectionPatch) - throws ConfigNotFoundException, IOException, JsonValidationException { - - final UUID connectionId = webBackendConnectionPatch.getConnectionId(); - final ConnectionRead originalConnectionRead = connectionsHandler.getConnection(connectionId); - boolean breakingChange = originalConnectionRead.getBreakingChange() != null && originalConnectionRead.getBreakingChange(); - - // If there have been changes to the sync catalog, check whether these changes result in or fix a - // broken connection - if (webBackendConnectionPatch.getSyncCatalog() != null) { - // Get the most recent actor catalog fetched for this connection's source and the newly updated sync - // catalog - final Optional mostRecentActorCatalog = - configRepositoryDoNotUse.getMostRecentActorCatalogForSource(originalConnectionRead.getSourceId()); - final AirbyteCatalog newAirbyteCatalog = webBackendConnectionPatch.getSyncCatalog(); - // Get the diff between these two catalogs to check for breaking changes - if (mostRecentActorCatalog.isPresent()) { - final io.airbyte.protocol.models.AirbyteCatalog mostRecentAirbyteCatalog = - Jsons.object(mostRecentActorCatalog.get().getCatalog(), io.airbyte.protocol.models.AirbyteCatalog.class); - final StandardSourceDefinition sourceDefinition = - configRepositoryDoNotUse.getSourceDefinitionFromSource(originalConnectionRead.getSourceId()); - final CatalogDiff catalogDiff = - connectionsHandler.getDiff(newAirbyteCatalog, CatalogConverter.toApi(mostRecentAirbyteCatalog, sourceDefinition), - CatalogConverter.toConfiguredProtocol(newAirbyteCatalog)); - breakingChange = containsBreakingChange(catalogDiff); - } - } - - // before doing any updates, fetch the existing catalog so that it can be diffed - // with the final catalog to determine which streams might need to be reset. 
- final ConfiguredAirbyteCatalog oldConfiguredCatalog = - configRepositoryDoNotUse.getConfiguredCatalogForConnection(connectionId); - - final List newAndExistingOperationIds = createOrUpdateOperations(originalConnectionRead, webBackendConnectionPatch); - - // pass in operationIds because the patch object doesn't include operationIds that were just created - // above. - final ConnectionUpdate connectionPatch = toConnectionPatch(webBackendConnectionPatch, newAndExistingOperationIds, breakingChange); - - // persist the update and set the connectionRead to the updated form. - final ConnectionRead updatedConnectionRead = connectionsHandler.updateConnection(connectionPatch); - - // detect if any streams need to be reset based on the patch and initial catalog, if so, reset them - resetStreamsIfNeeded(webBackendConnectionPatch, oldConfiguredCatalog, updatedConnectionRead, originalConnectionRead); - /* - * This catalog represents the full catalog that was used to create the configured catalog. It will - * have all streams that were present at the time. It will have no configuration set. - */ - final Optional catalogUsedToMakeConfiguredCatalog = connectionsHandler - .getConnectionAirbyteCatalog(connectionId); - if (catalogUsedToMakeConfiguredCatalog.isPresent()) { - // Update the Catalog returned to include all streams, including disabled ones - final AirbyteCatalog syncCatalog = - updateSchemaWithRefreshedDiscoveredCatalog(updatedConnectionRead.getSyncCatalog(), catalogUsedToMakeConfiguredCatalog.get(), - catalogUsedToMakeConfiguredCatalog.get()); - updatedConnectionRead.setSyncCatalog(syncCatalog); - } - - final Optional currentSourceCatalogId = Optional.ofNullable(updatedConnectionRead.getSourceCatalogId()); - return buildWebBackendConnectionRead(updatedConnectionRead, currentSourceCatalogId); - } - - /** - * Given a fully updated connection, check for a diff between the old catalog and the updated - * catalog to see if any streams need to be reset. 
- */ - private void resetStreamsIfNeeded(final WebBackendConnectionUpdate webBackendConnectionPatch, - final ConfiguredAirbyteCatalog oldConfiguredCatalog, - final ConnectionRead updatedConnectionRead, - final ConnectionRead oldConnectionRead) - throws IOException, JsonValidationException, ConfigNotFoundException { - - final UUID connectionId = webBackendConnectionPatch.getConnectionId(); - final Boolean skipReset = webBackendConnectionPatch.getSkipReset() != null ? webBackendConnectionPatch.getSkipReset() : false; - if (!skipReset) { - final AirbyteCatalog apiExistingCatalog = CatalogConverter.toApi(oldConfiguredCatalog, - CatalogConverter.getFieldSelectionData(oldConnectionRead.getSyncCatalog())); - final AirbyteCatalog upToDateAirbyteCatalog = updatedConnectionRead.getSyncCatalog(); - final CatalogDiff catalogDiff = - connectionsHandler.getDiff(apiExistingCatalog, upToDateAirbyteCatalog, CatalogConverter.toConfiguredProtocol(upToDateAirbyteCatalog)); - final List apiStreamsToReset = getStreamsToReset(catalogDiff); - final Set changedConfigStreamDescriptors = - connectionsHandler.getConfigurationDiff(apiExistingCatalog, upToDateAirbyteCatalog); - final Set allStreamToReset = new HashSet<>(); - allStreamToReset.addAll(apiStreamsToReset); - allStreamToReset.addAll(changedConfigStreamDescriptors); - List streamsToReset = - allStreamToReset.stream().map(ProtocolConverters::streamDescriptorToProtocol).toList(); - - if (!streamsToReset.isEmpty()) { - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(connectionId); - final ConnectionStateType stateType = getStateType(connectionIdRequestBody); - - if (stateType == ConnectionStateType.LEGACY || stateType == ConnectionStateType.NOT_SET) { - streamsToReset = configRepositoryDoNotUse.getAllStreamsForConnection(connectionId); - } - eventRunner.resetConnection( - connectionId, - streamsToReset, true); - } - } - } - - private List createOperations(final WebBackendConnectionCreate 
webBackendConnectionCreate) - throws JsonValidationException, ConfigNotFoundException, IOException { - if (webBackendConnectionCreate.getOperations() == null) { - return Collections.emptyList(); - } - final List operationIds = new ArrayList<>(); - for (final var operationCreate : webBackendConnectionCreate.getOperations()) { - operationIds.add(operationsHandler.createOperation(operationCreate).getOperationId()); - } - return operationIds; - } - - private List createOrUpdateOperations(final ConnectionRead connectionRead, final WebBackendConnectionUpdate webBackendConnectionPatch) - throws JsonValidationException, ConfigNotFoundException, IOException { - - // this is a patch-style update, so don't make any changes if the request doesn't include operations - if (webBackendConnectionPatch.getOperations() == null) { - return null; - } - - // wrap operationIds in a new ArrayList so that it is modifiable below, when calling .removeAll - final List originalOperationIds = - connectionRead.getOperationIds() == null ? 
new ArrayList<>() : new ArrayList<>(connectionRead.getOperationIds()); - - final List updatedOperations = webBackendConnectionPatch.getOperations(); - final List finalOperationIds = new ArrayList<>(); - - for (final var operationCreateOrUpdate : updatedOperations) { - if (operationCreateOrUpdate.getOperationId() == null || !originalOperationIds.contains(operationCreateOrUpdate.getOperationId())) { - final OperationCreate operationCreate = toOperationCreate(operationCreateOrUpdate); - finalOperationIds.add(operationsHandler.createOperation(operationCreate).getOperationId()); - } else { - final OperationUpdate operationUpdate = toOperationUpdate(operationCreateOrUpdate); - finalOperationIds.add(operationsHandler.updateOperation(operationUpdate).getOperationId()); - } - } - - // remove operationIds that weren't included in the update - originalOperationIds.removeAll(finalOperationIds); - operationsHandler.deleteOperationsForConnection(connectionRead.getConnectionId(), originalOperationIds); - return finalOperationIds; - } - - @VisibleForTesting - protected static OperationCreate toOperationCreate(final WebBackendOperationCreateOrUpdate operationCreateOrUpdate) { - final OperationCreate operationCreate = new OperationCreate(); - - operationCreate.name(operationCreateOrUpdate.getName()); - operationCreate.workspaceId(operationCreateOrUpdate.getWorkspaceId()); - operationCreate.operatorConfiguration(operationCreateOrUpdate.getOperatorConfiguration()); - - return operationCreate; - } - - @VisibleForTesting - protected static OperationUpdate toOperationUpdate(final WebBackendOperationCreateOrUpdate operationCreateOrUpdate) { - final OperationUpdate operationUpdate = new OperationUpdate(); - - operationUpdate.operationId(operationCreateOrUpdate.getOperationId()); - operationUpdate.name(operationCreateOrUpdate.getName()); - operationUpdate.operatorConfiguration(operationCreateOrUpdate.getOperatorConfiguration()); - - return operationUpdate; - } - - @VisibleForTesting - 
protected static ConnectionCreate toConnectionCreate(final WebBackendConnectionCreate webBackendConnectionCreate, final List operationIds) { - final ConnectionCreate connectionCreate = new ConnectionCreate(); - - connectionCreate.name(webBackendConnectionCreate.getName()); - connectionCreate.namespaceDefinition(webBackendConnectionCreate.getNamespaceDefinition()); - connectionCreate.namespaceFormat(webBackendConnectionCreate.getNamespaceFormat()); - connectionCreate.prefix(webBackendConnectionCreate.getPrefix()); - connectionCreate.sourceId(webBackendConnectionCreate.getSourceId()); - connectionCreate.destinationId(webBackendConnectionCreate.getDestinationId()); - connectionCreate.operationIds(operationIds); - connectionCreate.syncCatalog(webBackendConnectionCreate.getSyncCatalog()); - connectionCreate.schedule(webBackendConnectionCreate.getSchedule()); - connectionCreate.scheduleType(webBackendConnectionCreate.getScheduleType()); - connectionCreate.scheduleData(webBackendConnectionCreate.getScheduleData()); - connectionCreate.status(webBackendConnectionCreate.getStatus()); - connectionCreate.resourceRequirements(webBackendConnectionCreate.getResourceRequirements()); - connectionCreate.sourceCatalogId(webBackendConnectionCreate.getSourceCatalogId()); - connectionCreate.geography(webBackendConnectionCreate.getGeography()); - connectionCreate.nonBreakingChangesPreference(webBackendConnectionCreate.getNonBreakingChangesPreference()); - - return connectionCreate; - } - - /** - * Take in a WebBackendConnectionUpdate and convert it into a ConnectionUpdate. OperationIds are - * handled as a special case because the WebBackendConnectionUpdate handler allows for on-the-fly - * creation of new operations. So, the brand-new IDs are passed in because they aren't present in - * the WebBackendConnectionUpdate itself. - * - * The return value is used as a patch -- a field set to null means that it should not be modified. 
- */ - @VisibleForTesting - protected static ConnectionUpdate toConnectionPatch(final WebBackendConnectionUpdate webBackendConnectionPatch, - final List finalOperationIds, - final boolean breakingChange) { - final ConnectionUpdate connectionPatch = new ConnectionUpdate(); - - connectionPatch.connectionId(webBackendConnectionPatch.getConnectionId()); - connectionPatch.namespaceDefinition(webBackendConnectionPatch.getNamespaceDefinition()); - connectionPatch.namespaceFormat(webBackendConnectionPatch.getNamespaceFormat()); - connectionPatch.prefix(webBackendConnectionPatch.getPrefix()); - connectionPatch.name(webBackendConnectionPatch.getName()); - connectionPatch.syncCatalog(webBackendConnectionPatch.getSyncCatalog()); - connectionPatch.schedule(webBackendConnectionPatch.getSchedule()); - connectionPatch.scheduleType(webBackendConnectionPatch.getScheduleType()); - connectionPatch.scheduleData(webBackendConnectionPatch.getScheduleData()); - connectionPatch.status(webBackendConnectionPatch.getStatus()); - connectionPatch.resourceRequirements(webBackendConnectionPatch.getResourceRequirements()); - connectionPatch.sourceCatalogId(webBackendConnectionPatch.getSourceCatalogId()); - connectionPatch.geography(webBackendConnectionPatch.getGeography()); - connectionPatch.notifySchemaChanges(webBackendConnectionPatch.getNotifySchemaChanges()); - connectionPatch.nonBreakingChangesPreference(webBackendConnectionPatch.getNonBreakingChangesPreference()); - connectionPatch.breakingChange(breakingChange); - - connectionPatch.operationIds(finalOperationIds); - - return connectionPatch; - } - - @VisibleForTesting - static List getStreamsToReset(final CatalogDiff catalogDiff) { - return catalogDiff.getTransforms().stream().map(StreamTransform::getStreamDescriptor).toList(); - } - - /** - * Equivalent to {@see io.airbyte.integrations.base.AirbyteStreamNameNamespacePair}. 
Intentionally - * not using that class because it doesn't make sense for airbyte-server to depend on - * base-java-integration. - */ - private record Stream(String name, String namespace) { - - } - - private boolean containsBreakingChange(final CatalogDiff diff) { - for (final StreamTransform streamTransform : diff.getTransforms()) { - if (streamTransform.getTransformType() != TransformTypeEnum.UPDATE_STREAM) { - continue; - } - - final boolean anyBreakingFieldTransforms = streamTransform.getUpdateStream().stream().anyMatch(FieldTransform::getBreaking); - if (anyBreakingFieldTransforms) { - return true; - } - } - - return false; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendGeographiesHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendGeographiesHandler.java deleted file mode 100644 index 70379fb553f7..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendGeographiesHandler.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import io.airbyte.api.model.generated.Geography; -import io.airbyte.api.model.generated.WebBackendGeographiesListResult; -import jakarta.inject.Singleton; -import java.util.Arrays; -import java.util.Collections; - -/** - * The web backend is an abstraction that allows the frontend to structure data in such a way that - * it is easier for a react frontend to consume. It should NOT have direct access to the database. - * It should operate exclusively by calling other endpoints that are exposed in the API. - **/ -@Singleton -public class WebBackendGeographiesHandler { - - public WebBackendGeographiesListResult listGeographiesOSS() { - // for now, OSS only supports AUTO. 
This can evolve to account for complex OSS use cases, but for - // now we expect OSS deployments to use a single default Task Queue for scheduling syncs in a vast - // majority of cases. - return new WebBackendGeographiesListResult().geographies( - Collections.singletonList(Geography.AUTO)); - } - - /** - * Only called by the wrapped Cloud API to enable multi-cloud - */ - public WebBackendGeographiesListResult listGeographiesCloud() { - return new WebBackendGeographiesListResult().geographies(Arrays.asList(Geography.values())); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java deleted file mode 100644 index 9cc35fcd7ff7..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java +++ /dev/null @@ -1,314 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import com.github.slugify.Slugify; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import io.airbyte.analytics.TrackingClientSingleton; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.Geography; -import io.airbyte.api.model.generated.SlugRequestBody; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.WorkspaceCreate; -import io.airbyte.api.model.generated.WorkspaceGiveFeedback; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.api.model.generated.WorkspaceRead; -import io.airbyte.api.model.generated.WorkspaceReadList; -import io.airbyte.api.model.generated.WorkspaceUpdate; -import io.airbyte.api.model.generated.WorkspaceUpdateName; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.commons.server.converters.NotificationConverter; -import io.airbyte.commons.server.converters.WorkspaceWebhookConfigsConverter; -import io.airbyte.commons.server.errors.InternalServerKnownException; -import io.airbyte.commons.server.errors.ValueConflictKnownException; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Inject; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.List; -import java.util.UUID; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import org.apache.commons.lang3.RandomStringUtils; -import 
org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Singleton -public class WorkspacesHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkspacesHandler.class); - private final ConfigRepository configRepository; - private final SecretsRepositoryWriter secretsRepositoryWriter; - private final ConnectionsHandler connectionsHandler; - private final DestinationHandler destinationHandler; - private final SourceHandler sourceHandler; - private final Supplier uuidSupplier; - private final Slugify slugify; - - @Inject - public WorkspacesHandler(final ConfigRepository configRepository, - final SecretsRepositoryWriter secretsRepositoryWriter, - final ConnectionsHandler connectionsHandler, - final DestinationHandler destinationHandler, - final SourceHandler sourceHandler) { - this(configRepository, secretsRepositoryWriter, connectionsHandler, destinationHandler, sourceHandler, UUID::randomUUID); - } - - @VisibleForTesting - WorkspacesHandler(final ConfigRepository configRepository, - final SecretsRepositoryWriter secretsRepositoryWriter, - final ConnectionsHandler connectionsHandler, - final DestinationHandler destinationHandler, - final SourceHandler sourceHandler, - final Supplier uuidSupplier) { - this.configRepository = configRepository; - this.secretsRepositoryWriter = secretsRepositoryWriter; - this.connectionsHandler = connectionsHandler; - this.destinationHandler = destinationHandler; - this.sourceHandler = sourceHandler; - this.uuidSupplier = uuidSupplier; - this.slugify = new Slugify(); - } - - public WorkspaceRead createWorkspace(final WorkspaceCreate workspaceCreate) - throws JsonValidationException, IOException, ValueConflictKnownException { - - final String email = workspaceCreate.getEmail(); - final Boolean anonymousDataCollection = workspaceCreate.getAnonymousDataCollection(); - final Boolean news = workspaceCreate.getNews(); - final Boolean securityUpdates = workspaceCreate.getSecurityUpdates(); - final Boolean displaySetupWizard 
= workspaceCreate.getDisplaySetupWizard(); - - // if not set on the workspaceCreate, set the defaultGeography to AUTO - final io.airbyte.config.Geography defaultGeography = workspaceCreate.getDefaultGeography() != null - ? Enums.convertTo(workspaceCreate.getDefaultGeography(), io.airbyte.config.Geography.class) - : io.airbyte.config.Geography.AUTO; - - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(uuidSupplier.get()) - .withCustomerId(uuidSupplier.get()) - .withName(workspaceCreate.getName()) - .withSlug(generateUniqueSlug(workspaceCreate.getName())) - .withInitialSetupComplete(false) - .withAnonymousDataCollection(anonymousDataCollection != null ? anonymousDataCollection : false) - .withNews(news != null ? news : false) - .withSecurityUpdates(securityUpdates != null ? securityUpdates : false) - .withDisplaySetupWizard(displaySetupWizard != null ? displaySetupWizard : false) - .withTombstone(false) - .withNotifications(NotificationConverter.toConfigList(workspaceCreate.getNotifications())) - .withDefaultGeography(defaultGeography) - .withWebhookOperationConfigs(WorkspaceWebhookConfigsConverter.toPersistenceWrite(workspaceCreate.getWebhookConfigs(), uuidSupplier)); - - if (!Strings.isNullOrEmpty(email)) { - workspace.withEmail(email); - } - - return persistStandardWorkspace(workspace); - } - - public void deleteWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - // get existing implementation - final StandardWorkspace persistedWorkspace = configRepository.getStandardWorkspaceNoSecrets(workspaceIdRequestBody.getWorkspaceId(), false); - - // disable all connections associated with this workspace - for (final ConnectionRead connectionRead : connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody).getConnections()) { - connectionsHandler.deleteConnection(connectionRead.getConnectionId()); - } - - // disable all destinations associated with 
this workspace - for (final DestinationRead destinationRead : destinationHandler.listDestinationsForWorkspace(workspaceIdRequestBody).getDestinations()) { - destinationHandler.deleteDestination(destinationRead); - } - - // disable all sources associated with this workspace - for (final SourceRead sourceRead : sourceHandler.listSourcesForWorkspace(workspaceIdRequestBody).getSources()) { - sourceHandler.deleteSource(sourceRead); - } - - persistedWorkspace.withTombstone(true); - persistStandardWorkspace(persistedWorkspace); - } - - public WorkspaceReadList listWorkspaces() throws JsonValidationException, IOException { - final List reads = configRepository.listStandardWorkspaces(false).stream() - .map(WorkspacesHandler::buildWorkspaceRead) - .collect(Collectors.toList()); - return new WorkspaceReadList().workspaces(reads); - } - - public WorkspaceRead getWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - final UUID workspaceId = workspaceIdRequestBody.getWorkspaceId(); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, false); - return buildWorkspaceRead(workspace); - } - - @SuppressWarnings("unused") - public WorkspaceRead getWorkspaceBySlug(final SlugRequestBody slugRequestBody) - throws JsonValidationException, IOException, ConfigNotFoundException { - // for now we assume there is one workspace and it has a default uuid. 
- final StandardWorkspace workspace = configRepository.getWorkspaceBySlug(slugRequestBody.getSlug(), false); - return buildWorkspaceRead(workspace); - } - - public WorkspaceRead getWorkspaceByConnectionId(final ConnectionIdRequestBody connectionIdRequestBody) { - final StandardWorkspace workspace = configRepository.getStandardWorkspaceFromConnection(connectionIdRequestBody.getConnectionId(), false); - return buildWorkspaceRead(workspace); - } - - public WorkspaceRead updateWorkspace(final WorkspaceUpdate workspacePatch) throws ConfigNotFoundException, IOException, JsonValidationException { - final UUID workspaceId = workspacePatch.getWorkspaceId(); - - LOGGER.debug("Starting updateWorkspace for workspaceId {}...", workspaceId); - LOGGER.debug("Incoming workspacePatch: {}", workspacePatch); - - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, false); - LOGGER.debug("Initial workspace: {}", workspace); - - validateWorkspacePatch(workspace, workspacePatch); - - LOGGER.debug("Initial WorkspaceRead: {}", buildWorkspaceRead(workspace)); - - applyPatchToStandardWorkspace(workspace, workspacePatch); - - LOGGER.debug("Patched Workspace before persisting: {}", workspace); - - if (workspacePatch.getWebhookConfigs() == null) { - // We aren't persisting any secrets. It's safe (and necessary) to use the NoSecrets variant because - // we never hydrated them in the first place. - configRepository.writeStandardWorkspaceNoSecrets(workspace); - } else { - // We're saving new webhook configs, so we need to persist the secrets. - persistStandardWorkspace(workspace); - } - - // after updating email or tracking info, we need to re-identify the instance. 
- TrackingClientSingleton.get().identify(workspaceId); - - return buildWorkspaceReadFromId(workspaceId); - } - - public WorkspaceRead updateWorkspaceName(final WorkspaceUpdateName workspaceUpdateName) - throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID workspaceId = workspaceUpdateName.getWorkspaceId(); - - final StandardWorkspace persistedWorkspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, false); - - persistedWorkspace - .withName(workspaceUpdateName.getName()) - .withSlug(generateUniqueSlug(workspaceUpdateName.getName())); - - // NOTE: it's safe (and necessary) to use the NoSecrets variant because we never hydrated them in - // the first place. - configRepository.writeStandardWorkspaceNoSecrets(persistedWorkspace); - - return buildWorkspaceReadFromId(workspaceId); - } - - public void setFeedbackDone(final WorkspaceGiveFeedback workspaceGiveFeedback) - throws JsonValidationException, ConfigNotFoundException, IOException { - configRepository.setFeedback(workspaceGiveFeedback.getWorkspaceId()); - } - - private WorkspaceRead buildWorkspaceReadFromId(final UUID workspaceId) throws ConfigNotFoundException, IOException, JsonValidationException { - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, false); - return buildWorkspaceRead(workspace); - } - - private String generateUniqueSlug(final String workspaceName) throws JsonValidationException, IOException { - final String proposedSlug = slugify.slugify(workspaceName); - - // todo (cgardens) - this is going to be too expensive once there are too many workspaces. needs to - // be replaced with an actual sql query. e.g. 
SELECT COUNT(*) WHERE slug=%s; - boolean isSlugUsed = configRepository.getWorkspaceBySlugOptional(proposedSlug, true).isPresent(); - String resolvedSlug = proposedSlug; - final int MAX_ATTEMPTS = 10; - int count = 0; - while (isSlugUsed) { - // todo (cgardens) - this is still susceptible to a race condition where we randomly generate the - // same slug in two different threads. this should be very unlikely. we can fix this by exposing - // database transaction, but that is not something we can do quickly. - resolvedSlug = proposedSlug + "-" + RandomStringUtils.randomAlphabetic(8); - isSlugUsed = configRepository.getWorkspaceBySlugOptional(resolvedSlug, true).isPresent(); - count++; - if (count > MAX_ATTEMPTS) { - throw new InternalServerKnownException(String.format("could not generate a valid slug after %s tries.", MAX_ATTEMPTS)); - } - } - - return resolvedSlug; - } - - private static WorkspaceRead buildWorkspaceRead(final StandardWorkspace workspace) { - final WorkspaceRead result = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(workspace.getEmail()) - .name(workspace.getName()) - .slug(workspace.getSlug()) - .initialSetupComplete(workspace.getInitialSetupComplete()) - .displaySetupWizard(workspace.getDisplaySetupWizard()) - .anonymousDataCollection(workspace.getAnonymousDataCollection()) - .news(workspace.getNews()) - .securityUpdates(workspace.getSecurityUpdates()) - .notifications(NotificationConverter.toApiList(workspace.getNotifications())) - .defaultGeography(Enums.convertTo(workspace.getDefaultGeography(), Geography.class)); - // Add read-only webhook configs. 
- if (workspace.getWebhookOperationConfigs() != null) { - result.setWebhookConfigs(WorkspaceWebhookConfigsConverter.toApiReads(workspace.getWebhookOperationConfigs())); - } - return result; - } - - private void validateWorkspacePatch(final StandardWorkspace persistedWorkspace, final WorkspaceUpdate workspacePatch) { - Preconditions.checkArgument(persistedWorkspace.getWorkspaceId().equals(workspacePatch.getWorkspaceId())); - } - - private void applyPatchToStandardWorkspace(final StandardWorkspace workspace, final WorkspaceUpdate workspacePatch) { - if (workspacePatch.getAnonymousDataCollection() != null) { - workspace.setAnonymousDataCollection(workspacePatch.getAnonymousDataCollection()); - } - if (workspacePatch.getNews() != null) { - workspace.setNews(workspacePatch.getNews()); - } - if (workspacePatch.getDisplaySetupWizard() != null) { - workspace.setDisplaySetupWizard(workspacePatch.getDisplaySetupWizard()); - } - if (workspacePatch.getSecurityUpdates() != null) { - workspace.setSecurityUpdates(workspacePatch.getSecurityUpdates()); - } - if (!Strings.isNullOrEmpty(workspacePatch.getEmail())) { - workspace.setEmail(workspacePatch.getEmail()); - } - if (workspacePatch.getInitialSetupComplete() != null) { - workspace.setInitialSetupComplete(workspacePatch.getInitialSetupComplete()); - } - if (workspacePatch.getNotifications() != null) { - workspace.setNotifications(NotificationConverter.toConfigList(workspacePatch.getNotifications())); - } - if (workspacePatch.getDefaultGeography() != null) { - workspace.setDefaultGeography(ApiPojoConverters.toPersistenceGeography(workspacePatch.getDefaultGeography())); - } - if (workspacePatch.getWebhookConfigs() != null) { - workspace.setWebhookOperationConfigs(WorkspaceWebhookConfigsConverter.toPersistenceWrite(workspacePatch.getWebhookConfigs(), uuidSupplier)); - } - } - - private WorkspaceRead persistStandardWorkspace(final StandardWorkspace workspace) throws JsonValidationException, IOException { - 
secretsRepositoryWriter.writeWorkspace(workspace); - return buildWorkspaceRead(workspace); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java deleted file mode 100644 index 0bdce39387e7..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java +++ /dev/null @@ -1,315 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.api.model.generated.AirbyteCatalog; -import io.airbyte.api.model.generated.AirbyteStream; -import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration; -import io.airbyte.api.model.generated.AirbyteStreamConfiguration; -import io.airbyte.api.model.generated.DestinationSyncMode; -import io.airbyte.api.model.generated.SelectedFieldInfo; -import io.airbyte.api.model.generated.StreamDescriptor; -import io.airbyte.api.model.generated.SyncMode; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.text.Names; -import io.airbyte.config.FieldSelectionData; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.validation.json.JsonValidationException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Convert classes between io.airbyte.protocol.models and io.airbyte.api.model.generated - */ -public class CatalogConverter { - - private static final Logger LOGGER = 
LoggerFactory.getLogger(CatalogConverter.class); - - private static io.airbyte.api.model.generated.AirbyteStream toApi(final io.airbyte.protocol.models.AirbyteStream stream) { - return new io.airbyte.api.model.generated.AirbyteStream() - .name(stream.getName()) - .jsonSchema(stream.getJsonSchema()) - .supportedSyncModes(Enums.convertListTo(stream.getSupportedSyncModes(), io.airbyte.api.model.generated.SyncMode.class)) - .sourceDefinedCursor(stream.getSourceDefinedCursor()) - .defaultCursorField(stream.getDefaultCursorField()) - .sourceDefinedPrimaryKey(stream.getSourceDefinedPrimaryKey()) - .namespace(stream.getNamespace()); - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - private static io.airbyte.protocol.models.AirbyteStream toConfiguredProtocol(final AirbyteStream stream, AirbyteStreamConfiguration config) - throws JsonValidationException { - if (config.getFieldSelectionEnabled() != null && config.getFieldSelectionEnabled()) { - // Validate the selected field paths. - if (config.getSelectedFields() == null) { - throw new JsonValidationException("Requested field selection but no selected fields provided"); - } - final JsonNode properties = stream.getJsonSchema().findValue("properties"); - if (properties == null || !properties.isObject()) { - throw new JsonValidationException("Requested field selection but no properties node found"); - } - for (final var selectedFieldInfo : config.getSelectedFields()) { - if (selectedFieldInfo.getFieldPath() == null || selectedFieldInfo.getFieldPath().isEmpty()) { - throw new JsonValidationException("Selected field path cannot be empty"); - } - if (selectedFieldInfo.getFieldPath().size() > 1) { - // TODO(mfsiega-airbyte): support nested fields. - throw new UnsupportedOperationException("Nested field selection not supported"); - } - } - // Only include the selected fields. - // NOTE: we verified above that each selected field has at least one element in the field path. 
- final Set selectedFieldNames = - config.getSelectedFields().stream().map((field) -> field.getFieldPath().get(0)).collect(Collectors.toSet()); - // TODO(mfsiega-airbyte): we only check the top level of the cursor/primary key fields because we - // don't support filtering nested fields yet. - if (config.getSyncMode().equals(SyncMode.INCREMENTAL) // INCREMENTAL sync mode, AND - && !config.getCursorField().isEmpty() // There is a cursor configured, AND - && !selectedFieldNames.contains(config.getCursorField().get(0))) { // The cursor isn't in the selected fields. - throw new JsonValidationException("Cursor field cannot be de-selected in INCREMENTAL syncs"); - } - if (config.getDestinationSyncMode().equals(DestinationSyncMode.APPEND_DEDUP)) { - for (final List primaryKeyComponent : config.getPrimaryKey()) { - if (!selectedFieldNames.contains(primaryKeyComponent.get(0))) { - throw new JsonValidationException("Primary key field cannot be de-selected in DEDUP mode"); - } - } - } - for (final String selectedFieldName : selectedFieldNames) { - if (!properties.has(selectedFieldName)) { - throw new JsonValidationException(String.format("Requested selected field %s not found in JSON schema", selectedFieldName)); - } - } - ((ObjectNode) properties).retain(selectedFieldNames); - } - return new io.airbyte.protocol.models.AirbyteStream() - .withName(stream.getName()) - .withJsonSchema(stream.getJsonSchema()) - .withSupportedSyncModes(Enums.convertListTo(stream.getSupportedSyncModes(), io.airbyte.protocol.models.SyncMode.class)) - .withSourceDefinedCursor(stream.getSourceDefinedCursor()) - .withDefaultCursorField(stream.getDefaultCursorField()) - .withSourceDefinedPrimaryKey(Optional.ofNullable(stream.getSourceDefinedPrimaryKey()).orElse(Collections.emptyList())) - .withNamespace(stream.getNamespace()); - } - - public static io.airbyte.api.model.generated.AirbyteCatalog toApi(final io.airbyte.protocol.models.AirbyteCatalog catalog, - StandardSourceDefinition sourceDefinition) { - 
List suggestedStreams = new ArrayList<>(); - Boolean suggestingStreams; - - // There are occasions in tests where we have not seeded the sourceDefinition fully. This is to - // prevent those tests from failing - if (sourceDefinition != null) { - suggestingStreams = sourceDefinition.getSuggestedStreams() != null; - if (suggestingStreams) { - suggestedStreams.addAll(sourceDefinition.getSuggestedStreams().getStreams()); - } - } else { - suggestingStreams = false; - } - - return new io.airbyte.api.model.generated.AirbyteCatalog() - .streams(catalog.getStreams() - .stream() - .map(CatalogConverter::toApi) - .map(s -> new io.airbyte.api.model.generated.AirbyteStreamAndConfiguration() - .stream(s) - .config(generateDefaultConfiguration(s, suggestingStreams, suggestedStreams))) - .collect(Collectors.toList())); - } - - private static io.airbyte.api.model.generated.AirbyteStreamConfiguration generateDefaultConfiguration(final io.airbyte.api.model.generated.AirbyteStream stream, - Boolean suggestingStreams, - List suggestedStreams) { - final io.airbyte.api.model.generated.AirbyteStreamConfiguration result = new io.airbyte.api.model.generated.AirbyteStreamConfiguration() - .aliasName(Names.toAlphanumericAndUnderscore(stream.getName())) - .cursorField(stream.getDefaultCursorField()) - .destinationSyncMode(io.airbyte.api.model.generated.DestinationSyncMode.APPEND) - .primaryKey(stream.getSourceDefinedPrimaryKey()) - .selected(!suggestingStreams) - .suggested(true); - - if (suggestingStreams) { - if (suggestedStreams.contains(stream.getName())) { - result.setSelected(true); - } else { - result.setSuggested(false); - } - } - - if (stream.getSupportedSyncModes().size() > 0) { - result.setSyncMode(stream.getSupportedSyncModes().get(0)); - } else { - result.setSyncMode(io.airbyte.api.model.generated.SyncMode.INCREMENTAL); - } - - return result; - } - - public static io.airbyte.api.model.generated.AirbyteCatalog toApi(final ConfiguredAirbyteCatalog catalog, FieldSelectionData 
fieldSelectionData) { - final List streams = catalog.getStreams() - .stream() - .map(configuredStream -> { - final var streamDescriptor = new StreamDescriptor() - .name(configuredStream.getStream().getName()) - .namespace(configuredStream.getStream().getNamespace()); - final io.airbyte.api.model.generated.AirbyteStreamConfiguration configuration = - new io.airbyte.api.model.generated.AirbyteStreamConfiguration() - .syncMode(Enums.convertTo(configuredStream.getSyncMode(), io.airbyte.api.model.generated.SyncMode.class)) - .cursorField(configuredStream.getCursorField()) - .destinationSyncMode( - Enums.convertTo(configuredStream.getDestinationSyncMode(), io.airbyte.api.model.generated.DestinationSyncMode.class)) - .primaryKey(configuredStream.getPrimaryKey()) - .aliasName(Names.toAlphanumericAndUnderscore(configuredStream.getStream().getName())) - .selected(true) - .fieldSelectionEnabled(getStreamHasFieldSelectionEnabled(fieldSelectionData, streamDescriptor)); - if (configuration.getFieldSelectionEnabled()) { - final List selectedColumns = new ArrayList<>(); - // TODO(mfsiega-airbyte): support nested fields here. 
- configuredStream.getStream().getJsonSchema().findValue("properties").fieldNames().forEachRemaining((name) -> selectedColumns.add(name)); - configuration.setSelectedFields( - selectedColumns.stream().map((fieldName) -> new SelectedFieldInfo().addFieldPathItem(fieldName)).collect(Collectors.toList())); - } - return new io.airbyte.api.model.generated.AirbyteStreamAndConfiguration() - .stream(toApi(configuredStream.getStream())) - .config(configuration); - }) - .collect(Collectors.toList()); - return new io.airbyte.api.model.generated.AirbyteCatalog().streams(streams); - } - - private static Boolean getStreamHasFieldSelectionEnabled(FieldSelectionData fieldSelectionData, StreamDescriptor streamDescriptor) { - if (fieldSelectionData == null - || fieldSelectionData.getAdditionalProperties().get(streamDescriptorToStringForFieldSelection(streamDescriptor)) == null) { - return false; - } - - return fieldSelectionData.getAdditionalProperties().get(streamDescriptorToStringForFieldSelection(streamDescriptor)); - } - - /** - * Converts the API catalog model into a protocol catalog. Note: returns all streams, regardless of - * selected status. See - * {@link CatalogConverter#toConfiguredProtocol(AirbyteStream, AirbyteStreamConfiguration)} for - * context. - * - * @param catalog api catalog - * @return protocol catalog - */ - public static io.airbyte.protocol.models.ConfiguredAirbyteCatalog toProtocolKeepAllStreams( - final io.airbyte.api.model.generated.AirbyteCatalog catalog) - throws JsonValidationException { - final AirbyteCatalog clone = Jsons.clone(catalog); - clone.getStreams().forEach(stream -> stream.getConfig().setSelected(true)); - return toConfiguredProtocol(clone); - } - - /** - * To convert AirbyteCatalog from APIs to model. This is to differentiate between - * toConfiguredProtocol as the other one converts to ConfiguredAirbyteCatalog object instead. 
- */ - public static io.airbyte.protocol.models.AirbyteCatalog toProtocol( - final io.airbyte.api.model.generated.AirbyteCatalog catalog) - throws JsonValidationException { - final ArrayList errors = new ArrayList<>(); - - io.airbyte.protocol.models.AirbyteCatalog protoCatalog = - new io.airbyte.protocol.models.AirbyteCatalog(); - var airbyteStream = catalog.getStreams().stream().map(stream -> { - try { - return toConfiguredProtocol(stream.getStream(), stream.getConfig()); - } catch (JsonValidationException e) { - LOGGER.error("Error parsing catalog: {}", e); - errors.add(e); - return null; - } - }).collect(Collectors.toList()); - - if (!errors.isEmpty()) { - throw errors.get(0); - } - protoCatalog.withStreams(airbyteStream); - return protoCatalog; - } - - /** - * Converts the API catalog model into a protocol catalog. Note: only streams marked as selected - * will be returned. This is included in this converter as the API model always carries all the - * streams it has access to and then marks the ones that should not be used as not selected, while - * the protocol version just uses the presence of the streams as evidence that it should be - * included. 
- * - * @param catalog api catalog - * @return protocol catalog - */ - public static io.airbyte.protocol.models.ConfiguredAirbyteCatalog toConfiguredProtocol(final io.airbyte.api.model.generated.AirbyteCatalog catalog) - throws JsonValidationException { - final ArrayList errors = new ArrayList<>(); - final List streams = catalog.getStreams() - .stream() - .filter(s -> s.getConfig().getSelected()) - .map(s -> { - try { - return new io.airbyte.protocol.models.ConfiguredAirbyteStream() - .withStream(toConfiguredProtocol(s.getStream(), s.getConfig())) - .withSyncMode(Enums.convertTo(s.getConfig().getSyncMode(), io.airbyte.protocol.models.SyncMode.class)) - .withCursorField(s.getConfig().getCursorField()) - .withDestinationSyncMode(Enums.convertTo(s.getConfig().getDestinationSyncMode(), - io.airbyte.protocol.models.DestinationSyncMode.class)) - .withPrimaryKey(Optional.ofNullable(s.getConfig().getPrimaryKey()).orElse(Collections.emptyList())); - } catch (JsonValidationException e) { - LOGGER.error("Error parsing catalog: {}", e); - errors.add(e); - return null; - } - }) - .collect(Collectors.toList()); - if (!errors.isEmpty()) { - throw errors.get(0); - } - return new io.airbyte.protocol.models.ConfiguredAirbyteCatalog() - .withStreams(streams); - } - - /** - * Generate the map from StreamDescriptor to indicator of whether field selection is enabled for - * that stream. 
- * - * @param syncCatalog the catalog - * @return the map as a FieldSelectionData object - */ - public static FieldSelectionData getFieldSelectionData(final AirbyteCatalog syncCatalog) { - if (syncCatalog == null) { - return null; - } - final var fieldSelectionData = new FieldSelectionData(); - for (final AirbyteStreamAndConfiguration streamAndConfig : syncCatalog.getStreams()) { - final var streamDescriptor = new StreamDescriptor() - .name(streamAndConfig.getStream().getName()) - .namespace(streamAndConfig.getStream().getNamespace()); - final boolean fieldSelectionEnabled = - streamAndConfig.getConfig().getFieldSelectionEnabled() == null ? false : streamAndConfig.getConfig().getFieldSelectionEnabled(); - fieldSelectionData.setAdditionalProperty(streamDescriptorToStringForFieldSelection(streamDescriptor), fieldSelectionEnabled); - } - return fieldSelectionData; - } - - // Return a string representation of a stream descriptor that's convenient to use as a key for the - // field selection data. - private static String streamDescriptorToStringForFieldSelection(final StreamDescriptor streamDescriptor) { - return String.format("%s/%s", streamDescriptor.getNamespace(), streamDescriptor.getName()); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java deleted file mode 100644 index 1360536e36bb..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers.helpers; - -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionSearch; -import org.apache.logging.log4j.util.Strings; - -public class ConnectionMatcher implements Matchable { - - private final ConnectionSearch search; - - public ConnectionMatcher(final ConnectionSearch search) { - this.search = search; - } - - @Override - public ConnectionRead match(final ConnectionRead query) { - if (search == null) { - return query; - } - - final ConnectionRead fromSearch = new ConnectionRead(); - fromSearch.connectionId(search.getConnectionId() == null ? query.getConnectionId() : search.getConnectionId()); - fromSearch.destinationId(search.getDestinationId() == null ? query.getDestinationId() : search.getDestinationId()); - fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); - fromSearch.namespaceFormat(Strings.isBlank(search.getNamespaceFormat()) || "null".equals(search.getNamespaceFormat()) - ? query.getNamespaceFormat() - : search.getNamespaceFormat()); - fromSearch.namespaceDefinition( - search.getNamespaceDefinition() == null ? query.getNamespaceDefinition() : search.getNamespaceDefinition()); - fromSearch.prefix(Strings.isBlank(search.getPrefix()) ? query.getPrefix() : search.getPrefix()); - fromSearch.schedule(search.getSchedule() == null ? query.getSchedule() : search.getSchedule()); - fromSearch.scheduleType(search.getScheduleType() == null ? query.getScheduleType() : search.getScheduleType()); - fromSearch.scheduleData(search.getScheduleData() == null ? query.getScheduleData() : search.getScheduleData()); - fromSearch.sourceId(search.getSourceId() == null ? query.getSourceId() : search.getSourceId()); - fromSearch.status(search.getStatus() == null ? 
query.getStatus() : search.getStatus()); - - // these properties are not enabled in the search - fromSearch.resourceRequirements(query.getResourceRequirements()); - fromSearch.syncCatalog(query.getSyncCatalog()); - fromSearch.operationIds(query.getOperationIds()); - fromSearch.sourceCatalogId(query.getSourceCatalogId()); - fromSearch.geography(query.getGeography()); - fromSearch.breakingChange(query.getBreakingChange()); - - return fromSearch; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionScheduleHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionScheduleHelper.java deleted file mode 100644 index 23c0fda9d4f7..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionScheduleHelper.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers.helpers; - -import io.airbyte.api.model.generated.ConnectionScheduleData; -import io.airbyte.api.model.generated.ConnectionScheduleType; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.config.BasicSchedule; -import io.airbyte.config.Cron; -import io.airbyte.config.Schedule; -import io.airbyte.config.ScheduleData; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.ScheduleType; -import io.airbyte.validation.json.JsonValidationException; -import java.text.ParseException; -import java.util.TimeZone; -import org.joda.time.DateTimeZone; -import org.quartz.CronExpression; - -/** - * Helper class to handle connection schedules, including validation and translating between API and - * config. 
- */ -public class ConnectionScheduleHelper { - - public static void populateSyncFromScheduleTypeAndData(final StandardSync standardSync, - final ConnectionScheduleType scheduleType, - final ConnectionScheduleData scheduleData) - throws JsonValidationException { - if (scheduleType != ConnectionScheduleType.MANUAL && scheduleData == null) { - throw new JsonValidationException("schedule data must be populated if schedule type is populated"); - } - switch (scheduleType) { - // NOTE: the `manual` column is marked required, so we populate it until it's removed. - case MANUAL -> { - standardSync.withScheduleType(ScheduleType.MANUAL).withScheduleData(null).withManual(true); - - // explicitly null out the legacy `schedule` column until it's removed. - standardSync.withSchedule(null); - } - case BASIC -> { - if (scheduleData.getBasicSchedule() == null) { - throw new JsonValidationException("if schedule type is basic, then scheduleData.basic must be populated"); - } - standardSync - .withScheduleType(ScheduleType.BASIC_SCHEDULE) - .withScheduleData(new ScheduleData().withBasicSchedule( - new BasicSchedule().withTimeUnit(ApiPojoConverters.toBasicScheduleTimeUnit(scheduleData.getBasicSchedule().getTimeUnit())) - .withUnits(scheduleData.getBasicSchedule().getUnits()))) - .withManual(false); - // Populate the legacy format for now as well, since some places still expect it to exist. - // TODO(https://github.com/airbytehq/airbyte/issues/11432): remove. - final Schedule schedule = new Schedule() - .withTimeUnit(ApiPojoConverters.toLegacyScheduleTimeUnit(scheduleData.getBasicSchedule().getTimeUnit())) - .withUnits(scheduleData.getBasicSchedule().getUnits()); - standardSync - .withManual(false) - .withSchedule(schedule); - } - case CRON -> { - if (scheduleData.getCron() == null) { - throw new JsonValidationException("if schedule type is cron, then scheduleData.cron must be populated"); - } - // Validate that this is a valid cron expression and timezone. 
- final String cronExpression = scheduleData.getCron().getCronExpression(); - final String cronTimeZone = scheduleData.getCron().getCronTimeZone(); - if (cronExpression == null || cronTimeZone == null) { - throw new JsonValidationException("Cron expression and timezone are required"); - } - if (cronTimeZone.toLowerCase().startsWith("etc")) { - throw new JsonValidationException("Etc/ timezones are unsupported"); - } - try { - final TimeZone timeZone = DateTimeZone.forID(cronTimeZone).toTimeZone(); - final CronExpression parsedCronExpression = new CronExpression(cronExpression); - parsedCronExpression.setTimeZone(timeZone); - } catch (ParseException e) { - throw (JsonValidationException) new JsonValidationException("invalid cron expression").initCause(e); - } catch (IllegalArgumentException e) { - throw (JsonValidationException) new JsonValidationException("invalid cron timezone").initCause(e); - } - standardSync - .withScheduleType(ScheduleType.CRON) - .withScheduleData(new ScheduleData().withCron(new Cron() - .withCronExpression(cronExpression) - .withCronTimeZone(cronTimeZone))) - .withManual(false); - - // explicitly null out the legacy `schedule` column until it's removed. - standardSync.withSchedule(null); - } - } - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java deleted file mode 100644 index 03c8f9d58925..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationSearch; -import org.apache.logging.log4j.util.Strings; - -public class DestinationMatcher implements Matchable { - - private final DestinationSearch search; - - public DestinationMatcher(final DestinationSearch search) { - this.search = search; - } - - @Override - public DestinationRead match(final DestinationRead query) { - if (search == null) { - return query; - } - - final DestinationRead fromSearch = new DestinationRead(); - fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); - fromSearch.destinationDefinitionId(search.getDestinationDefinitionId() == null ? query.getDestinationDefinitionId() - : search.getDestinationDefinitionId()); - fromSearch - .destinationId(search.getDestinationId() == null ? query.getDestinationId() : search.getDestinationId()); - fromSearch.destinationName( - Strings.isBlank(search.getDestinationName()) ? query.getDestinationName() : search.getDestinationName()); - fromSearch.workspaceId(search.getWorkspaceId() == null ? 
query.getWorkspaceId() : search.getWorkspaceId()); - if (search.getConnectionConfiguration() == null) { - fromSearch.connectionConfiguration(query.getConnectionConfiguration()); - } else if (query.getConnectionConfiguration() == null) { - fromSearch.connectionConfiguration(search.getConnectionConfiguration()); - } else { - final JsonNode connectionConfiguration = search.getConnectionConfiguration(); - query.getConnectionConfiguration().fieldNames() - .forEachRemaining(field -> { - if (!connectionConfiguration.has(field) && connectionConfiguration instanceof ObjectNode) { - ((ObjectNode) connectionConfiguration).set(field, query.getConnectionConfiguration().get(field)); - } - }); - fromSearch.connectionConfiguration(connectionConfiguration); - } - - return fromSearch; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/Matchable.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/Matchable.java deleted file mode 100644 index 671c23e8480c..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/Matchable.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers.helpers; - -@FunctionalInterface -interface Matchable { - - K match(K k); - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractor.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractor.java deleted file mode 100644 index 18624ac1ff77..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractor.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class OAuthPathExtractor { - - private static final String PROPERTIES = "properties"; - private static final String PATH_IN_CONNECTOR_CONFIG = "path_in_connector_config"; - - public static Map> extractOauthConfigurationPaths(final JsonNode configuration) { - - if (configuration != null && configuration.has(PROPERTIES) && configuration.get(PROPERTIES).isObject()) { - final Map> result = new HashMap<>(); - - configuration.get(PROPERTIES).fields().forEachRemaining(entry -> { - final JsonNode value = entry.getValue(); - if (value.isObject() && value.has(PATH_IN_CONNECTOR_CONFIG) && value.get(PATH_IN_CONNECTOR_CONFIG).isArray()) { - final List path = new ArrayList<>(); - for (final JsonNode pathPart : value.get(PATH_IN_CONNECTOR_CONFIG)) { - path.add(pathPart.textValue()); - } - result.put(entry.getKey(), path); - } - }); - - return result; - } else { - return new HashMap<>(); - } - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java deleted file mode 100644 index 6f9f120a8325..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceSearch; -import org.apache.logging.log4j.util.Strings; - -public class SourceMatcher implements Matchable { - - private final SourceSearch search; - - public SourceMatcher(final SourceSearch search) { - this.search = search; - } - - @Override - public SourceRead match(final SourceRead query) { - if (search == null) { - return query; - } - - final SourceRead fromSearch = new SourceRead(); - fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); - fromSearch.sourceDefinitionId(search.getSourceDefinitionId() == null ? query.getSourceDefinitionId() : search.getSourceDefinitionId()); - fromSearch.sourceId(search.getSourceId() == null ? query.getSourceId() : search.getSourceId()); - fromSearch.sourceName(Strings.isBlank(search.getSourceName()) ? query.getSourceName() : search.getSourceName()); - fromSearch.workspaceId(search.getWorkspaceId() == null ? 
query.getWorkspaceId() : search.getWorkspaceId()); - if (search.getConnectionConfiguration() == null) { - fromSearch.connectionConfiguration(query.getConnectionConfiguration()); - } else if (query.getConnectionConfiguration() == null) { - fromSearch.connectionConfiguration(search.getConnectionConfiguration()); - } else { - final JsonNode connectionConfiguration = search.getConnectionConfiguration(); - query.getConnectionConfiguration().fieldNames() - .forEachRemaining(field -> { - if (!connectionConfiguration.has(field) && connectionConfiguration instanceof ObjectNode) { - ((ObjectNode) connectionConfiguration).set(field, query.getConnectionConfiguration().get(field)); - } - }); - fromSearch.connectionConfiguration(connectionConfiguration); - } - - return fromSearch; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/DefaultSynchronousSchedulerClient.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/DefaultSynchronousSchedulerClient.java deleted file mode 100644 index f6a69bbd35c7..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/DefaultSynchronousSchedulerClient.java +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.scheduler; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Charsets; -import com.google.common.hash.HashFunction; -import com.google.common.hash.Hashing; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.temporal.TemporalClient; -import io.airbyte.commons.temporal.TemporalJobType; -import io.airbyte.commons.temporal.TemporalResponse; -import io.airbyte.commons.temporal.scheduling.RouterService; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorType; -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobCheckConnectionConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobDiscoverCatalogConfig; -import io.airbyte.config.JobGetSpecConfig; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardCheckConnectionOutput; -import io.airbyte.persistence.job.errorreporter.ConnectorJobReportingContext; -import io.airbyte.persistence.job.errorreporter.JobErrorReporter; -import io.airbyte.persistence.job.factory.OAuthConfigSupplier; -import io.airbyte.persistence.job.tracker.JobTracker; -import io.airbyte.persistence.job.tracker.JobTracker.JobState; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; -import java.time.Instant; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Function; -import java.util.function.Supplier; -import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultSynchronousSchedulerClient implements SynchronousSchedulerClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultSynchronousSchedulerClient.class); - - private static final HashFunction HASH_FUNCTION = Hashing.md5(); - - 
private final TemporalClient temporalClient; - private final JobTracker jobTracker; - private final JobErrorReporter jobErrorReporter; - private final OAuthConfigSupplier oAuthConfigSupplier; - - private final RouterService routerService; - - public DefaultSynchronousSchedulerClient(final TemporalClient temporalClient, - final JobTracker jobTracker, - final JobErrorReporter jobErrorReporter, - final OAuthConfigSupplier oAuthConfigSupplier, - final RouterService routerService) { - this.temporalClient = temporalClient; - this.jobTracker = jobTracker; - this.jobErrorReporter = jobErrorReporter; - this.oAuthConfigSupplier = oAuthConfigSupplier; - this.routerService = routerService; - } - - @Override - public SynchronousResponse createSourceCheckConnectionJob(final SourceConnection source, - final String dockerImage, - final Version protocolVersion, - final boolean isCustomConnector) - throws IOException { - final JsonNode sourceConfiguration = oAuthConfigSupplier.injectSourceOAuthParameters( - source.getSourceDefinitionId(), - source.getWorkspaceId(), - source.getConfiguration()); - final JobCheckConnectionConfig jobCheckConnectionConfig = new JobCheckConnectionConfig() - .withActorType(ActorType.SOURCE) - .withActorId(source.getSourceId()) - .withConnectionConfiguration(sourceConfiguration) - .withDockerImage(dockerImage) - .withProtocolVersion(protocolVersion) - .withIsCustomConnector(isCustomConnector); - - final UUID jobId = UUID.randomUUID(); - final ConnectorJobReportingContext jobReportingContext = new ConnectorJobReportingContext(jobId, dockerImage); - String taskQueue = routerService.getTaskQueueForWorkspace(source.getWorkspaceId(), TemporalJobType.CHECK_CONNECTION); - - return execute( - ConfigType.CHECK_CONNECTION_SOURCE, - jobReportingContext, - source.getSourceDefinitionId(), - () -> temporalClient.submitCheckConnection(UUID.randomUUID(), 0, taskQueue, jobCheckConnectionConfig), - ConnectorJobOutput::getCheckConnection, - source.getWorkspaceId()); - } - - 
@Override - public SynchronousResponse createDestinationCheckConnectionJob(final DestinationConnection destination, - final String dockerImage, - final Version protocolVersion, - final boolean isCustomConnector) - throws IOException { - final JsonNode destinationConfiguration = oAuthConfigSupplier.injectDestinationOAuthParameters( - destination.getDestinationDefinitionId(), - destination.getWorkspaceId(), - destination.getConfiguration()); - final JobCheckConnectionConfig jobCheckConnectionConfig = new JobCheckConnectionConfig() - .withActorType(ActorType.DESTINATION) - .withActorId(destination.getDestinationId()) - .withConnectionConfiguration(destinationConfiguration) - .withDockerImage(dockerImage) - .withProtocolVersion(protocolVersion) - .withIsCustomConnector(isCustomConnector); - - final UUID jobId = UUID.randomUUID(); - final ConnectorJobReportingContext jobReportingContext = new ConnectorJobReportingContext(jobId, dockerImage); - String taskQueue = routerService.getTaskQueueForWorkspace(destination.getWorkspaceId(), TemporalJobType.CHECK_CONNECTION); - - return execute( - ConfigType.CHECK_CONNECTION_DESTINATION, - jobReportingContext, - destination.getDestinationDefinitionId(), - () -> temporalClient.submitCheckConnection(jobId, 0, taskQueue, jobCheckConnectionConfig), - ConnectorJobOutput::getCheckConnection, - destination.getWorkspaceId()); - } - - @Override - public SynchronousResponse createDiscoverSchemaJob(final SourceConnection source, - final String dockerImage, - final String connectorVersion, - final Version protocolVersion, - final boolean isCustomConnector) - throws IOException { - final JsonNode sourceConfiguration = oAuthConfigSupplier.injectSourceOAuthParameters( - source.getSourceDefinitionId(), - source.getWorkspaceId(), - source.getConfiguration()); - final JobDiscoverCatalogConfig jobDiscoverCatalogConfig = new JobDiscoverCatalogConfig() - .withConnectionConfiguration(sourceConfiguration) - .withDockerImage(dockerImage) - 
.withProtocolVersion(protocolVersion) - .withSourceId(source.getSourceId().toString()) - .withConfigHash(HASH_FUNCTION.hashBytes(Jsons.serialize(source.getConfiguration()).getBytes( - Charsets.UTF_8)).toString()) - .withConnectorVersion(connectorVersion) - .withIsCustomConnector(isCustomConnector); - - final UUID jobId = UUID.randomUUID(); - final ConnectorJobReportingContext jobReportingContext = new ConnectorJobReportingContext(jobId, dockerImage); - - String taskQueue = routerService.getTaskQueueForWorkspace(source.getWorkspaceId(), TemporalJobType.DISCOVER_SCHEMA); - - return execute( - ConfigType.DISCOVER_SCHEMA, - jobReportingContext, - source.getSourceDefinitionId(), - () -> temporalClient.submitDiscoverSchema(jobId, 0, taskQueue, jobDiscoverCatalogConfig), - ConnectorJobOutput::getDiscoverCatalogId, - source.getWorkspaceId()); - } - - @Override - public SynchronousResponse createGetSpecJob(final String dockerImage, final boolean isCustomConnector) throws IOException { - final JobGetSpecConfig jobSpecConfig = new JobGetSpecConfig().withDockerImage(dockerImage).withIsCustomConnector(isCustomConnector); - - final UUID jobId = UUID.randomUUID(); - final ConnectorJobReportingContext jobReportingContext = new ConnectorJobReportingContext(jobId, dockerImage); - - return execute( - ConfigType.GET_SPEC, - jobReportingContext, - null, - () -> temporalClient.submitGetSpec(jobId, 0, jobSpecConfig), - ConnectorJobOutput::getSpec, - null); - } - - @VisibleForTesting - SynchronousResponse execute(final ConfigType configType, - final ConnectorJobReportingContext jobContext, - @Nullable final UUID connectorDefinitionId, - final Supplier> executor, - final Function outputMapper, - final UUID workspaceId) { - final long createdAt = Instant.now().toEpochMilli(); - final UUID jobId = jobContext.jobId(); - try { - track(jobId, configType, connectorDefinitionId, workspaceId, JobState.STARTED, null); - final TemporalResponse temporalResponse = executor.get(); - final Optional 
jobOutput = temporalResponse.getOutput(); - final T mappedOutput = jobOutput.map(outputMapper).orElse(null); - final JobState outputState = temporalResponse.getMetadata().isSucceeded() ? JobState.SUCCEEDED : JobState.FAILED; - - track(jobId, configType, connectorDefinitionId, workspaceId, outputState, mappedOutput); - - if (outputState == JobState.FAILED && jobOutput.isPresent()) { - reportError(configType, jobContext, jobOutput.get(), connectorDefinitionId, workspaceId); - } - - final long endedAt = Instant.now().toEpochMilli(); - return SynchronousResponse.fromTemporalResponse( - temporalResponse, - jobOutput.orElse(null), - mappedOutput, - jobId, - configType, - connectorDefinitionId, - createdAt, - endedAt); - } catch (final RuntimeException e) { - track(jobId, configType, connectorDefinitionId, workspaceId, JobState.FAILED, null); - throw e; - } - } - - /** - * @param connectorDefinitionId either source or destination definition id - */ - private void track(final UUID jobId, - final ConfigType configType, - final UUID connectorDefinitionId, - final UUID workspaceId, - final JobState jobState, - final T value) { - switch (configType) { - case CHECK_CONNECTION_SOURCE -> jobTracker.trackCheckConnectionSource( - jobId, - connectorDefinitionId, - workspaceId, - jobState, - (StandardCheckConnectionOutput) value); - case CHECK_CONNECTION_DESTINATION -> jobTracker.trackCheckConnectionDestination( - jobId, - connectorDefinitionId, - workspaceId, - jobState, - (StandardCheckConnectionOutput) value); - case DISCOVER_SCHEMA -> jobTracker.trackDiscover(jobId, connectorDefinitionId, workspaceId, jobState); - case GET_SPEC -> { - // skip tracking for get spec to avoid noise. - } - default -> throw new IllegalArgumentException( - String.format("Jobs of type %s cannot be processed here. 
They should be consumed in the JobSubmitter.", configType)); - } - - } - - private void reportError(final ConfigType configType, - final ConnectorJobReportingContext jobContext, - final T jobOutput, - final UUID connectorDefinitionId, - final UUID workspaceId) { - Exceptions.swallow(() -> { - switch (configType) { - case CHECK_CONNECTION_SOURCE -> jobErrorReporter.reportSourceCheckJobFailure( - connectorDefinitionId, - workspaceId, - ((ConnectorJobOutput) jobOutput).getFailureReason(), - jobContext); - case CHECK_CONNECTION_DESTINATION -> jobErrorReporter.reportDestinationCheckJobFailure( - connectorDefinitionId, - workspaceId, - ((ConnectorJobOutput) jobOutput).getFailureReason(), - jobContext); - case DISCOVER_SCHEMA -> jobErrorReporter.reportDiscoverJobFailure( - connectorDefinitionId, - workspaceId, - ((ConnectorJobOutput) jobOutput).getFailureReason(), - jobContext); - case GET_SPEC -> jobErrorReporter.reportSpecJobFailure( - ((ConnectorJobOutput) jobOutput).getFailureReason(), - jobContext); - default -> LOGGER.error("Tried to report job failure for type {}, but this job type is not supported", configType); - } - }); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/EventRunner.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/EventRunner.java deleted file mode 100644 index 59ce6f4394f1..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/EventRunner.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.scheduler; - -import io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; -import io.airbyte.protocol.models.StreamDescriptor; -import java.util.List; -import java.util.Set; -import java.util.UUID; - -public interface EventRunner { - - void createConnectionManagerWorkflow(final UUID connectionId); - - ManualOperationResult startNewManualSync(final UUID connectionId); - - ManualOperationResult startNewCancellation(final UUID connectionId); - - ManualOperationResult resetConnection(final UUID connectionId, final List streamsToReset, final boolean runSyncImmediately); - - void forceDeleteConnection(final UUID connectionId); - - // TODO: Delete - @Deprecated(forRemoval = true) - void migrateSyncIfNeeded(final Set connectionIds); - - void update(final UUID connectionId); - - void sendSchemaChangeNotification(final UUID connectionId, final String url); - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousJobMetadata.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousJobMetadata.java deleted file mode 100644 index 46b9658c3a95..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousJobMetadata.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.scheduler; - -import io.airbyte.commons.temporal.JobMetadata; -import io.airbyte.config.JobConfig.ConfigType; -import java.nio.file.Path; -import java.time.Instant; -import java.util.Objects; -import java.util.Optional; -import java.util.UUID; - -public class SynchronousJobMetadata { - - private final UUID id; - private final ConfigType configType; - private final UUID configId; - - private final long createdAt; - private final long endedAt; - private final boolean succeeded; - private final boolean connectorConfigurationUpdated; - - private final Path logPath; - - public static SynchronousJobMetadata fromJobMetadata(final JobMetadata jobMetadata, - final UUID id, - final ConfigType configType, - final UUID configId, - final boolean connectorConfigurationUpdated, - final long createdAt, - final long endedAt) { - return new SynchronousJobMetadata( - id, - configType, - configId, - createdAt, - endedAt, - jobMetadata.isSucceeded(), - connectorConfigurationUpdated, - jobMetadata.getLogPath()); - } - - public SynchronousJobMetadata(final UUID id, - final ConfigType configType, - final UUID configId, - final long createdAt, - final long endedAt, - final boolean succeeded, - final boolean connectorConfigurationUpdated, - final Path logPath) { - this.id = id; - this.configType = configType; - this.configId = configId; - this.createdAt = createdAt; - this.endedAt = endedAt; - this.succeeded = succeeded; - this.connectorConfigurationUpdated = connectorConfigurationUpdated; - this.logPath = logPath; - } - - public UUID getId() { - return id; - } - - public ConfigType getConfigType() { - return configType; - } - - public Optional getConfigId() { - return Optional.ofNullable(configId); - } - - public long getCreatedAt() { - return createdAt; - } - - public long getEndedAt() { - return endedAt; - } - - public boolean isSucceeded() { - return succeeded; - } - - public boolean isConnectorConfigurationUpdated() { - return 
connectorConfigurationUpdated; - } - - public Path getLogPath() { - return logPath; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final SynchronousJobMetadata that = (SynchronousJobMetadata) o; - return createdAt == that.createdAt && endedAt == that.endedAt && succeeded == that.succeeded - && connectorConfigurationUpdated == that.connectorConfigurationUpdated && Objects.equals(id, that.id) - && configType == that.configType && Objects.equals(configId, that.configId) && Objects.equals(logPath, that.logPath); - } - - @Override - public int hashCode() { - return Objects.hash(id, configType, configId, createdAt, endedAt, succeeded, connectorConfigurationUpdated, logPath); - } - - @Override - public String toString() { - return "SynchronousJobMetadata{" + - "id=" + id + - ", configType=" + configType + - ", configId=" + configId + - ", createdAt=" + createdAt + - ", endedAt=" + endedAt + - ", succeeded=" + succeeded + - ", connectorConfigurationUpdated=" + connectorConfigurationUpdated + - ", logPath=" + logPath + - '}'; - } - - public static SynchronousJobMetadata mock(final ConfigType configType) { - final long now = Instant.now().toEpochMilli(); - final UUID configId = null; - final boolean succeeded = true; - final boolean connectorConfigurationUpdated = false; - final Path logPath = null; - - return new SynchronousJobMetadata( - UUID.randomUUID(), - configType, - configId, - now, - now, - succeeded, - connectorConfigurationUpdated, - logPath); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousResponse.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousResponse.java deleted file mode 100644 index 38f31929ceec..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousResponse.java +++ /dev/null @@ -1,89 
+0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.scheduler; - -import io.airbyte.commons.temporal.TemporalResponse; -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.config.JobConfig.ConfigType; -import java.util.Objects; -import java.util.UUID; -import javax.annotation.Nullable; - -public class SynchronousResponse { - - private final T output; - private final SynchronousJobMetadata metadata; - - public static SynchronousResponse error(final SynchronousJobMetadata metadata) { - return new SynchronousResponse<>(null, metadata); - } - - public static SynchronousResponse success(final T output, final SynchronousJobMetadata metadata) { - return new SynchronousResponse<>(output, metadata); - } - - public static SynchronousResponse fromTemporalResponse(final TemporalResponse temporalResponse, - @Nullable final ConnectorJobOutput jobOutput, - @Nullable final T responseOutput, - final UUID id, - final ConfigType configType, - final UUID configId, - final long createdAt, - final long endedAt) { - - final SynchronousJobMetadata metadata = SynchronousJobMetadata.fromJobMetadata( - temporalResponse.getMetadata(), - id, - configType, - configId, - jobOutput != null ? 
jobOutput.getConnectorConfigurationUpdated() : false, - createdAt, - endedAt); - return new SynchronousResponse<>(responseOutput, metadata); - } - - public SynchronousResponse(final T output, final SynchronousJobMetadata metadata) { - this.output = output; - this.metadata = metadata; - } - - public boolean isSuccess() { - return metadata.isSucceeded(); - } - - public T getOutput() { - return output; - } - - public SynchronousJobMetadata getMetadata() { - return metadata; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final SynchronousResponse that = (SynchronousResponse) o; - return Objects.equals(output, that.output) && Objects.equals(metadata, that.metadata); - } - - @Override - public int hashCode() { - return Objects.hash(output, metadata); - } - - @Override - public String toString() { - return "SynchronousResponse{" + - "output=" + output + - ", metadata=" + metadata + - '}'; - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousSchedulerClient.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousSchedulerClient.java deleted file mode 100644 index 7be72c1ed8b0..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/SynchronousSchedulerClient.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.scheduler; - -import io.airbyte.commons.version.Version; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardCheckConnectionOutput; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; -import java.util.UUID; - -/** - * Exposes a way of executing short-lived jobs as RPC calls. Blocks until the job completes. 
No - * metadata will be stored in the Jobs table for jobs triggered via this client. - */ -public interface SynchronousSchedulerClient { - - SynchronousResponse createSourceCheckConnectionJob(SourceConnection source, - String dockerImage, - Version protocolVersion, - boolean isCustomConnector) - throws IOException; - - SynchronousResponse createDestinationCheckConnectionJob(DestinationConnection destination, - String dockerImage, - Version protocolVersion, - boolean isCustomConnector) - throws IOException; - - SynchronousResponse createDiscoverSchemaJob(SourceConnection source, - String dockerImage, - String connectorVersion, - Version protocolVersion, - boolean isCustomConnector) - throws IOException; - - SynchronousResponse createGetSpecJob(String dockerImage, boolean isCustomConnector) throws IOException; - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/TemporalEventRunner.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/TemporalEventRunner.java deleted file mode 100644 index 9e6ed0068e51..000000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/TemporalEventRunner.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.scheduler; - -import io.airbyte.commons.temporal.TemporalClient; -import io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; -import io.airbyte.protocol.models.StreamDescriptor; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import lombok.AllArgsConstructor; - -@AllArgsConstructor -public class TemporalEventRunner implements EventRunner { - - private final TemporalClient temporalClient; - - @Override - public void createConnectionManagerWorkflow(final UUID connectionId) { - temporalClient.submitConnectionUpdaterAsync(connectionId); - } - - @Override - public ManualOperationResult startNewManualSync(final UUID connectionId) { - return temporalClient.startNewManualSync(connectionId); - } - - @Override - public ManualOperationResult startNewCancellation(final UUID connectionId) { - return temporalClient.startNewCancellation(connectionId); - } - - @Override - public ManualOperationResult resetConnection(final UUID connectionId, - final List streamsToReset, - final boolean runSyncImmediately) { - return temporalClient.resetConnection(connectionId, streamsToReset, runSyncImmediately); - } - - @Override - public void forceDeleteConnection(final UUID connectionId) { - temporalClient.forceDeleteWorkflow(connectionId); - } - - @Override - public void migrateSyncIfNeeded(final Set connectionIds) { - temporalClient.migrateSyncIfNeeded(connectionIds); - } - - @Override - public void update(final UUID connectionId) { - temporalClient.update(connectionId); - } - - @Override - public void sendSchemaChangeNotification(final UUID connectionId, final String url) { - temporalClient.sendSchemaChangeNotification(connectionId, url); - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/services/AirbyteGithubStore.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/services/AirbyteGithubStore.java deleted file mode 100644 index 658b53ab629f..000000000000 --- 
a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/services/AirbyteGithubStore.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.services; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.helpers.YamlListToStandardDefinitions; -import java.io.IOException; -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse.BodyHandlers; -import java.time.Duration; -import java.util.Collections; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Convenience class for retrieving files checked into the Airbyte Github repo. - */ -@SuppressWarnings("PMD.AvoidCatchingThrowable") -public class AirbyteGithubStore { - - private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteGithubStore.class); - private static final EnvConfigs envConfigs = new EnvConfigs(); - private static final String GITHUB_BASE_URL = "https://raw.githubusercontent.com"; - private static final String SOURCE_DEFINITION_LIST_LOCATION_PATH = - "/airbytehq/airbyte/" + envConfigs.getGithubStoreBranch() + "/airbyte-config/init/src/main/resources/seed/source_definitions.yaml"; - private static final String DESTINATION_DEFINITION_LIST_LOCATION_PATH = - "/airbytehq/airbyte/" + envConfigs.getGithubStoreBranch() + "/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml"; - private static final HttpClient httpClient = HttpClient.newHttpClient(); - - private final String baseUrl; - private final Duration timeout; - - public static AirbyteGithubStore production() { - return new AirbyteGithubStore(GITHUB_BASE_URL, Duration.ofSeconds(30)); - } - - public static AirbyteGithubStore test(final String 
testBaseUrl, final Duration timeout) { - return new AirbyteGithubStore(testBaseUrl, timeout); - } - - public AirbyteGithubStore(final String baseUrl, final Duration timeout) { - this.baseUrl = baseUrl; - this.timeout = timeout; - } - - public List getLatestDestinations() throws InterruptedException { - try { - return YamlListToStandardDefinitions.toStandardDestinationDefinitions(getFile(DESTINATION_DEFINITION_LIST_LOCATION_PATH)); - } catch (final Throwable e) { - LOGGER.warn( - "Unable to retrieve latest Destination list from Github. Using the list bundled with Airbyte. This warning is expected if this Airbyte cluster does not have internet access.", - e); - return Collections.emptyList(); - } - } - - public List getLatestSources() throws InterruptedException { - try { - return YamlListToStandardDefinitions.toStandardSourceDefinitions(getFile(SOURCE_DEFINITION_LIST_LOCATION_PATH)); - } catch (final Throwable e) { - LOGGER.warn( - "Unable to retrieve latest Source list from Github. Using the list bundled with Airbyte. 
This warning is expected if this Airbyte cluster does not have internet access.", - e); - return Collections.emptyList(); - } - } - - @VisibleForTesting - String getFile(final String filePathWithSlashPrefix) throws IOException, InterruptedException { - final var request = HttpRequest - .newBuilder(URI.create(baseUrl + filePathWithSlashPrefix)) - .timeout(timeout) - .header("accept", "*/*") // accept any file type - .build(); - final var resp = httpClient.send(request, BodyHandlers.ofString()); - final Boolean isErrorResponse = resp.statusCode() / 100 != 2; - if (isErrorResponse) { - throw new IOException("getFile request ran into status code error: " + resp.statusCode() + "with message: " + resp.getClass()); - } - return resp.body(); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/RequestLoggerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/RequestLoggerTest.java deleted file mode 100644 index 409af39ac60a..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/RequestLoggerTest.java +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server; - -import io.airbyte.commons.io.IOs; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; -import java.util.UUID; -import java.util.stream.Stream; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.container.ContainerResponseContext; -import lombok.RequiredArgsConstructor; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; -import org.slf4j.MDC; - -@SuppressWarnings({"PMD.AvoidPrintStackTrace", "PMD.JUnitTestsShouldIncludeAssert"}) -@ExtendWith(MockitoExtension.class) -class RequestLoggerTest { - - private static final String VALID_JSON_OBJECT = "{\"valid\":1}"; - private static final String INVALID_JSON_OBJECT = "invalid"; - private static final String ACCEPTED_CONTENT_TYPE = "application/json"; - private static final String NON_ACCEPTED_CONTENT_TYPE = "application/gzip"; - private static final String METHOD = "POST"; - private static final String REMOTE_ADDR = "123.456.789.101"; - private static final String URL = "/api/v1/test"; - private static final String REQUEST_BODY_PROPERTY = "requestBodyProperty"; - - private static final Random RANDOM = new 
Random(); - - @Mock - private HttpServletRequest mServletRequest; - - private Path logPath; - - @BeforeEach - void init() throws IOException { - Mockito.when(mServletRequest.getMethod()) - .thenReturn(METHOD); - Mockito.when(mServletRequest.getRemoteAddr()) - .thenReturn(REMOTE_ADDR); - Mockito.when(mServletRequest.getRequestURI()) - .thenReturn(URL); - - // set up the mdc so that actually log to a file, so that we can verify that file logging captures - // threads. - final Path jobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); - LogClientSingleton.getInstance().setJobMdc(WorkerEnvironment.DOCKER, - LogConfigs.EMPTY, - jobRoot); - logPath = jobRoot.resolve(LogClientSingleton.LOG_FILENAME); - } - - @Nested - @DisplayName("Formats logs correctly") - class RequestLoggerFormatsLogsCorrectly { - - private static final int ERROR_CODE = 401; - private static final int SUCCESS_CODE = 200; - private static final String errorPrefix = RequestLogger - .createLogPrefix(REMOTE_ADDR, METHOD, ERROR_CODE, URL) - .toString(); - private static final String successPrefix = RequestLogger - .createLogPrefix(REMOTE_ADDR, METHOD, SUCCESS_CODE, URL) - .toString(); - - static Stream logScenarios() { - return Stream.of( - Arguments.of(INVALID_JSON_OBJECT, NON_ACCEPTED_CONTENT_TYPE, ERROR_CODE, errorPrefix), - Arguments.of(INVALID_JSON_OBJECT, ACCEPTED_CONTENT_TYPE, ERROR_CODE, errorPrefix), - Arguments.of(VALID_JSON_OBJECT, NON_ACCEPTED_CONTENT_TYPE, ERROR_CODE, errorPrefix), - Arguments.of(VALID_JSON_OBJECT, ACCEPTED_CONTENT_TYPE, ERROR_CODE, errorPrefix + " - " + VALID_JSON_OBJECT), - Arguments.of(INVALID_JSON_OBJECT, NON_ACCEPTED_CONTENT_TYPE, SUCCESS_CODE, successPrefix), - Arguments.of(INVALID_JSON_OBJECT, ACCEPTED_CONTENT_TYPE, SUCCESS_CODE, successPrefix), - Arguments.of(VALID_JSON_OBJECT, NON_ACCEPTED_CONTENT_TYPE, SUCCESS_CODE, successPrefix), - Arguments.of(VALID_JSON_OBJECT, ACCEPTED_CONTENT_TYPE, SUCCESS_CODE, successPrefix + " - " + VALID_JSON_OBJECT)); - } - - 
@Mock - private ContainerRequestContext mRequestContext; - @Mock - private ContainerResponseContext mResponseContext; - - private RequestLogger requestLogger; - - @ParameterizedTest - @MethodSource("logScenarios") - @DisplayName("Check that the proper log is produced based on the scenario") - void test(final String requestBody, final String contentType, final int status, final String expectedLog) throws IOException { - // We have to instanciate the logger here, because the MDC config has been changed to log in a - // temporary file. - requestLogger = new RequestLogger(MDC.getCopyOfContextMap(), mServletRequest); - - stubRequestContext(mRequestContext, requestBody); - - Mockito.when(mResponseContext.getStatus()) - .thenReturn(status); - - Mockito.when(mServletRequest.getHeader("Content-Type")) - .thenReturn(contentType); - - // This is call to set the requestBody variable in the RequestLogger - requestLogger.filter(mRequestContext); - requestLogger.filter(mRequestContext, mResponseContext); - - final String expectedLogLevel = status == SUCCESS_CODE ? "INFO" : "ERROR"; - - final String logs = IOs.readFile(logPath); - final Stream matchingLines = logs.lines() - .filter(line -> line.endsWith(expectedLog)) - .filter(line -> line.contains(expectedLogLevel)); - - Assertions.assertThat(matchingLines).hasSize(1); - } - - } - - @Nested - @DisplayName("Logs correct requestBody") - class RequestLoggerCorrectRequestBody { - - /** - * This is a complex test that was written to prove that our requestLogger had a concurrency bug - * that caused incorrect request bodies to be logged. The RequestLogger originally used an instance - * variable that held the requestBody, which was written to by the request filter, and read by the - * response filter to generate a response log line that contained the original request body. 
If - * multiple requests were being processed at the same time, it was possible for the request filter - * of one request to overwrite the requestBody instance variable before the response log line was - * generated. The fixed implementation sets the requestBody as a custom property on the - * ContainerRequestFilter in the first filter method, and reads the custom requestBody property from - * the ContainerRequestFilter in the second filter method. - *

- * To cover this race condition, this test creates a single RequestLogger instance that is - * referenced from 100 threads. Each thread logs a unique request body. The main thread waits for - * all threads to finish, and then assures that every unique request body is included in the logs. - *

- * This test fails when using the instance variable approach for recording request bodies, because - * some request bodies are overwritten before they can be logged. - */ - @Test - void testRequestBodyConsistency() { - Mockito.when(mServletRequest.getHeader("Content-Type")) - .thenReturn(ACCEPTED_CONTENT_TYPE); - - final RequestLogger requestLogger = new RequestLogger(MDC.getCopyOfContextMap(), mServletRequest); - - final List testCases = new ArrayList<>(); - final List threads = new ArrayList<>(); - - for (int i = 1; i < 100; i++) { - testCases.add(createRunnableTestCase(requestLogger, UUID.randomUUID())); - } - - testCases.forEach(testCase -> { - final Thread thread = new Thread(testCase); - threads.add(thread); - thread.start(); - }); - - threads.forEach(thread -> { - try { - thread.join(); - } catch (final InterruptedException e) { - e.printStackTrace(); - } - }); - - testCases.forEach(testCase -> Assertions.assertThat(testCase.requestBodyWasLogged()).isTrue()); - } - - private RequestResponseRunnable createRunnableTestCase(final RequestLogger requestLogger, final UUID threadIdentifier) { - - // create thread-specific context mocks - final ContainerRequestContext mRequestContext = Mockito.mock(ContainerRequestContext.class); - final ContainerResponseContext mResponseContext = Mockito.mock(ContainerResponseContext.class); - - final String expectedRequestBody = String.format("{\"threadIdentifier\":\"%s\"}", threadIdentifier); - - stubRequestContext(mRequestContext, expectedRequestBody); - - return new RequestResponseRunnable(requestLogger, expectedRequestBody, mRequestContext, mResponseContext); - } - - @RequiredArgsConstructor - class RequestResponseRunnable implements Runnable { - - private final RequestLogger requestLogger; - private final String expectedRequestBody; - private final ContainerRequestContext mRequestContext; - private final ContainerResponseContext mResponseContext; - - @Override - public void run() { - try { - 
requestLogger.filter(mRequestContext); - Thread.sleep(RANDOM.nextInt(1000)); // random sleep to make race more likely - requestLogger.filter(mRequestContext, mResponseContext); - } catch (final IOException | InterruptedException e) { - e.printStackTrace(); - } - } - - // search all log lines to see if this thread's request body was logged - Boolean requestBodyWasLogged() { - return IOs.readFile(logPath).lines().anyMatch(line -> line.contains(expectedRequestBody)); - } - - } - - } - - private void stubRequestContext(final ContainerRequestContext mockContainerRequestContext, final String requestBody) { - Mockito.when(mockContainerRequestContext.getMethod()) - .thenReturn(METHOD); - - Mockito.when(mockContainerRequestContext.getEntityStream()) - .thenReturn(new ByteArrayInputStream(requestBody.getBytes(StandardCharsets.UTF_8))); - - Mockito.when(mockContainerRequestContext.getProperty(REQUEST_BODY_PROPERTY)).thenReturn(requestBody); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogConverterTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogConverterTest.java deleted file mode 100644 index 32eec2e43264..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogConverterTest.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import static io.airbyte.commons.server.helpers.ConnectionHelpers.FIELD_NAME; -import static io.airbyte.commons.server.helpers.ConnectionHelpers.SECOND_FIELD_NAME; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.api.model.generated.DestinationSyncMode; -import io.airbyte.api.model.generated.SelectedFieldInfo; -import io.airbyte.api.model.generated.SyncMode; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.commons.server.helpers.ConnectionHelpers; -import io.airbyte.config.DataType; -import io.airbyte.config.FieldSelectionData; -import io.airbyte.validation.json.JsonValidationException; -import java.util.List; -import org.junit.jupiter.api.Test; - -class CatalogConverterTest { - - @Test - void testConvertToProtocol() throws JsonValidationException { - assertEquals(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog(), - CatalogConverter.toConfiguredProtocol(ConnectionHelpers.generateBasicApiCatalog())); - } - - @Test - void testConvertToAPI() { - assertEquals(ConnectionHelpers.generateBasicApiCatalog(), CatalogConverter.toApi(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog(), - new FieldSelectionData())); - } - - @Test - void testEnumConversion() { - assertTrue(Enums.isCompatible(io.airbyte.api.model.generated.DataType.class, DataType.class)); - assertTrue(Enums.isCompatible(io.airbyte.config.SyncMode.class, io.airbyte.api.model.generated.SyncMode.class)); - } - - @Test - void testConvertToProtocolColumnSelectionValidation() { - assertThrows(JsonValidationException.class, () -> { - // fieldSelectionEnabled=true but selectedFields=null. 
- final var catalog = ConnectionHelpers.generateBasicApiCatalog(); - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).selectedFields(null); - CatalogConverter.toConfiguredProtocol(catalog); - }); - - assertThrows(JsonValidationException.class, () -> { - // JSON schema has no `properties` node. - final var catalog = ConnectionHelpers.generateBasicApiCatalog(); - ((ObjectNode) catalog.getStreams().get(0).getStream().getJsonSchema()).remove("properties"); - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem("foo")); - CatalogConverter.toConfiguredProtocol(catalog); - }); - - assertThrows(JsonValidationException.class, () -> { - // SelectedFieldInfo with empty path. - final var catalog = ConnectionHelpers.generateBasicApiCatalog(); - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo()); - CatalogConverter.toConfiguredProtocol(catalog); - }); - - assertThrows(UnsupportedOperationException.class, () -> { - // SelectedFieldInfo with nested field path. - final var catalog = ConnectionHelpers.generateBasicApiCatalog(); - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true) - .addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem("foo").addFieldPathItem("bar")); - CatalogConverter.toConfiguredProtocol(catalog); - }); - - assertThrows(JsonValidationException.class, () -> { - // SelectedFieldInfo with empty path. - final var catalog = ConnectionHelpers.generateBasicApiCatalog(); - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem("foo")); - CatalogConverter.toConfiguredProtocol(catalog); - }); - - assertThrows(JsonValidationException.class, () -> { - final var catalog = ConnectionHelpers.generateApiCatalogWithTwoFields(); - // Only FIELD_NAME is selected. 
- catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME)); - // The sync mode is INCREMENTAL and SECOND_FIELD_NAME is a cursor field. - catalog.getStreams().get(0).getConfig().syncMode(SyncMode.INCREMENTAL).cursorField(List.of(SECOND_FIELD_NAME)); - CatalogConverter.toConfiguredProtocol(catalog); - }); - - assertDoesNotThrow(() -> { - final var catalog = ConnectionHelpers.generateApiCatalogWithTwoFields(); - // Only FIELD_NAME is selected. - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME)); - // The cursor field is not selected, but it's okay because it's FULL_REFRESH so it doesn't throw. - catalog.getStreams().get(0).getConfig().syncMode(SyncMode.FULL_REFRESH).cursorField(List.of(SECOND_FIELD_NAME)); - CatalogConverter.toConfiguredProtocol(catalog); - }); - - assertThrows(JsonValidationException.class, () -> { - final var catalog = ConnectionHelpers.generateApiCatalogWithTwoFields(); - // Only FIELD_NAME is selected. - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME)); - // The destination sync mode is DEDUP and SECOND_FIELD_NAME is a primary key. - catalog.getStreams().get(0).getConfig().destinationSyncMode(DestinationSyncMode.APPEND_DEDUP).primaryKey(List.of(List.of(SECOND_FIELD_NAME))); - CatalogConverter.toConfiguredProtocol(catalog); - }); - - assertDoesNotThrow(() -> { - final var catalog = ConnectionHelpers.generateApiCatalogWithTwoFields(); - // Only FIELD_NAME is selected. - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME)); - // The primary key is not selected but that's okay because the destination sync mode is OVERWRITE. 
- catalog.getStreams().get(0).getConfig().destinationSyncMode(DestinationSyncMode.OVERWRITE).primaryKey(List.of(List.of(SECOND_FIELD_NAME))); - CatalogConverter.toConfiguredProtocol(catalog); - }); - } - - @Test - void testConvertToProtocolFieldSelection() throws JsonValidationException { - final var catalog = ConnectionHelpers.generateApiCatalogWithTwoFields(); - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME)); - assertEquals(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog(), CatalogConverter.toConfiguredProtocol(catalog)); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogDiffConvertersTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogDiffConvertersTest.java deleted file mode 100644 index f419597c0072..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogDiffConvertersTest.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.api.model.generated.FieldTransform; -import io.airbyte.api.model.generated.StreamTransform; -import io.airbyte.commons.enums.Enums; -import io.airbyte.protocol.models.transform_models.FieldTransformType; -import io.airbyte.protocol.models.transform_models.StreamTransformType; -import org.junit.jupiter.api.Test; - -class CatalogDiffConvertersTest { - - @Test - void testEnumConversion() { - assertTrue(Enums.isCompatible(StreamTransform.TransformTypeEnum.class, StreamTransformType.class)); - assertTrue(Enums.isCompatible(FieldTransform.TransformTypeEnum.class, FieldTransformType.class)); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/ConfigurationUpdateTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/ConfigurationUpdateTest.java deleted file mode 100644 index eb565179128c..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/ConfigurationUpdateTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; -import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class ConfigurationUpdateTest { - - private static final String IMAGE_REPOSITORY = "foo"; - private static final String IMAGE_TAG = "bar"; - private static final UUID UUID1 = UUID.randomUUID(); - private static final UUID UUID2 = UUID.randomUUID(); - private static final JsonNode SPEC = CatalogHelpers.fieldsToJsonSchema( - Field.of(JdbcUtils.USERNAME_KEY, JsonSchemaType.STRING), - Field.of(JdbcUtils.PASSWORD_KEY, JsonSchemaType.STRING)); - private static final ConnectorSpecification CONNECTOR_SPECIFICATION = new ConnectorSpecification().withConnectionSpecification(SPEC); - private static final JsonNode ORIGINAL_CONFIGURATION = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, "airbyte") - .put(JdbcUtils.PASSWORD_KEY, "abc") - .build()); - 
private static final JsonNode NEW_CONFIGURATION = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, "airbyte") - .put(JdbcUtils.PASSWORD_KEY, "xyz") - .build()); - private static final StandardSourceDefinition SOURCE_DEFINITION = new StandardSourceDefinition() - .withDockerRepository(IMAGE_REPOSITORY) - .withDockerImageTag(IMAGE_TAG) - .withSpec(CONNECTOR_SPECIFICATION); - private static final SourceConnection ORIGINAL_SOURCE_CONNECTION = new SourceConnection() - .withSourceId(UUID1) - .withSourceDefinitionId(UUID2) - .withConfiguration(ORIGINAL_CONFIGURATION); - private static final SourceConnection NEW_SOURCE_CONNECTION = new SourceConnection() - .withSourceId(UUID1) - .withSourceDefinitionId(UUID2) - .withConfiguration(NEW_CONFIGURATION); - private static final StandardDestinationDefinition DESTINATION_DEFINITION = new StandardDestinationDefinition() - .withDockerRepository(IMAGE_REPOSITORY) - .withDockerImageTag(IMAGE_TAG) - .withSpec(CONNECTOR_SPECIFICATION); - private static final DestinationConnection ORIGINAL_DESTINATION_CONNECTION = new DestinationConnection() - .withDestinationId(UUID1) - .withDestinationDefinitionId(UUID2) - .withConfiguration(ORIGINAL_CONFIGURATION); - private static final DestinationConnection NEW_DESTINATION_CONNECTION = new DestinationConnection() - .withDestinationId(UUID1) - .withDestinationDefinitionId(UUID2) - .withConfiguration(NEW_CONFIGURATION); - - private ConfigRepository configRepository; - private SecretsRepositoryReader secretsRepositoryReader; - private JsonSecretsProcessor secretsProcessor; - private ConfigurationUpdate configurationUpdate; - - @BeforeEach - void setup() { - configRepository = mock(ConfigRepository.class); - secretsRepositoryReader = mock(SecretsRepositoryReader.class); - secretsProcessor = mock(JsonSecretsProcessor.class); - - configurationUpdate = new ConfigurationUpdate(configRepository, secretsRepositoryReader, secretsProcessor); - } - - @Test - void testSourceUpdate() throws 
JsonValidationException, IOException, ConfigNotFoundException { - when(secretsRepositoryReader.getSourceConnectionWithSecrets(UUID1)).thenReturn(ORIGINAL_SOURCE_CONNECTION); - when(configRepository.getStandardSourceDefinition(UUID2)).thenReturn(SOURCE_DEFINITION); - when(secretsProcessor.copySecrets(ORIGINAL_CONFIGURATION, NEW_CONFIGURATION, SPEC)).thenReturn(NEW_CONFIGURATION); - - final SourceConnection actual = configurationUpdate.source(UUID1, ORIGINAL_SOURCE_CONNECTION.getName(), NEW_CONFIGURATION); - - assertEquals(NEW_SOURCE_CONNECTION, actual); - } - - @Test - void testDestinationUpdate() throws JsonValidationException, IOException, ConfigNotFoundException { - when(secretsRepositoryReader.getDestinationConnectionWithSecrets(UUID1)).thenReturn(ORIGINAL_DESTINATION_CONNECTION); - when(configRepository.getStandardDestinationDefinition(UUID2)).thenReturn(DESTINATION_DEFINITION); - when(secretsProcessor.copySecrets(ORIGINAL_CONFIGURATION, NEW_CONFIGURATION, SPEC)).thenReturn(NEW_CONFIGURATION); - - final DestinationConnection actual = configurationUpdate.destination(UUID1, ORIGINAL_DESTINATION_CONNECTION.getName(), NEW_CONFIGURATION); - - assertEquals(NEW_DESTINATION_CONNECTION, actual); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/JobConverterTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/JobConverterTest.java deleted file mode 100644 index f6289e375859..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/JobConverterTest.java +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.converters; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.AttemptFailureOrigin; -import io.airbyte.api.model.generated.AttemptFailureReason; -import io.airbyte.api.model.generated.AttemptFailureSummary; -import io.airbyte.api.model.generated.AttemptFailureType; -import io.airbyte.api.model.generated.AttemptInfoRead; -import io.airbyte.api.model.generated.AttemptRead; -import io.airbyte.api.model.generated.AttemptStats; -import io.airbyte.api.model.generated.AttemptStreamStats; -import io.airbyte.api.model.generated.DestinationDefinitionRead; -import io.airbyte.api.model.generated.JobConfigType; -import io.airbyte.api.model.generated.JobDebugRead; -import io.airbyte.api.model.generated.JobInfoLightRead; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.JobRead; -import io.airbyte.api.model.generated.JobWithAttemptsRead; -import io.airbyte.api.model.generated.LogRead; -import io.airbyte.api.model.generated.ResetConfig; -import io.airbyte.api.model.generated.SourceDefinitionRead; -import io.airbyte.api.model.generated.StreamDescriptor; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.FailureReason; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobOutput; -import io.airbyte.config.JobOutput.OutputType; -import io.airbyte.config.JobResetConnectionConfig; -import io.airbyte.config.JobSyncConfig; -import 
io.airbyte.config.ResetSourceConfiguration; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.config.StandardSyncSummary; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptStatus; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.protocol.models.AirbyteStream; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.stream.Collectors; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class JobConverterTest { - - private static final long JOB_ID = 100L; - private static final Integer ATTEMPT_NUMBER = 0; - private static final String JOB_CONFIG_ID = "123"; - private static final JobStatus JOB_STATUS = JobStatus.RUNNING; - private static final AttemptStatus ATTEMPT_STATUS = AttemptStatus.RUNNING; - private static final JobConfig.ConfigType CONFIG_TYPE = ConfigType.SYNC; - private static final Path LOG_PATH = Path.of("log_path"); - private static final long CREATED_AT = System.currentTimeMillis() / 1000; - private static final long RECORDS_EMITTED = 15L; - private static final long BYTES_EMITTED = 100L; - private static final long RECORDS_COMMITTED = 10L; - private static final long STATE_MESSAGES_EMITTED = 2L; - private static final String STREAM_NAME = "stream1"; - private static final String FAILURE_EXTERNAL_MESSAGE = "something went wrong"; - private static final long FAILURE_TIMESTAMP = System.currentTimeMillis(); - private static final String FAILURE_STACKTRACE = "stacktrace"; - private static final boolean PARTIAL_SUCCESS = false; - - 
private static final JobConfig JOB_CONFIG = new JobConfig() - .withConfigType(CONFIG_TYPE) - .withSync(new JobSyncConfig().withConfiguredAirbyteCatalog(new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("users")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("accounts")))))); - - private static final JobOutput JOB_OUTPUT = new JobOutput() - .withOutputType(OutputType.SYNC) - .withSync(new StandardSyncOutput() - .withStandardSyncSummary(new StandardSyncSummary() - .withRecordsSynced(RECORDS_EMITTED) - .withBytesSynced(BYTES_EMITTED) - .withTotalStats(new SyncStats() - .withRecordsEmitted(RECORDS_EMITTED) - .withBytesEmitted(BYTES_EMITTED) - .withSourceStateMessagesEmitted(STATE_MESSAGES_EMITTED) - .withRecordsCommitted(RECORDS_COMMITTED)) - .withStreamStats(Lists.newArrayList(new StreamSyncStats() - .withStreamName(STREAM_NAME) - .withStats(new SyncStats() - .withRecordsEmitted(RECORDS_EMITTED) - .withBytesEmitted(BYTES_EMITTED) - .withSourceStateMessagesEmitted(STATE_MESSAGES_EMITTED) - .withRecordsCommitted(RECORDS_COMMITTED)))))); - - private JobConverter jobConverter; - private Job job; - - private static final JobInfoRead JOB_INFO = - new JobInfoRead() - .job(new JobRead() - .id(JOB_ID) - .configId(JOB_CONFIG_ID) - .status(io.airbyte.api.model.generated.JobStatus.RUNNING) - .configType(JobConfigType.SYNC) - .createdAt(CREATED_AT) - .updatedAt(CREATED_AT)) - .attempts(Lists.newArrayList(new AttemptInfoRead() - .attempt(new AttemptRead() - .id((long) ATTEMPT_NUMBER) - .status(io.airbyte.api.model.generated.AttemptStatus.RUNNING) - .recordsSynced(RECORDS_EMITTED) - .bytesSynced(BYTES_EMITTED) - .totalStats(new AttemptStats() - .recordsEmitted(RECORDS_EMITTED) - .bytesEmitted(BYTES_EMITTED) - .stateMessagesEmitted(STATE_MESSAGES_EMITTED) - .recordsCommitted(RECORDS_COMMITTED)) - .streamStats(Lists.newArrayList(new AttemptStreamStats() - .streamName(STREAM_NAME) - 
.stats(new AttemptStats() - .recordsEmitted(RECORDS_EMITTED) - .bytesEmitted(BYTES_EMITTED) - .stateMessagesEmitted(STATE_MESSAGES_EMITTED) - .recordsCommitted(RECORDS_COMMITTED)))) - .updatedAt(CREATED_AT) - .createdAt(CREATED_AT) - .endedAt(CREATED_AT) - .failureSummary(new AttemptFailureSummary() - .failures(Lists.newArrayList(new AttemptFailureReason() - .failureOrigin(AttemptFailureOrigin.SOURCE) - .failureType(AttemptFailureType.SYSTEM_ERROR) - .externalMessage(FAILURE_EXTERNAL_MESSAGE) - .stacktrace(FAILURE_STACKTRACE) - .timestamp(FAILURE_TIMESTAMP))) - .partialSuccess(PARTIAL_SUCCESS))) - .logs(new LogRead().logLines(new ArrayList<>())))); - - private static final String version = "0.33.4"; - private static final AirbyteVersion airbyteVersion = new AirbyteVersion(version); - private static final SourceDefinitionRead sourceDefinitionRead = new SourceDefinitionRead().sourceDefinitionId(UUID.randomUUID()); - private static final DestinationDefinitionRead destinationDefinitionRead = - new DestinationDefinitionRead().destinationDefinitionId(UUID.randomUUID()); - - private static final JobDebugRead JOB_DEBUG_INFO = - new JobDebugRead() - .id(JOB_ID) - .configId(JOB_CONFIG_ID) - .status(io.airbyte.api.model.generated.JobStatus.RUNNING) - .configType(JobConfigType.SYNC) - .airbyteVersion(airbyteVersion.serialize()) - .sourceDefinition(sourceDefinitionRead) - .destinationDefinition(destinationDefinitionRead); - - private static final JobWithAttemptsRead JOB_WITH_ATTEMPTS_READ = new JobWithAttemptsRead() - .job(JOB_INFO.getJob()) - .attempts(JOB_INFO.getAttempts().stream().map(AttemptInfoRead::getAttempt).collect(Collectors.toList())); - - private static final io.airbyte.config.AttemptFailureSummary FAILURE_SUMMARY = new io.airbyte.config.AttemptFailureSummary() - .withFailures(Lists.newArrayList(new FailureReason() - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR) - .withExternalMessage(FAILURE_EXTERNAL_MESSAGE) - 
.withStacktrace(FAILURE_STACKTRACE) - .withTimestamp(FAILURE_TIMESTAMP))) - .withPartialSuccess(PARTIAL_SUCCESS); - - @BeforeEach - public void setUp() { - jobConverter = new JobConverter(WorkerEnvironment.DOCKER, LogConfigs.EMPTY); - job = mock(Job.class); - final Attempt attempt = mock(Attempt.class); - when(job.getId()).thenReturn(JOB_ID); - when(job.getConfigType()).thenReturn(JOB_CONFIG.getConfigType()); - when(job.getScope()).thenReturn(JOB_CONFIG_ID); - when(job.getConfig()).thenReturn(JOB_CONFIG); - when(job.getStatus()).thenReturn(JOB_STATUS); - when(job.getCreatedAtInSecond()).thenReturn(CREATED_AT); - when(job.getUpdatedAtInSecond()).thenReturn(CREATED_AT); - when(job.getAttempts()).thenReturn(Lists.newArrayList(attempt)); - when(attempt.getAttemptNumber()).thenReturn(ATTEMPT_NUMBER); - when(attempt.getStatus()).thenReturn(ATTEMPT_STATUS); - when(attempt.getOutput()).thenReturn(Optional.of(JOB_OUTPUT)); - when(attempt.getLogPath()).thenReturn(LOG_PATH); - when(attempt.getCreatedAtInSecond()).thenReturn(CREATED_AT); - when(attempt.getUpdatedAtInSecond()).thenReturn(CREATED_AT); - when(attempt.getEndedAtInSecond()).thenReturn(Optional.of(CREATED_AT)); - when(attempt.getFailureSummary()).thenReturn(Optional.of(FAILURE_SUMMARY)); - - } - - @Test - void testGetJobInfoRead() { - assertEquals(JOB_INFO, jobConverter.getJobInfoRead(job)); - } - - @Test - void testGetJobInfoLightRead() { - final JobInfoLightRead expected = new JobInfoLightRead().job(JOB_INFO.getJob()); - assertEquals(expected, jobConverter.getJobInfoLightRead(job)); - } - - @Test - void testGetDebugJobInfoRead() { - assertEquals(JOB_DEBUG_INFO, JobConverter.getDebugJobInfoRead(JOB_INFO, sourceDefinitionRead, destinationDefinitionRead, airbyteVersion)); - } - - @Test - void testGetJobWithAttemptsRead() { - assertEquals(JOB_WITH_ATTEMPTS_READ, JobConverter.getJobWithAttemptsRead(job)); - } - - @Test - void testGetJobRead() { - final JobWithAttemptsRead jobReadActual = 
JobConverter.getJobWithAttemptsRead(job); - assertEquals(JOB_WITH_ATTEMPTS_READ, jobReadActual); - } - - @Test - void testEnumConversion() { - assertTrue(Enums.isCompatible(JobConfig.ConfigType.class, JobConfigType.class)); - assertTrue(Enums.isCompatible(JobStatus.class, io.airbyte.api.model.generated.JobStatus.class)); - assertTrue(Enums.isCompatible(AttemptStatus.class, io.airbyte.api.model.generated.AttemptStatus.class)); - assertTrue(Enums.isCompatible(FailureReason.FailureOrigin.class, io.airbyte.api.model.generated.AttemptFailureOrigin.class)); - } - - // this test intentionally only looks at the reset config as the rest is the same here. - @Test - void testResetJobIncludesResetConfig() { - final JobConfig resetConfig = new JobConfig() - .withConfigType(ConfigType.RESET_CONNECTION) - .withResetConnection(new JobResetConnectionConfig().withResetSourceConfiguration(new ResetSourceConfiguration().withStreamsToReset(List.of( - new io.airbyte.protocol.models.StreamDescriptor().withName("users"), - new io.airbyte.protocol.models.StreamDescriptor().withName("accounts"))))); - final Job resetJob = new Job( - JOB_ID, - ConfigType.RESET_CONNECTION, - JOB_CONFIG_ID, - resetConfig, - Collections.emptyList(), - JobStatus.SUCCEEDED, - CREATED_AT, - CREATED_AT, - CREATED_AT); - - final ResetConfig expectedResetConfig = new ResetConfig().streamsToReset(List.of( - new StreamDescriptor().name("users"), - new StreamDescriptor().name("accounts"))); - assertEquals(expectedResetConfig, jobConverter.getJobInfoRead(resetJob).getJob().getResetConfig()); - } - - @Test - void testResetJobExcludesConfigIfNull() { - final JobConfig resetConfig = new JobConfig() - .withConfigType(ConfigType.RESET_CONNECTION) - .withResetConnection(new JobResetConnectionConfig().withResetSourceConfiguration(null)); - final Job resetJob = new Job( - JOB_ID, - ConfigType.RESET_CONNECTION, - JOB_CONFIG_ID, - resetConfig, - Collections.emptyList(), - JobStatus.SUCCEEDED, - CREATED_AT, - CREATED_AT, - 
CREATED_AT); - - assertNull(jobConverter.getJobInfoRead(resetJob).getJob().getResetConfig()); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/OauthModelConverterTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/OauthModelConverterTest.java deleted file mode 100644 index 40f484c32998..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/OauthModelConverterTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.converters; - -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.protocol.models.AuthSpecification; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.OAuth2Specification; -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -class OauthModelConverterTest { - - private static Stream testProvider() { - return Stream.of( - // all fields filled out with nesting - Arguments.of( - List.of(List.of("init1"), List.of("init2-1", "init2-2")), - List.of(List.of("output1"), List.of("output2-1", "output2-2")), - List.of("path", "nestedPath", 1)), - // init params only - Arguments.of( - List.of(List.of("init1"), List.of("init2-1", "init2-2")), - List.of(List.of()), - List.of()), - // output params only - Arguments.of( - List.of(List.of()), - List.of(List.of("output1"), List.of("output2-1", "output2-2")), - List.of()), - // rootObject only - Arguments.of( - List.of(List.of()), - List.of(List.of()), - List.of("path", "nestedPath", 1))); - } - - @ParameterizedTest - @MethodSource("testProvider") - void testIt(final List> initParams, final List> outputParams, final List rootObject) { - final ConnectorSpecification 
input = new ConnectorSpecification().withAuthSpecification( - new AuthSpecification() - .withAuthType(AuthSpecification.AuthType.OAUTH_2_0) - .withOauth2Specification(new OAuth2Specification() - .withOauthFlowInitParameters(initParams) - .withOauthFlowOutputParameters(outputParams) - .withRootObject(rootObject))); - - final io.airbyte.api.model.generated.AuthSpecification expected = new io.airbyte.api.model.generated.AuthSpecification() - .authType(io.airbyte.api.model.generated.AuthSpecification.AuthTypeEnum.OAUTH2_0) - .oauth2Specification( - new io.airbyte.api.model.generated.OAuth2Specification() - .oauthFlowInitParameters(initParams) - .oauthFlowOutputParameters(outputParams) - .rootObject(rootObject)); - - final Optional authSpec = OauthModelConverter.getAuthSpec(input); - assertTrue(authSpec.isPresent()); - assertEquals(expected, authSpec.get()); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java deleted file mode 100644 index 9f6e5f784632..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.Mockito.doThrow; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.api.model.generated.AttemptSyncConfig; -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.GlobalState; -import io.airbyte.api.model.generated.SaveAttemptSyncConfigRequestBody; -import io.airbyte.api.model.generated.SetWorkflowInAttemptRequestBody; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.persistence.job.JobPersistence; -import java.io.IOException; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; - -class AttemptHandlerTest { - - JobPersistence jobPersistence; - AttemptHandler handler; - - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final long JOB_ID = 10002L; - private static final int ATTEMPT_NUMBER = 1; - - private static final String PROCESSING_TASK_QUEUE = "SYNC"; - - @BeforeEach - public void init() { - jobPersistence = Mockito.mock(JobPersistence.class); - handler = new AttemptHandler(jobPersistence); - } - - @Test - void testInternalWorkerHandlerSetsTemporalWorkflowId() throws Exception { - final String workflowId = UUID.randomUUID().toString(); - - final ArgumentCaptor attemptNumberCapture = ArgumentCaptor.forClass(Integer.class); - final ArgumentCaptor jobIdCapture = ArgumentCaptor.forClass(Long.class); - 
final ArgumentCaptor workflowIdCapture = ArgumentCaptor.forClass(String.class); - final ArgumentCaptor queueCapture = ArgumentCaptor.forClass(String.class); - - final SetWorkflowInAttemptRequestBody requestBody = - new SetWorkflowInAttemptRequestBody().attemptNumber(ATTEMPT_NUMBER).jobId(JOB_ID).workflowId(workflowId) - .processingTaskQueue(PROCESSING_TASK_QUEUE); - - assertTrue(handler.setWorkflowInAttempt(requestBody).getSucceeded()); - - Mockito.verify(jobPersistence).setAttemptTemporalWorkflowInfo(jobIdCapture.capture(), attemptNumberCapture.capture(), workflowIdCapture.capture(), - queueCapture.capture()); - - assertEquals(ATTEMPT_NUMBER, attemptNumberCapture.getValue()); - assertEquals(JOB_ID, jobIdCapture.getValue()); - assertEquals(workflowId, workflowIdCapture.getValue()); - assertEquals(PROCESSING_TASK_QUEUE, queueCapture.getValue()); - } - - @Test - void testInternalWorkerHandlerSetsTemporalWorkflowIdThrows() throws Exception { - final String workflowId = UUID.randomUUID().toString(); - - doThrow(IOException.class).when(jobPersistence).setAttemptTemporalWorkflowInfo(anyLong(), anyInt(), - any(), any()); - - final ArgumentCaptor attemptNumberCapture = ArgumentCaptor.forClass(Integer.class); - final ArgumentCaptor jobIdCapture = ArgumentCaptor.forClass(Long.class); - final ArgumentCaptor workflowIdCapture = ArgumentCaptor.forClass(String.class); - final ArgumentCaptor queueCapture = ArgumentCaptor.forClass(String.class); - - final SetWorkflowInAttemptRequestBody requestBody = - new SetWorkflowInAttemptRequestBody().attemptNumber(ATTEMPT_NUMBER).jobId(JOB_ID).workflowId(workflowId) - .processingTaskQueue(PROCESSING_TASK_QUEUE); - - assertFalse(handler.setWorkflowInAttempt(requestBody).getSucceeded()); - - Mockito.verify(jobPersistence).setAttemptTemporalWorkflowInfo(jobIdCapture.capture(), attemptNumberCapture.capture(), workflowIdCapture.capture(), - queueCapture.capture()); - - assertEquals(ATTEMPT_NUMBER, attemptNumberCapture.getValue()); - 
assertEquals(JOB_ID, jobIdCapture.getValue()); - assertEquals(workflowId, workflowIdCapture.getValue()); - assertEquals(PROCESSING_TASK_QUEUE, queueCapture.getValue()); - } - - @Test - void testInternalHandlerSetsAttemptSyncConfig() throws Exception { - final ArgumentCaptor attemptNumberCapture = ArgumentCaptor.forClass(Integer.class); - final ArgumentCaptor jobIdCapture = ArgumentCaptor.forClass(Long.class); - final ArgumentCaptor attemptSyncConfigCapture = - ArgumentCaptor.forClass(io.airbyte.config.AttemptSyncConfig.class); - - final JsonNode sourceConfig = Jsons.jsonNode(Map.of("source_key", "source_val")); - final JsonNode destinationConfig = Jsons.jsonNode(Map.of("destination_key", "destination_val")); - final ConnectionState state = new ConnectionState() - .connectionId(CONNECTION_ID) - .stateType(ConnectionStateType.GLOBAL) - .streamState(null) - .globalState(new GlobalState().sharedState(Jsons.jsonNode(Map.of("state_key", "state_val")))); - - final AttemptSyncConfig attemptSyncConfig = new AttemptSyncConfig() - .destinationConfiguration(destinationConfig) - .sourceConfiguration(sourceConfig) - .state(state); - - final SaveAttemptSyncConfigRequestBody requestBody = - new SaveAttemptSyncConfigRequestBody().attemptNumber(ATTEMPT_NUMBER).jobId(JOB_ID).syncConfig(attemptSyncConfig); - - assertTrue(handler.saveSyncConfig(requestBody).getSucceeded()); - - Mockito.verify(jobPersistence).writeAttemptSyncConfig(jobIdCapture.capture(), attemptNumberCapture.capture(), attemptSyncConfigCapture.capture()); - - final io.airbyte.config.AttemptSyncConfig expectedAttemptSyncConfig = ApiPojoConverters.attemptSyncConfigToInternal(attemptSyncConfig); - - assertEquals(ATTEMPT_NUMBER, attemptNumberCapture.getValue()); - assertEquals(JOB_ID, jobIdCapture.getValue()); - assertEquals(expectedAttemptSyncConfig, attemptSyncConfigCapture.getValue()); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionSchedulerHelperTest.java 
b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionSchedulerHelperTest.java deleted file mode 100644 index 238b2f95427b..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionSchedulerHelperTest.java +++ /dev/null @@ -1,681 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.api.model.generated.ConnectionScheduleData; -import io.airbyte.api.model.generated.ConnectionScheduleDataBasicSchedule; -import io.airbyte.api.model.generated.ConnectionScheduleDataBasicSchedule.TimeUnitEnum; -import io.airbyte.api.model.generated.ConnectionScheduleDataCron; -import io.airbyte.api.model.generated.ConnectionScheduleType; -import io.airbyte.commons.server.handlers.helpers.ConnectionScheduleHelper; -import io.airbyte.config.BasicSchedule.TimeUnit; -import io.airbyte.config.Schedule; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.ScheduleType; -import io.airbyte.validation.json.JsonValidationException; -import org.junit.jupiter.api.Test; - -class ConnectionSchedulerHelperTest { - - private final static String EXPECTED_CRON_TIMEZONE = "UTC"; - private final static String EXPECTED_CRON_EXPRESSION = "* */2 * * * ?"; - - @Test - void testPopulateSyncScheduleFromManualType() throws JsonValidationException { - final StandardSync actual = new StandardSync(); - ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, - ConnectionScheduleType.MANUAL, null); - assertTrue(actual.getManual()); - assertEquals(ScheduleType.MANUAL, actual.getScheduleType()); - 
assertNull(actual.getSchedule()); - assertNull(actual.getScheduleData()); - } - - @Test - void testPopulateSyncScheduleFromBasicType() throws JsonValidationException { - final StandardSync actual = new StandardSync(); - ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, - ConnectionScheduleType.BASIC, new ConnectionScheduleData() - .basicSchedule(new ConnectionScheduleDataBasicSchedule() - .timeUnit(TimeUnitEnum.HOURS) - .units(1L))); - assertFalse(actual.getManual()); - assertEquals(ScheduleType.BASIC_SCHEDULE, actual.getScheduleType()); - assertEquals(TimeUnit.HOURS, actual.getScheduleData().getBasicSchedule().getTimeUnit()); - assertEquals(1L, actual.getScheduleData().getBasicSchedule().getUnits()); - // We expect the old format to be dual-written. - assertEquals(Schedule.TimeUnit.HOURS, actual.getSchedule().getTimeUnit()); - assertEquals(1L, actual.getSchedule().getUnits()); - } - - @Test - void testPopulateSyncScheduleFromCron() throws JsonValidationException { - final StandardSync actual = new StandardSync(); - ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, - ConnectionScheduleType.CRON, new ConnectionScheduleData() - .cron(new ConnectionScheduleDataCron() - .cronTimeZone(EXPECTED_CRON_TIMEZONE) - .cronExpression(EXPECTED_CRON_EXPRESSION))); - assertEquals(ScheduleType.CRON, actual.getScheduleType()); - assertEquals(EXPECTED_CRON_TIMEZONE, actual.getScheduleData().getCron().getCronTimeZone()); - assertEquals(EXPECTED_CRON_EXPRESSION, actual.getScheduleData().getCron().getCronExpression()); - assertNull(actual.getSchedule()); - } - - @Test - void testScheduleValidation() { - final StandardSync actual = new StandardSync(); - assertThrows(JsonValidationException.class, () -> ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, - ConnectionScheduleType.CRON, null)); - assertThrows(JsonValidationException.class, - () -> ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, 
ConnectionScheduleType.BASIC, new ConnectionScheduleData())); - assertThrows(JsonValidationException.class, - () -> ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, ConnectionScheduleType.CRON, new ConnectionScheduleData())); - assertThrows(JsonValidationException.class, - () -> ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, ConnectionScheduleType.CRON, new ConnectionScheduleData() - .cron(new ConnectionScheduleDataCron()))); - assertThrows(JsonValidationException.class, - () -> ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, ConnectionScheduleType.CRON, new ConnectionScheduleData() - .cron(new ConnectionScheduleDataCron().cronExpression(EXPECTED_CRON_EXPRESSION).cronTimeZone("Etc/foo")))); - assertThrows(JsonValidationException.class, - () -> ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, ConnectionScheduleType.CRON, new ConnectionScheduleData() - .cron(new ConnectionScheduleDataCron().cronExpression("bad cron").cronTimeZone(EXPECTED_CRON_TIMEZONE)))); - } - - @Test - void testAvailableCronTimeZonesStayTheSame() { - /** - * NOTE: this test exists to make sure that the server stays in sync with the frontend. The list of - * supported timezones is copied from airbyte-webapp/src/config/availableCronTimeZones.json. If this - * test fails, then THAT file must be updated with the new timezones. 
- */ - String[] timezoneStrings = { - "Africa/Abidjan", - "Africa/Accra", - "Africa/Addis_Ababa", - "Africa/Algiers", - "Africa/Asmara", - "Africa/Asmera", - "Africa/Bamako", - "Africa/Bangui", - "Africa/Banjul", - "Africa/Bissau", - "Africa/Blantyre", - "Africa/Brazzaville", - "Africa/Bujumbura", - "Africa/Cairo", - "Africa/Casablanca", - "Africa/Ceuta", - "Africa/Conakry", - "Africa/Dakar", - "Africa/Dar_es_Salaam", - "Africa/Djibouti", - "Africa/Douala", - "Africa/El_Aaiun", - "Africa/Freetown", - "Africa/Gaborone", - "Africa/Harare", - "Africa/Johannesburg", - "Africa/Juba", - "Africa/Kampala", - "Africa/Khartoum", - "Africa/Kigali", - "Africa/Kinshasa", - "Africa/Lagos", - "Africa/Libreville", - "Africa/Lome", - "Africa/Luanda", - "Africa/Lubumbashi", - "Africa/Lusaka", - "Africa/Malabo", - "Africa/Maputo", - "Africa/Maseru", - "Africa/Mbabane", - "Africa/Mogadishu", - "Africa/Monrovia", - "Africa/Nairobi", - "Africa/Ndjamena", - "Africa/Niamey", - "Africa/Nouakchott", - "Africa/Ouagadougou", - "Africa/Porto-Novo", - "Africa/Sao_Tome", - "Africa/Timbuktu", - "Africa/Tripoli", - "Africa/Tunis", - "Africa/Windhoek", - "America/Adak", - "America/Anchorage", - "America/Anguilla", - "America/Antigua", - "America/Araguaina", - "America/Argentina/Buenos_Aires", - "America/Argentina/Catamarca", - "America/Argentina/ComodRivadavia", - "America/Argentina/Cordoba", - "America/Argentina/Jujuy", - "America/Argentina/La_Rioja", - "America/Argentina/Mendoza", - "America/Argentina/Rio_Gallegos", - "America/Argentina/Salta", - "America/Argentina/San_Juan", - "America/Argentina/San_Luis", - "America/Argentina/Tucuman", - "America/Argentina/Ushuaia", - "America/Aruba", - "America/Asuncion", - "America/Atikokan", - "America/Atka", - "America/Bahia", - "America/Bahia_Banderas", - "America/Barbados", - "America/Belem", - "America/Belize", - "America/Blanc-Sablon", - "America/Boa_Vista", - "America/Bogota", - "America/Boise", - "America/Buenos_Aires", - "America/Cambridge_Bay", - 
"America/Campo_Grande", - "America/Cancun", - "America/Caracas", - "America/Catamarca", - "America/Cayenne", - "America/Cayman", - "America/Chicago", - "America/Chihuahua", - "America/Coral_Harbour", - "America/Cordoba", - "America/Costa_Rica", - "America/Creston", - "America/Cuiaba", - "America/Curacao", - "America/Danmarkshavn", - "America/Dawson", - "America/Dawson_Creek", - "America/Denver", - "America/Detroit", - "America/Dominica", - "America/Edmonton", - "America/Eirunepe", - "America/El_Salvador", - "America/Ensenada", - "America/Fort_Nelson", - "America/Fort_Wayne", - "America/Fortaleza", - "America/Glace_Bay", - "America/Godthab", - "America/Goose_Bay", - "America/Grand_Turk", - "America/Grenada", - "America/Guadeloupe", - "America/Guatemala", - "America/Guayaquil", - "America/Guyana", - "America/Halifax", - "America/Havana", - "America/Hermosillo", - "America/Indiana/Indianapolis", - "America/Indiana/Knox", - "America/Indiana/Marengo", - "America/Indiana/Petersburg", - "America/Indiana/Tell_City", - "America/Indiana/Vevay", - "America/Indiana/Vincennes", - "America/Indiana/Winamac", - "America/Indianapolis", - "America/Inuvik", - "America/Iqaluit", - "America/Jamaica", - "America/Jujuy", - "America/Juneau", - "America/Kentucky/Louisville", - "America/Kentucky/Monticello", - "America/Knox_IN", - "America/Kralendijk", - "America/La_Paz", - "America/Lima", - "America/Los_Angeles", - "America/Louisville", - "America/Lower_Princes", - "America/Maceio", - "America/Managua", - "America/Manaus", - "America/Marigot", - "America/Martinique", - "America/Matamoros", - "America/Mazatlan", - "America/Mendoza", - "America/Menominee", - "America/Merida", - "America/Metlakatla", - "America/Mexico_City", - "America/Miquelon", - "America/Moncton", - "America/Monterrey", - "America/Montevideo", - "America/Montreal", - "America/Montserrat", - "America/Nassau", - "America/New_York", - "America/Nipigon", - "America/Nome", - "America/Noronha", - "America/North_Dakota/Beulah", - 
"America/North_Dakota/Center", - "America/North_Dakota/New_Salem", - "America/Ojinaga", - "America/Panama", - "America/Pangnirtung", - "America/Paramaribo", - "America/Phoenix", - "America/Port-au-Prince", - "America/Port_of_Spain", - "America/Porto_Acre", - "America/Porto_Velho", - "America/Puerto_Rico", - "America/Punta_Arenas", - "America/Rainy_River", - "America/Rankin_Inlet", - "America/Recife", - "America/Regina", - "America/Resolute", - "America/Rio_Branco", - "America/Rosario", - "America/Santa_Isabel", - "America/Santarem", - "America/Santiago", - "America/Santo_Domingo", - "America/Sao_Paulo", - "America/Scoresbysund", - "America/Shiprock", - "America/Sitka", - "America/St_Barthelemy", - "America/St_Johns", - "America/St_Kitts", - "America/St_Lucia", - "America/St_Thomas", - "America/St_Vincent", - "America/Swift_Current", - "America/Tegucigalpa", - "America/Thule", - "America/Thunder_Bay", - "America/Tijuana", - "America/Toronto", - "America/Tortola", - "America/Vancouver", - "America/Virgin", - "America/Whitehorse", - "America/Winnipeg", - "America/Yakutat", - "America/Yellowknife", - "Antarctica/Casey", - "Antarctica/Davis", - "Antarctica/DumontDUrville", - "Antarctica/Macquarie", - "Antarctica/Mawson", - "Antarctica/McMurdo", - "Antarctica/Palmer", - "Antarctica/Rothera", - "Antarctica/South_Pole", - "Antarctica/Syowa", - "Antarctica/Troll", - "Antarctica/Vostok", - "Arctic/Longyearbyen", - "Asia/Aden", - "Asia/Almaty", - "Asia/Amman", - "Asia/Anadyr", - "Asia/Aqtau", - "Asia/Aqtobe", - "Asia/Ashgabat", - "Asia/Ashkhabad", - "Asia/Atyrau", - "Asia/Baghdad", - "Asia/Bahrain", - "Asia/Baku", - "Asia/Bangkok", - "Asia/Barnaul", - "Asia/Beirut", - "Asia/Bishkek", - "Asia/Brunei", - "Asia/Calcutta", - "Asia/Chita", - "Asia/Choibalsan", - "Asia/Chongqing", - "Asia/Chungking", - "Asia/Colombo", - "Asia/Dacca", - "Asia/Damascus", - "Asia/Dhaka", - "Asia/Dili", - "Asia/Dubai", - "Asia/Dushanbe", - "Asia/Famagusta", - "Asia/Gaza", - "Asia/Harbin", - 
"Asia/Hebron", - "Asia/Ho_Chi_Minh", - "Asia/Hong_Kong", - "Asia/Hovd", - "Asia/Irkutsk", - "Asia/Istanbul", - "Asia/Jakarta", - "Asia/Jayapura", - "Asia/Jerusalem", - "Asia/Kabul", - "Asia/Kamchatka", - "Asia/Karachi", - "Asia/Kashgar", - "Asia/Kathmandu", - "Asia/Katmandu", - "Asia/Khandyga", - "Asia/Kolkata", - "Asia/Krasnoyarsk", - "Asia/Kuala_Lumpur", - "Asia/Kuching", - "Asia/Kuwait", - "Asia/Macao", - "Asia/Macau", - "Asia/Magadan", - "Asia/Makassar", - "Asia/Manila", - "Asia/Muscat", - "Asia/Nicosia", - "Asia/Novokuznetsk", - "Asia/Novosibirsk", - "Asia/Omsk", - "Asia/Oral", - "Asia/Phnom_Penh", - "Asia/Pontianak", - "Asia/Pyongyang", - "Asia/Qatar", - "Asia/Qostanay", - "Asia/Qyzylorda", - "Asia/Rangoon", - "Asia/Riyadh", - "Asia/Saigon", - "Asia/Sakhalin", - "Asia/Samarkand", - "Asia/Seoul", - "Asia/Shanghai", - "Asia/Singapore", - "Asia/Srednekolymsk", - "Asia/Taipei", - "Asia/Tashkent", - "Asia/Tbilisi", - "Asia/Tehran", - "Asia/Tel_Aviv", - "Asia/Thimbu", - "Asia/Thimphu", - "Asia/Tokyo", - "Asia/Tomsk", - "Asia/Ujung_Pandang", - "Asia/Ulaanbaatar", - "Asia/Ulan_Bator", - "Asia/Urumqi", - "Asia/Ust-Nera", - "Asia/Vientiane", - "Asia/Vladivostok", - "Asia/Yakutsk", - "Asia/Yangon", - "Asia/Yekaterinburg", - "Asia/Yerevan", - "Atlantic/Azores", - "Atlantic/Bermuda", - "Atlantic/Canary", - "Atlantic/Cape_Verde", - "Atlantic/Faeroe", - "Atlantic/Faroe", - "Atlantic/Jan_Mayen", - "Atlantic/Madeira", - "Atlantic/Reykjavik", - "Atlantic/South_Georgia", - "Atlantic/St_Helena", - "Atlantic/Stanley", - "Australia/ACT", - "Australia/Adelaide", - "Australia/Brisbane", - "Australia/Broken_Hill", - "Australia/Canberra", - "Australia/Currie", - "Australia/Darwin", - "Australia/Eucla", - "Australia/Hobart", - "Australia/LHI", - "Australia/Lindeman", - "Australia/Lord_Howe", - "Australia/Melbourne", - "Australia/NSW", - "Australia/North", - "Australia/Perth", - "Australia/Queensland", - "Australia/South", - "Australia/Sydney", - "Australia/Tasmania", - 
"Australia/Victoria", - "Australia/West", - "Australia/Yancowinna", - "Brazil/Acre", - "Brazil/DeNoronha", - "Brazil/East", - "Brazil/West", - "CET", - "CST6CDT", - "Canada/Atlantic", - "Canada/Central", - "Canada/Eastern", - "Canada/Mountain", - "Canada/Newfoundland", - "Canada/Pacific", - "Canada/Saskatchewan", - "Canada/Yukon", - "Chile/Continental", - "Chile/EasterIsland", - "Cuba", - "EET", - "EST", - "EST5EDT", - "Egypt", - "Eire", - "Europe/Amsterdam", - "Europe/Andorra", - "Europe/Astrakhan", - "Europe/Athens", - "Europe/Belfast", - "Europe/Belgrade", - "Europe/Berlin", - "Europe/Bratislava", - "Europe/Brussels", - "Europe/Bucharest", - "Europe/Budapest", - "Europe/Busingen", - "Europe/Chisinau", - "Europe/Copenhagen", - "Europe/Dublin", - "Europe/Gibraltar", - "Europe/Guernsey", - "Europe/Helsinki", - "Europe/Isle_of_Man", - "Europe/Istanbul", - "Europe/Jersey", - "Europe/Kaliningrad", - "Europe/Kiev", - "Europe/Kirov", - "Europe/Lisbon", - "Europe/Ljubljana", - "Europe/London", - "Europe/Luxembourg", - "Europe/Madrid", - "Europe/Malta", - "Europe/Mariehamn", - "Europe/Minsk", - "Europe/Monaco", - "Europe/Moscow", - "Europe/Nicosia", - "Europe/Oslo", - "Europe/Paris", - "Europe/Podgorica", - "Europe/Prague", - "Europe/Riga", - "Europe/Rome", - "Europe/Samara", - "Europe/San_Marino", - "Europe/Sarajevo", - "Europe/Saratov", - "Europe/Simferopol", - "Europe/Skopje", - "Europe/Sofia", - "Europe/Stockholm", - "Europe/Tallinn", - "Europe/Tirane", - "Europe/Tiraspol", - "Europe/Ulyanovsk", - "Europe/Uzhgorod", - "Europe/Vaduz", - "Europe/Vatican", - "Europe/Vienna", - "Europe/Vilnius", - "Europe/Volgograd", - "Europe/Warsaw", - "Europe/Zagreb", - "Europe/Zaporozhye", - "Europe/Zurich", - "GB", - "GB-Eire", - "GMT", - "GMT+0", - "GMT-0", - "GMT0", - "Greenwich", - "HST", - "Hongkong", - "Iceland", - "Indian/Antananarivo", - "Indian/Chagos", - "Indian/Christmas", - "Indian/Cocos", - "Indian/Comoro", - "Indian/Kerguelen", - "Indian/Mahe", - "Indian/Maldives", - 
"Indian/Mauritius", - "Indian/Mayotte", - "Indian/Reunion", - "Iran", - "Israel", - "Jamaica", - "Japan", - "Kwajalein", - "Libya", - "MET", - "MST", - "MST7MDT", - "Mexico/BajaNorte", - "Mexico/BajaSur", - "Mexico/General", - "NZ", - "NZ-CHAT", - "Navajo", - "PRC", - "PST8PDT", - "Pacific/Apia", - "Pacific/Auckland", - "Pacific/Bougainville", - "Pacific/Chatham", - "Pacific/Chuuk", - "Pacific/Easter", - "Pacific/Efate", - "Pacific/Enderbury", - "Pacific/Fakaofo", - "Pacific/Fiji", - "Pacific/Funafuti", - "Pacific/Galapagos", - "Pacific/Gambier", - "Pacific/Guadalcanal", - "Pacific/Guam", - "Pacific/Honolulu", - "Pacific/Johnston", - "Pacific/Kiritimati", - "Pacific/Kosrae", - "Pacific/Kwajalein", - "Pacific/Majuro", - "Pacific/Marquesas", - "Pacific/Midway", - "Pacific/Nauru", - "Pacific/Niue", - "Pacific/Norfolk", - "Pacific/Noumea", - "Pacific/Pago_Pago", - "Pacific/Palau", - "Pacific/Pitcairn", - "Pacific/Pohnpei", - "Pacific/Ponape", - "Pacific/Port_Moresby", - "Pacific/Rarotonga", - "Pacific/Saipan", - "Pacific/Samoa", - "Pacific/Tahiti", - "Pacific/Tarawa", - "Pacific/Tongatapu", - "Pacific/Truk", - "Pacific/Wake", - "Pacific/Wallis", - "Pacific/Yap", - "Poland", - "Portugal", - "ROC", - "ROK", - "Singapore", - "Turkey", - "UCT", - "US/Alaska", - "US/Aleutian", - "US/Arizona", - "US/Central", - "US/East-Indiana", - "US/Eastern", - "US/Hawaii", - "US/Indiana-Starke", - "US/Michigan", - "US/Mountain", - "US/Pacific", - "US/Samoa", - "UTC", - "Universal", - "W-SU", - "WET", - "Zulu" - }; - for (String expectedTimezone : timezoneStrings) { - try { - final StandardSync actual = new StandardSync(); - // NOTE: this method call is the one that parses the given timezone string - // and will throw an exception if it isn't supported. This method is called - // on the API handler path. 
- ConnectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, - ConnectionScheduleType.CRON, new ConnectionScheduleData() - .cron(new ConnectionScheduleDataCron() - .cronTimeZone(expectedTimezone) - .cronExpression(EXPECTED_CRON_EXPRESSION))); - assertEquals(ScheduleType.CRON, actual.getScheduleType()); - assertEquals(expectedTimezone, actual.getScheduleData().getCron().getCronTimeZone()); - assertEquals(EXPECTED_CRON_EXPRESSION, actual.getScheduleData().getCron().getCronExpression()); - } catch (IllegalArgumentException | JsonValidationException e) { - throw (RuntimeException) new RuntimeException( - "One of the timezones is not supported - update airbyte-webapp/src/config/availableCronTimeZones.json!") - .initCause(e); - } - } - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java deleted file mode 100644 index d595489c6be0..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java +++ /dev/null @@ -1,1377 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static io.airbyte.commons.server.helpers.ConnectionHelpers.FIELD_NAME; -import static io.airbyte.commons.server.helpers.ConnectionHelpers.SECOND_FIELD_NAME; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.common.collect.Lists; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.api.model.generated.AirbyteCatalog; -import io.airbyte.api.model.generated.AirbyteStream; -import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration; -import io.airbyte.api.model.generated.AirbyteStreamConfiguration; -import io.airbyte.api.model.generated.ConnectionCreate; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionReadList; -import io.airbyte.api.model.generated.ConnectionSchedule; -import io.airbyte.api.model.generated.ConnectionScheduleData; -import io.airbyte.api.model.generated.ConnectionScheduleDataBasicSchedule; -import io.airbyte.api.model.generated.ConnectionScheduleDataBasicSchedule.TimeUnitEnum; -import io.airbyte.api.model.generated.ConnectionScheduleDataCron; -import io.airbyte.api.model.generated.ConnectionScheduleType; -import io.airbyte.api.model.generated.ConnectionSearch; -import io.airbyte.api.model.generated.ConnectionStatus; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.DestinationSearch; -import io.airbyte.api.model.generated.DestinationSyncMode; -import io.airbyte.api.model.generated.NamespaceDefinitionType; -import io.airbyte.api.model.generated.ResourceRequirements; -import io.airbyte.api.model.generated.SelectedFieldInfo; -import 
io.airbyte.api.model.generated.SourceSearch; -import io.airbyte.api.model.generated.StreamDescriptor; -import io.airbyte.api.model.generated.SyncMode; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.commons.server.helpers.ConnectionHelpers; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.config.BasicSchedule; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.Cron; -import io.airbyte.config.DataType; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.FieldSelectionData; -import io.airbyte.config.Geography; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.Schedule; -import io.airbyte.config.Schedule.TimeUnit; -import io.airbyte.config.ScheduleData; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.ScheduleType; -import io.airbyte.config.StandardSync.Status; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.validation.json.JsonValidationException; -import io.airbyte.workers.helper.ConnectionHelper; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import java.util.function.Supplier; -import org.assertj.core.api.Assertions; -import org.junit.Assert; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Nested; -import 
org.junit.jupiter.api.Test; - -class ConnectionsHandlerTest { - - private ConfigRepository configRepository; - private Supplier uuidGenerator; - - private ConnectionsHandler connectionsHandler; - private UUID workspaceId; - private UUID sourceId; - private UUID destinationId; - - private SourceConnection source; - private DestinationConnection destination; - private StandardSync standardSync; - private StandardSync standardSyncDeleted; - private UUID connectionId; - private UUID operationId; - private UUID otherOperationId; - private WorkspaceHelper workspaceHelper; - private TrackingClient trackingClient; - private EventRunner eventRunner; - private ConnectionHelper connectionHelper; - - private static final String PRESTO_TO_HUDI = "presto to hudi"; - private static final String PRESTO_TO_HUDI_PREFIX = "presto_to_hudi"; - private static final String SOURCE_TEST = "source-test"; - private static final String DESTINATION_TEST = "destination-test"; - private static final String CURSOR1 = "cursor1"; - private static final String CURSOR2 = "cursor2"; - private static final String PK1 = "pk1"; - private static final String PK2 = "pk2"; - private static final String PK3 = "pk3"; - private static final String STREAM1 = "stream1"; - private static final String STREAM2 = "stream2"; - private static final String AZKABAN_USERS = "azkaban_users"; - private final static String CRON_TIMEZONE_UTC = "UTC"; - private final static String CRON_EXPRESSION = "* */2 * * * ?"; - - @SuppressWarnings("unchecked") - @BeforeEach - void setUp() throws IOException, JsonValidationException, ConfigNotFoundException { - - workspaceId = UUID.randomUUID(); - sourceId = UUID.randomUUID(); - destinationId = UUID.randomUUID(); - connectionId = UUID.randomUUID(); - operationId = UUID.randomUUID(); - otherOperationId = UUID.randomUUID(); - source = new SourceConnection() - .withSourceId(sourceId) - .withWorkspaceId(workspaceId) - .withName("presto"); - destination = new DestinationConnection() - 
.withDestinationId(destinationId) - .withWorkspaceId(workspaceId) - .withName("hudi") - .withConfiguration(Jsons.jsonNode(Collections.singletonMap("apiKey", "123-abc"))); - standardSync = new StandardSync() - .withConnectionId(connectionId) - .withName(PRESTO_TO_HUDI) - .withNamespaceDefinition(JobSyncConfig.NamespaceDefinitionType.SOURCE) - .withNamespaceFormat(null) - .withPrefix(PRESTO_TO_HUDI_PREFIX) - .withStatus(StandardSync.Status.ACTIVE) - .withCatalog(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()) - .withFieldSelectionData(new FieldSelectionData().withAdditionalProperty("null/users-data0", false)) - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withOperationIds(List.of(operationId)) - .withManual(false) - .withSchedule(ConnectionHelpers.generateBasicSchedule()) - .withScheduleType(ScheduleType.BASIC_SCHEDULE) - .withScheduleData(ConnectionHelpers.generateBasicScheduleData()) - .withResourceRequirements(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS) - .withSourceCatalogId(UUID.randomUUID()) - .withGeography(Geography.AUTO) - .withBreakingChange(false); - standardSyncDeleted = new StandardSync() - .withConnectionId(connectionId) - .withName("presto to hudi2") - .withNamespaceDefinition(JobSyncConfig.NamespaceDefinitionType.SOURCE) - .withNamespaceFormat(null) - .withPrefix("presto_to_hudi2") - .withStatus(StandardSync.Status.DEPRECATED) - .withCatalog(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()) - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withOperationIds(List.of(operationId)) - .withManual(false) - .withSchedule(ConnectionHelpers.generateBasicSchedule()) - .withResourceRequirements(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS) - .withGeography(Geography.US); - - configRepository = mock(ConfigRepository.class); - uuidGenerator = mock(Supplier.class); - workspaceHelper = mock(WorkspaceHelper.class); - trackingClient = mock(TrackingClient.class); - eventRunner = mock(EventRunner.class); - 
connectionHelper = mock(ConnectionHelper.class); - when(workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(sourceId)).thenReturn(workspaceId); - when(workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(destinationId)).thenReturn(workspaceId); - when(workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(operationId)).thenReturn(workspaceId); - when(workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(otherOperationId)).thenReturn(workspaceId); - } - - @Nested - class UnMockedConnectionHelper { - - @BeforeEach - void setUp() throws JsonValidationException, ConfigNotFoundException, IOException { - connectionsHandler = new ConnectionsHandler( - configRepository, - uuidGenerator, - workspaceHelper, - trackingClient, - eventRunner, - connectionHelper); - - when(uuidGenerator.get()).thenReturn(standardSync.getConnectionId()); - final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() - .withName(SOURCE_TEST) - .withSourceDefinitionId(UUID.randomUUID()); - final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() - .withName(DESTINATION_TEST) - .withDestinationDefinitionId(UUID.randomUUID()); - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - when(configRepository.getSourceDefinitionFromConnection(standardSync.getConnectionId())).thenReturn( - sourceDefinition); - when(configRepository.getDestinationDefinitionFromConnection(standardSync.getConnectionId())).thenReturn( - destinationDefinition); - when(configRepository.getSourceConnection(source.getSourceId())) - .thenReturn(source); - when(configRepository.getDestinationConnection(destination.getDestinationId())) - .thenReturn(destination); - } - - @Nested - class CreateConnection { - - private ConnectionCreate buildConnectionCreateRequest(final StandardSync standardSync, final AirbyteCatalog catalog) { - return new ConnectionCreate() - .sourceId(standardSync.getSourceId()) - 
.destinationId(standardSync.getDestinationId()) - .operationIds(standardSync.getOperationIds()) - .name(PRESTO_TO_HUDI) - .namespaceDefinition(NamespaceDefinitionType.SOURCE) - .namespaceFormat(null) - .prefix(PRESTO_TO_HUDI_PREFIX) - .status(ConnectionStatus.ACTIVE) - .schedule(ConnectionHelpers.generateBasicConnectionSchedule()) - .syncCatalog(catalog) - .resourceRequirements(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(standardSync.getResourceRequirements().getCpuRequest()) - .cpuLimit(standardSync.getResourceRequirements().getCpuLimit()) - .memoryRequest(standardSync.getResourceRequirements().getMemoryRequest()) - .memoryLimit(standardSync.getResourceRequirements().getMemoryLimit())) - .sourceCatalogId(standardSync.getSourceCatalogId()) - .geography(ApiPojoConverters.toApiGeography(standardSync.getGeography())); - } - - @Test - void testCreateConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - - final AirbyteCatalog catalog = ConnectionHelpers.generateBasicApiCatalog(); - - // set a defaultGeography on the workspace as EU, but expect connection to be - // created AUTO because the ConnectionCreate geography takes precedence over the workspace - // defaultGeography. - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(workspaceId) - .withDefaultGeography(Geography.EU); - when(configRepository.getStandardWorkspaceNoSecrets(workspaceId, true)).thenReturn(workspace); - - final ConnectionCreate connectionCreate = buildConnectionCreateRequest(standardSync, catalog); - - final ConnectionRead actualConnectionRead = connectionsHandler.createConnection(connectionCreate); - - final ConnectionRead expectedConnectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); - - assertEquals(expectedConnectionRead, actualConnectionRead); - - verify(configRepository).writeStandardSync(standardSync); - - // Use new schedule schema, verify that we get the same results. 
- connectionCreate - .schedule(null) - .scheduleType(ConnectionScheduleType.BASIC) - .scheduleData(ConnectionHelpers.generateBasicConnectionScheduleData()); - assertEquals(expectedConnectionRead, connectionsHandler.createConnection(connectionCreate)); - } - - @Test - void testCreateConnectionUsesDefaultGeographyFromWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - - when(workspaceHelper.getWorkspaceForSourceId(sourceId)).thenReturn(workspaceId); - - final AirbyteCatalog catalog = ConnectionHelpers.generateBasicApiCatalog(); - - // don't set a geography on the ConnectionCreate to force inheritance from workspace default - final ConnectionCreate connectionCreate = buildConnectionCreateRequest(standardSync, catalog).geography(null); - - // set the workspace default to EU - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(workspaceId) - .withDefaultGeography(Geography.EU); - when(configRepository.getStandardWorkspaceNoSecrets(workspaceId, true)).thenReturn(workspace); - - // the expected read and verified write is generated from the standardSync, so set this to EU as - // well - standardSync.setGeography(Geography.EU); - - final ConnectionRead expectedConnectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); - final ConnectionRead actualConnectionRead = connectionsHandler.createConnection(connectionCreate); - - assertEquals(expectedConnectionRead, actualConnectionRead); - verify(configRepository).writeStandardSync(standardSync); - } - - @Test - void testCreateConnectionWithSelectedFields() throws IOException, JsonValidationException, ConfigNotFoundException { - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(workspaceId) - .withDefaultGeography(Geography.AUTO); - when(configRepository.getStandardWorkspaceNoSecrets(workspaceId, true)).thenReturn(workspace); - - final AirbyteCatalog catalogWithSelectedFields = 
ConnectionHelpers.generateApiCatalogWithTwoFields(); - // Only select one of the two fields. - catalogWithSelectedFields.getStreams().get(0).getConfig().fieldSelectionEnabled(true) - .selectedFields(List.of(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME))); - - final ConnectionCreate connectionCreate = buildConnectionCreateRequest(standardSync, catalogWithSelectedFields); - - final ConnectionRead actualConnectionRead = connectionsHandler.createConnection(connectionCreate); - - final ConnectionRead expectedConnectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); - - assertEquals(expectedConnectionRead, actualConnectionRead); - - standardSync.withFieldSelectionData(new FieldSelectionData().withAdditionalProperty("null/users-data0", true)); - - verify(configRepository).writeStandardSync(standardSync); - } - - @Test - void testCreateFullRefreshConnectionWithSelectedFields() throws IOException, JsonValidationException, ConfigNotFoundException { - final StandardWorkspace workspace = new StandardWorkspace() - .withWorkspaceId(workspaceId) - .withDefaultGeography(Geography.AUTO); - when(configRepository.getStandardWorkspaceNoSecrets(workspaceId, true)).thenReturn(workspace); - - final AirbyteCatalog fullRefreshCatalogWithSelectedFields = ConnectionHelpers.generateApiCatalogWithTwoFields(); - fullRefreshCatalogWithSelectedFields.getStreams().get(0).getConfig() - .fieldSelectionEnabled(true) - .selectedFields(List.of(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME))) - .cursorField(null) - .syncMode(SyncMode.FULL_REFRESH); - - final ConnectionCreate connectionCreate = buildConnectionCreateRequest(standardSync, fullRefreshCatalogWithSelectedFields); - - final ConnectionRead actualConnectionRead = connectionsHandler.createConnection(connectionCreate); - - final ConnectionRead expectedConnectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); - - assertEquals(expectedConnectionRead, actualConnectionRead); - - standardSync - 
.withFieldSelectionData(new FieldSelectionData().withAdditionalProperty("null/users-data0", true)) - .getCatalog().getStreams().get(0).withSyncMode(io.airbyte.protocol.models.SyncMode.FULL_REFRESH).withCursorField(null); - - verify(configRepository).writeStandardSync(standardSync); - } - - @Test - void testFieldSelectionRemoveCursorFails() throws JsonValidationException, ConfigNotFoundException, IOException { - // Test that if we try to de-select a field that's being used for the cursor, the request will fail. - // The connection initially has a catalog with one stream, and two fields in that stream. - standardSync.setCatalog(ConnectionHelpers.generateAirbyteCatalogWithTwoFields()); - - // Send an update that sets a cursor but de-selects that field. - final AirbyteCatalog catalogForUpdate = ConnectionHelpers.generateApiCatalogWithTwoFields(); - catalogForUpdate.getStreams().get(0).getConfig() - .fieldSelectionEnabled(true) - .selectedFields(List.of(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME))) - .cursorField(List.of(SECOND_FIELD_NAME)) - .syncMode(SyncMode.INCREMENTAL); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .syncCatalog(catalogForUpdate); - - assertThrows(JsonValidationException.class, () -> connectionsHandler.updateConnection(connectionUpdate)); - } - - @Test - void testFieldSelectionRemovePrimaryKeyFails() throws JsonValidationException, ConfigNotFoundException, IOException { - // Test that if we try to de-select a field that's being used for the primary key, the request will - // fail. - // The connection initially has a catalog with one stream, and two fields in that stream. - standardSync.setCatalog(ConnectionHelpers.generateAirbyteCatalogWithTwoFields()); - - // Send an update that sets a primary key but deselects that field. 
- final AirbyteCatalog catalogForUpdate = ConnectionHelpers.generateApiCatalogWithTwoFields(); - catalogForUpdate.getStreams().get(0).getConfig() - .fieldSelectionEnabled(true) - .selectedFields(List.of(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME))) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(SECOND_FIELD_NAME))); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .syncCatalog(catalogForUpdate); - - assertThrows(JsonValidationException.class, () -> connectionsHandler.updateConnection(connectionUpdate)); - } - - @Test - void testValidateConnectionCreateSourceAndDestinationInDifferenceWorkspace() { - - when(workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(destinationId)).thenReturn(UUID.randomUUID()); - - final ConnectionCreate connectionCreate = new ConnectionCreate() - .sourceId(standardSync.getSourceId()) - .destinationId(standardSync.getDestinationId()); - - assertThrows(IllegalArgumentException.class, () -> connectionsHandler.createConnection(connectionCreate)); - } - - @Test - void testValidateConnectionCreateOperationInDifferentWorkspace() { - - when(workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(operationId)).thenReturn(UUID.randomUUID()); - - final ConnectionCreate connectionCreate = new ConnectionCreate() - .sourceId(standardSync.getSourceId()) - .destinationId(standardSync.getDestinationId()) - .operationIds(Collections.singletonList(operationId)); - - assertThrows(IllegalArgumentException.class, () -> connectionsHandler.createConnection(connectionCreate)); - } - - @Test - void testCreateConnectionWithBadDefinitionIds() throws JsonValidationException, ConfigNotFoundException, IOException { - - final UUID sourceIdBad = UUID.randomUUID(); - final UUID destinationIdBad = UUID.randomUUID(); - - when(configRepository.getSourceConnection(sourceIdBad)) - .thenThrow(new 
ConfigNotFoundException(ConfigSchema.SOURCE_CONNECTION, sourceIdBad)); - when(configRepository.getDestinationConnection(destinationIdBad)) - .thenThrow(new ConfigNotFoundException(ConfigSchema.DESTINATION_CONNECTION, destinationIdBad)); - - final AirbyteCatalog catalog = ConnectionHelpers.generateBasicApiCatalog(); - - final ConnectionCreate connectionCreateBadSource = new ConnectionCreate() - .sourceId(sourceIdBad) - .destinationId(standardSync.getDestinationId()) - .operationIds(standardSync.getOperationIds()) - .name(PRESTO_TO_HUDI) - .namespaceDefinition(NamespaceDefinitionType.SOURCE) - .namespaceFormat(null) - .prefix(PRESTO_TO_HUDI_PREFIX) - .status(ConnectionStatus.ACTIVE) - .schedule(ConnectionHelpers.generateBasicConnectionSchedule()) - .syncCatalog(catalog); - - assertThrows(ConfigNotFoundException.class, () -> connectionsHandler.createConnection(connectionCreateBadSource)); - - final ConnectionCreate connectionCreateBadDestination = new ConnectionCreate() - .sourceId(standardSync.getSourceId()) - .destinationId(destinationIdBad) - .operationIds(standardSync.getOperationIds()) - .name(PRESTO_TO_HUDI) - .namespaceDefinition(NamespaceDefinitionType.SOURCE) - .namespaceFormat(null) - .prefix(PRESTO_TO_HUDI_PREFIX) - .status(ConnectionStatus.ACTIVE) - .schedule(ConnectionHelpers.generateBasicConnectionSchedule()) - .syncCatalog(catalog); - - assertThrows(ConfigNotFoundException.class, () -> connectionsHandler.createConnection(connectionCreateBadDestination)); - - } - - } - - @Nested - class UpdateConnection { - - @Test - void testUpdateConnectionPatchSingleField() throws Exception { - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .name("newName"); - - final ConnectionRead expectedRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync) - .name("newName"); - final StandardSync expectedPersistedSync = Jsons.clone(standardSync).withName("newName"); - - 
when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionRead actualConnectionRead = connectionsHandler.updateConnection(connectionUpdate); - - assertEquals(expectedRead, actualConnectionRead); - verify(configRepository).writeStandardSync(expectedPersistedSync); - verify(eventRunner).update(connectionUpdate.getConnectionId()); - } - - @Test - void testUpdateConnectionPatchScheduleToManual() throws Exception { - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .scheduleType(ConnectionScheduleType.MANUAL); - - final ConnectionRead expectedRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync) - .schedule(null) - .scheduleType(ConnectionScheduleType.MANUAL) - .scheduleData(null); - - final StandardSync expectedPersistedSync = Jsons.clone(standardSync) - .withSchedule(null) - .withScheduleType(ScheduleType.MANUAL) - .withScheduleData(null) - .withManual(true); - - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionRead actualConnectionRead = connectionsHandler.updateConnection(connectionUpdate); - - assertEquals(expectedRead, actualConnectionRead); - verify(configRepository).writeStandardSync(expectedPersistedSync); - verify(eventRunner).update(connectionUpdate.getConnectionId()); - } - - @Test - void testUpdateConnectionPatchScheduleToCron() throws Exception { - - final ConnectionScheduleData cronScheduleData = new ConnectionScheduleData().cron( - new ConnectionScheduleDataCron().cronExpression(CRON_EXPRESSION).cronTimeZone(CRON_TIMEZONE_UTC)); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .scheduleType(ConnectionScheduleType.CRON) - .scheduleData(cronScheduleData); - - final ConnectionRead expectedRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync) - .schedule(null) - 
.scheduleType(ConnectionScheduleType.CRON) - .scheduleData(cronScheduleData); - - final StandardSync expectedPersistedSync = Jsons.clone(standardSync) - .withSchedule(null) - .withScheduleType(ScheduleType.CRON) - .withScheduleData(new ScheduleData().withCron(new Cron().withCronExpression(CRON_EXPRESSION).withCronTimeZone(CRON_TIMEZONE_UTC))) - .withManual(false); - - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionRead actualConnectionRead = connectionsHandler.updateConnection(connectionUpdate); - - assertEquals(expectedRead, actualConnectionRead); - verify(configRepository).writeStandardSync(expectedPersistedSync); - verify(eventRunner).update(connectionUpdate.getConnectionId()); - } - - @Test - void testUpdateConnectionPatchBasicSchedule() throws Exception { - - final ConnectionScheduleData newScheduleData = - new ConnectionScheduleData().basicSchedule(new ConnectionScheduleDataBasicSchedule().timeUnit(TimeUnitEnum.DAYS).units(10L)); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .scheduleType(ConnectionScheduleType.BASIC) // update route requires this to be set even if it isn't changing - .scheduleData(newScheduleData); - - final ConnectionRead expectedRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync) - .schedule(new ConnectionSchedule().timeUnit(ConnectionSchedule.TimeUnitEnum.DAYS).units(10L)) // still dual-writing to legacy field - .scheduleType(ConnectionScheduleType.BASIC) - .scheduleData(newScheduleData); - - final StandardSync expectedPersistedSync = Jsons.clone(standardSync) - .withSchedule(new Schedule().withTimeUnit(TimeUnit.DAYS).withUnits(10L)) // still dual-writing to legacy field - .withScheduleType(ScheduleType.BASIC_SCHEDULE) - .withScheduleData(new ScheduleData().withBasicSchedule(new BasicSchedule().withTimeUnit(BasicSchedule.TimeUnit.DAYS).withUnits(10L))) - .withManual(false); - - 
when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionRead actualConnectionRead = connectionsHandler.updateConnection(connectionUpdate); - - assertEquals(expectedRead, actualConnectionRead); - verify(configRepository).writeStandardSync(expectedPersistedSync); - verify(eventRunner).update(connectionUpdate.getConnectionId()); - } - - @Test - void testUpdateConnectionPatchAddingNewStream() throws Exception { - // the connection initially has a catalog with one stream. this test generates another catalog with - // one stream, changes that stream's name to something new, and sends both streams in the patch - // request. - // the test expects the final result to include both streams. - final AirbyteCatalog catalogWithNewStream = ConnectionHelpers.generateBasicApiCatalog(); - catalogWithNewStream.getStreams().get(0).getStream().setName(AZKABAN_USERS); - catalogWithNewStream.getStreams().get(0).getConfig().setAliasName(AZKABAN_USERS); - - final AirbyteCatalog catalogForUpdate = ConnectionHelpers.generateMultipleStreamsApiCatalog(2); - catalogForUpdate.getStreams().get(1).getStream().setName(AZKABAN_USERS); - catalogForUpdate.getStreams().get(1).getConfig().setAliasName(AZKABAN_USERS); - - // expect two streams in the final persisted catalog -- the original unchanged stream, plus the new - // AZKABAN_USERS stream - - final ConfiguredAirbyteCatalog expectedPersistedCatalog = ConnectionHelpers.generateMultipleStreamsConfiguredAirbyteCatalog(2); - expectedPersistedCatalog.getStreams().get(1).getStream().setName(AZKABAN_USERS); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .syncCatalog(catalogForUpdate); - - final ConnectionRead expectedRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync) - .syncCatalog(catalogForUpdate); - - final StandardSync expectedPersistedSync = Jsons.clone(standardSync) - 
.withCatalog(expectedPersistedCatalog) - .withFieldSelectionData(CatalogConverter.getFieldSelectionData(catalogForUpdate)); - - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionRead actualConnectionRead = connectionsHandler.updateConnection(connectionUpdate); - - assertEquals(expectedRead, actualConnectionRead); - verify(configRepository).writeStandardSync(expectedPersistedSync); - verify(eventRunner).update(connectionUpdate.getConnectionId()); - } - - @Test - void testUpdateConnectionPatchEditExistingStreamWhileAddingNewStream() throws Exception { - // the connection initially has a catalog with two streams. this test updates the catalog - // with a sync mode change for one of the initial streams while also adding a brand-new - // stream. The result should be a catalog with three streams. - standardSync.setCatalog(ConnectionHelpers.generateMultipleStreamsConfiguredAirbyteCatalog(2)); - - final AirbyteCatalog catalogForUpdate = ConnectionHelpers.generateMultipleStreamsApiCatalog(3); - catalogForUpdate.getStreams().get(0).getConfig().setSyncMode(SyncMode.FULL_REFRESH); - catalogForUpdate.getStreams().get(2).getStream().setName(AZKABAN_USERS); - catalogForUpdate.getStreams().get(2).getConfig().setAliasName(AZKABAN_USERS); - - // expect three streams in the final persisted catalog - final ConfiguredAirbyteCatalog expectedPersistedCatalog = ConnectionHelpers.generateMultipleStreamsConfiguredAirbyteCatalog(3); - expectedPersistedCatalog.getStreams().get(0).withSyncMode(io.airbyte.protocol.models.SyncMode.FULL_REFRESH); - // index 1 is unchanged - expectedPersistedCatalog.getStreams().get(2).getStream().withName(AZKABAN_USERS); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .syncCatalog(catalogForUpdate); - - final ConnectionRead expectedRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync) - 
.syncCatalog(catalogForUpdate); - - final StandardSync expectedPersistedSync = Jsons.clone(standardSync) - .withCatalog(expectedPersistedCatalog) - .withFieldSelectionData(CatalogConverter.getFieldSelectionData(catalogForUpdate)); - - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionRead actualConnectionRead = connectionsHandler.updateConnection(connectionUpdate); - - assertEquals(expectedRead, actualConnectionRead); - verify(configRepository).writeStandardSync(expectedPersistedSync); - verify(eventRunner).update(connectionUpdate.getConnectionId()); - } - - @Test - void testUpdateConnectionPatchColumnSelection() throws Exception { - // The connection initially has a catalog with one stream, and two fields in that stream. - standardSync.setCatalog(ConnectionHelpers.generateAirbyteCatalogWithTwoFields()); - - // Send an update that only selects one of the fields. - final AirbyteCatalog catalogForUpdate = ConnectionHelpers.generateApiCatalogWithTwoFields(); - catalogForUpdate.getStreams().get(0).getConfig().fieldSelectionEnabled(true) - .selectedFields(List.of(new SelectedFieldInfo().addFieldPathItem(FIELD_NAME))); - - // Expect one column in the final persisted catalog - final ConfiguredAirbyteCatalog expectedPersistedCatalog = ConnectionHelpers.generateBasicConfiguredAirbyteCatalog(); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .syncCatalog(catalogForUpdate); - - final ConnectionRead expectedRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync) - .syncCatalog(catalogForUpdate); - - final StandardSync expectedPersistedSync = Jsons.clone(standardSync) - .withCatalog(expectedPersistedCatalog) - .withFieldSelectionData(CatalogConverter.getFieldSelectionData(catalogForUpdate)); - - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionRead 
actualConnectionRead = connectionsHandler.updateConnection(connectionUpdate); - - assertEquals(expectedRead, actualConnectionRead); - verify(configRepository).writeStandardSync(expectedPersistedSync); - verify(eventRunner).update(connectionUpdate.getConnectionId()); - } - - @Test - void testUpdateConnectionPatchingSeveralFieldsAndReplaceAStream() throws JsonValidationException, ConfigNotFoundException, IOException { - final AirbyteCatalog catalogForUpdate = ConnectionHelpers.generateMultipleStreamsApiCatalog(2); - - // deselect the existing stream, and add a new stream called 'azkaban_users'. - // result that we persist and read after update should be a catalog with a single - // stream called 'azkaban_users'. - catalogForUpdate.getStreams().get(0).getConfig().setSelected(false); - catalogForUpdate.getStreams().get(1).getStream().setName(AZKABAN_USERS); - catalogForUpdate.getStreams().get(1).getConfig().setAliasName(AZKABAN_USERS); - - final UUID newSourceCatalogId = UUID.randomUUID(); - - final ResourceRequirements resourceRequirements = new ResourceRequirements() - .cpuLimit(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getCpuLimit()) - .cpuRequest(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getCpuRequest()) - .memoryLimit(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getMemoryLimit()) - .memoryRequest(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getMemoryRequest()); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .status(ConnectionStatus.INACTIVE) - .scheduleType(ConnectionScheduleType.MANUAL) - .syncCatalog(catalogForUpdate) - .resourceRequirements(resourceRequirements) - .sourceCatalogId(newSourceCatalogId) - .operationIds(List.of(operationId, otherOperationId)) - .geography(io.airbyte.api.model.generated.Geography.EU); - - final ConfiguredAirbyteCatalog expectedPersistedCatalog = ConnectionHelpers.generateBasicConfiguredAirbyteCatalog(); - 
expectedPersistedCatalog.getStreams().get(0).getStream().withName(AZKABAN_USERS); - - final StandardSync expectedPersistedSync = Jsons.clone(standardSync) - .withStatus(Status.INACTIVE) - .withScheduleType(ScheduleType.MANUAL) - .withScheduleData(null) - .withSchedule(null) - .withManual(true) - .withCatalog(expectedPersistedCatalog) - .withFieldSelectionData(CatalogConverter.getFieldSelectionData(catalogForUpdate)) - .withResourceRequirements(ApiPojoConverters.resourceRequirementsToInternal(resourceRequirements)) - .withSourceCatalogId(newSourceCatalogId) - .withOperationIds(List.of(operationId, otherOperationId)) - .withGeography(Geography.EU); - - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionRead actualConnectionRead = connectionsHandler.updateConnection(connectionUpdate); - - final AirbyteCatalog expectedCatalogInRead = ConnectionHelpers.generateBasicApiCatalog(); - expectedCatalogInRead.getStreams().get(0).getStream().setName(AZKABAN_USERS); - expectedCatalogInRead.getStreams().get(0).getConfig().setAliasName(AZKABAN_USERS); - - final ConnectionRead expectedConnectionRead = ConnectionHelpers.generateExpectedConnectionRead( - standardSync.getConnectionId(), - standardSync.getSourceId(), - standardSync.getDestinationId(), - standardSync.getOperationIds(), - newSourceCatalogId, - ApiPojoConverters.toApiGeography(standardSync.getGeography()), false) - .status(ConnectionStatus.INACTIVE) - .scheduleType(ConnectionScheduleType.MANUAL) - .scheduleData(null) - .schedule(null) - .syncCatalog(expectedCatalogInRead) - .resourceRequirements(resourceRequirements); - - assertEquals(expectedConnectionRead, actualConnectionRead); - verify(configRepository).writeStandardSync(expectedPersistedSync); - verify(eventRunner).update(connectionUpdate.getConnectionId()); - } - - @Test - void testValidateConnectionUpdateOperationInDifferentWorkspace() throws JsonValidationException, ConfigNotFoundException, 
IOException { - when(workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(operationId)).thenReturn(UUID.randomUUID()); - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - - final ConnectionUpdate connectionUpdate = new ConnectionUpdate() - .connectionId(standardSync.getConnectionId()) - .operationIds(Collections.singletonList(operationId)) - .syncCatalog(CatalogConverter.toApi(standardSync.getCatalog(), standardSync.getFieldSelectionData())); - - assertThrows(IllegalArgumentException.class, () -> connectionsHandler.updateConnection(connectionUpdate)); - } - - } - - @Test - void testGetConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.getStandardSync(standardSync.getConnectionId())) - .thenReturn(standardSync); - - final ConnectionRead actualConnectionRead = connectionsHandler.getConnection(standardSync.getConnectionId()); - - assertEquals(ConnectionHelpers.generateExpectedConnectionRead(standardSync), actualConnectionRead); - } - - @Test - void testListConnectionsForWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.listWorkspaceStandardSyncs(source.getWorkspaceId(), false)) - .thenReturn(Lists.newArrayList(standardSync)); - when(configRepository.listWorkspaceStandardSyncs(source.getWorkspaceId(), true)) - .thenReturn(Lists.newArrayList(standardSync, standardSyncDeleted)); - when(configRepository.getStandardSync(standardSync.getConnectionId())) - .thenReturn(standardSync); - - final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(source.getWorkspaceId()); - final ConnectionReadList actualConnectionReadList = connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody); - assertEquals(1, actualConnectionReadList.getConnections().size()); - assertEquals( - ConnectionHelpers.generateExpectedConnectionRead(standardSync), - 
actualConnectionReadList.getConnections().get(0)); - - final ConnectionReadList actualConnectionReadListWithDeleted = connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody, true); - final List connections = actualConnectionReadListWithDeleted.getConnections(); - assertEquals(2, connections.size()); - assertEquals(ApiPojoConverters.internalToConnectionRead(standardSync), connections.get(0)); - assertEquals(ApiPojoConverters.internalToConnectionRead(standardSyncDeleted), connections.get(1)); - - } - - @Test - void testListConnections() throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.listStandardSyncs()) - .thenReturn(Lists.newArrayList(standardSync)); - when(configRepository.getSourceConnection(source.getSourceId())) - .thenReturn(source); - when(configRepository.getStandardSync(standardSync.getConnectionId())) - .thenReturn(standardSync); - - final ConnectionReadList actualConnectionReadList = connectionsHandler.listConnections(); - - assertEquals( - ConnectionHelpers.generateExpectedConnectionRead(standardSync), - actualConnectionReadList.getConnections().get(0)); - } - - @Test - void testSearchConnections() throws JsonValidationException, ConfigNotFoundException, IOException { - final ConnectionRead connectionRead1 = ConnectionHelpers.connectionReadFromStandardSync(standardSync); - final StandardSync standardSync2 = new StandardSync() - .withConnectionId(UUID.randomUUID()) - .withName("test connection") - .withNamespaceDefinition(JobSyncConfig.NamespaceDefinitionType.CUSTOMFORMAT) - .withNamespaceFormat("ns_format") - .withPrefix("test_prefix") - .withStatus(StandardSync.Status.ACTIVE) - .withCatalog(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()) - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withOperationIds(List.of(operationId)) - .withManual(true) - .withResourceRequirements(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS) - .withGeography(Geography.US) - 
.withBreakingChange(false); - final ConnectionRead connectionRead2 = ConnectionHelpers.connectionReadFromStandardSync(standardSync2); - final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() - .withName(SOURCE_TEST) - .withSourceDefinitionId(UUID.randomUUID()); - final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() - .withName(DESTINATION_TEST) - .withDestinationDefinitionId(UUID.randomUUID()); - - when(configRepository.listStandardSyncs()) - .thenReturn(Lists.newArrayList(standardSync, standardSync2)); - when(configRepository.getSourceConnection(source.getSourceId())) - .thenReturn(source); - when(configRepository.getDestinationConnection(destination.getDestinationId())) - .thenReturn(destination); - when(configRepository.getStandardSync(standardSync.getConnectionId())) - .thenReturn(standardSync); - when(configRepository.getStandardSync(standardSync2.getConnectionId())) - .thenReturn(standardSync2); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDefinition); - when(configRepository.getStandardDestinationDefinition(destination.getDestinationDefinitionId())) - .thenReturn(destinationDefinition); - - final ConnectionSearch connectionSearch = new ConnectionSearch(); - connectionSearch.namespaceDefinition(NamespaceDefinitionType.SOURCE); - ConnectionReadList actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(1, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - - connectionSearch.namespaceDefinition(null); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(2, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - assertEquals(connectionRead2, 
actualConnectionReadList.getConnections().get(1)); - - final SourceSearch sourceSearch = new SourceSearch().sourceId(UUID.randomUUID()); - connectionSearch.setSource(sourceSearch); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(0, actualConnectionReadList.getConnections().size()); - - sourceSearch.sourceId(connectionRead1.getSourceId()); - connectionSearch.setSource(sourceSearch); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(2, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - assertEquals(connectionRead2, actualConnectionReadList.getConnections().get(1)); - - final DestinationSearch destinationSearch = new DestinationSearch(); - connectionSearch.setDestination(destinationSearch); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(2, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - assertEquals(connectionRead2, actualConnectionReadList.getConnections().get(1)); - - destinationSearch.connectionConfiguration(Jsons.jsonNode(Collections.singletonMap("apiKey", "not-found"))); - connectionSearch.setDestination(destinationSearch); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(0, actualConnectionReadList.getConnections().size()); - - destinationSearch.connectionConfiguration(Jsons.jsonNode(Collections.singletonMap("apiKey", "123-abc"))); - connectionSearch.setDestination(destinationSearch); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(2, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - assertEquals(connectionRead2, 
actualConnectionReadList.getConnections().get(1)); - - connectionSearch.name("non-existent"); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(0, actualConnectionReadList.getConnections().size()); - - connectionSearch.name(connectionRead1.getName()); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(1, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - - connectionSearch.name(connectionRead2.getName()); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(1, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead2, actualConnectionReadList.getConnections().get(0)); - - connectionSearch.namespaceDefinition(connectionRead1.getNamespaceDefinition()); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(0, actualConnectionReadList.getConnections().size()); - - connectionSearch.name(null); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(1, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - - connectionSearch.namespaceDefinition(connectionRead2.getNamespaceDefinition()); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(1, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead2, actualConnectionReadList.getConnections().get(0)); - - connectionSearch.namespaceDefinition(null); - connectionSearch.status(ConnectionStatus.INACTIVE); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(0, actualConnectionReadList.getConnections().size()); - - connectionSearch.status(ConnectionStatus.ACTIVE); - actualConnectionReadList = 
connectionsHandler.searchConnections(connectionSearch); - assertEquals(2, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - assertEquals(connectionRead2, actualConnectionReadList.getConnections().get(1)); - - connectionSearch.prefix(connectionRead1.getPrefix()); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(1, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead1, actualConnectionReadList.getConnections().get(0)); - - connectionSearch.prefix(connectionRead2.getPrefix()); - actualConnectionReadList = connectionsHandler.searchConnections(connectionSearch); - assertEquals(1, actualConnectionReadList.getConnections().size()); - assertEquals(connectionRead2, actualConnectionReadList.getConnections().get(0)); - } - - @Test - void testDeleteConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - connectionsHandler.deleteConnection(connectionId); - - verify(connectionHelper).deleteConnection(connectionId); - } - - @Test - void failOnUnmatchedWorkspacesInCreate() throws JsonValidationException, ConfigNotFoundException, IOException { - when(workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(standardSync.getSourceId())).thenReturn(UUID.randomUUID()); - when(workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(standardSync.getDestinationId())).thenReturn(UUID.randomUUID()); - when(configRepository.getSourceConnection(source.getSourceId())) - .thenReturn(source); - when(configRepository.getDestinationConnection(destination.getDestinationId())) - .thenReturn(destination); - - when(uuidGenerator.get()).thenReturn(standardSync.getConnectionId()); - final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() - .withName(SOURCE_TEST) - .withSourceDefinitionId(UUID.randomUUID()); - final StandardDestinationDefinition destinationDefinition = new 
StandardDestinationDefinition() - .withName(DESTINATION_TEST) - .withDestinationDefinitionId(UUID.randomUUID()); - when(configRepository.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); - when(configRepository.getSourceDefinitionFromConnection(standardSync.getConnectionId())).thenReturn(sourceDefinition); - when(configRepository.getDestinationDefinitionFromConnection(standardSync.getConnectionId())).thenReturn(destinationDefinition); - - final AirbyteCatalog catalog = ConnectionHelpers.generateBasicApiCatalog(); - - final ConnectionCreate connectionCreate = new ConnectionCreate() - .sourceId(standardSync.getSourceId()) - .destinationId(standardSync.getDestinationId()) - .operationIds(standardSync.getOperationIds()) - .name(PRESTO_TO_HUDI) - .namespaceDefinition(NamespaceDefinitionType.SOURCE) - .namespaceFormat(null) - .prefix(PRESTO_TO_HUDI_PREFIX) - .status(ConnectionStatus.ACTIVE) - .schedule(ConnectionHelpers.generateBasicConnectionSchedule()) - .syncCatalog(catalog) - .resourceRequirements(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(standardSync.getResourceRequirements().getCpuRequest()) - .cpuLimit(standardSync.getResourceRequirements().getCpuLimit()) - .memoryRequest(standardSync.getResourceRequirements().getMemoryRequest()) - .memoryLimit(standardSync.getResourceRequirements().getMemoryLimit())); - - Assert.assertThrows(IllegalArgumentException.class, () -> { - connectionsHandler.createConnection(connectionCreate); - }); - } - - @Test - void testEnumConversion() { - assertTrue(Enums.isCompatible(ConnectionStatus.class, StandardSync.Status.class)); - assertTrue(Enums.isCompatible(io.airbyte.config.SyncMode.class, SyncMode.class)); - assertTrue(Enums.isCompatible(StandardSync.Status.class, ConnectionStatus.class)); - assertTrue(Enums.isCompatible(ConnectionSchedule.TimeUnitEnum.class, Schedule.TimeUnit.class)); - assertTrue(Enums.isCompatible(io.airbyte.api.model.generated.DataType.class, DataType.class)); 
- assertTrue(Enums.isCompatible(DataType.class, io.airbyte.api.model.generated.DataType.class)); - assertTrue(Enums.isCompatible(NamespaceDefinitionType.class, io.airbyte.config.JobSyncConfig.NamespaceDefinitionType.class)); - } - - } - - @Nested - class StreamConfigurationDiff { - - @BeforeEach - void setUp() { - connectionsHandler = new ConnectionsHandler( - configRepository, - uuidGenerator, - workspaceHelper, - trackingClient, - eventRunner, - connectionHelper); - } - - @Test - void testNoDiff() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - - assertTrue(connectionsHandler.getConfigurationDiff(catalog1, catalog2).isEmpty()); - } - - @Test - void testNoDiffIfStreamAdded() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - 
getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - - assertTrue(connectionsHandler.getConfigurationDiff(catalog1, catalog2).isEmpty()); - } - - @Test - void testCursorOrderDoesMatter() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1, "anotherCursor"), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration1WithOtherCursorOrder = getStreamConfiguration( - List.of("anotherCursor", CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1WithOtherCursorOrder), - getStreamAndConfig(STREAM2, streamConfiguration2))); - - final Set changedSd = connectionsHandler.getConfigurationDiff(catalog1, catalog2); - assertFalse(changedSd.isEmpty()); - assertEquals(1, changedSd.size()); - assertEquals(Set.of(new StreamDescriptor().name(STREAM1)), changedSd); - } - - @Test - void testPkOrderDoesntMatter() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1, PK3)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration1WithOtherPkOrder = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK3, PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration 
streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2), List.of(PK3)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteStreamConfiguration streamConfiguration2WithOtherPkOrder = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK3), List.of(PK2)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1WithOtherPkOrder), - getStreamAndConfig(STREAM2, streamConfiguration2WithOtherPkOrder))); - - final Set changedSd = connectionsHandler.getConfigurationDiff(catalog1, catalog2); - assertFalse(changedSd.isEmpty()); - assertEquals(1, changedSd.size()); - assertEquals(Set.of(new StreamDescriptor().name(STREAM1)), changedSd); - } - - @Test - void testNoDiffIfStreamRemove() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1))); - - assertTrue(connectionsHandler.getConfigurationDiff(catalog1, catalog2).isEmpty()); - } - - @Test - void testDiffDifferentCursor() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - 
List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration1CursorDiff = getStreamConfiguration( - List.of(CURSOR1, "anotherCursor"), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1CursorDiff), - getStreamAndConfig(STREAM2, streamConfiguration2))); - - final Set changedSd = connectionsHandler.getConfigurationDiff(catalog1, catalog2); - assertFalse(changedSd.isEmpty()); - assertEquals(1, changedSd.size()); - assertEquals(Set.of(new StreamDescriptor().name(STREAM1)), changedSd); - } - - @Test - void testDiffIfDifferentPrimaryKey() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration1WithPkDiff = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1, PK3)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration2WithPkDiff = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1), List.of(PK3)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final 
AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1WithPkDiff), - getStreamAndConfig(STREAM2, streamConfiguration2WithPkDiff))); - - final Set changedSd = connectionsHandler.getConfigurationDiff(catalog1, catalog2); - assertFalse(changedSd.isEmpty()); - assertEquals(2, changedSd.size()); - Assertions.assertThat(changedSd) - .containsExactlyInAnyOrder(new StreamDescriptor().name(STREAM1), new StreamDescriptor().name(STREAM2)); - } - - @Test - void testDiffDifferentSyncMode() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration1CursorDiff = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1CursorDiff), - getStreamAndConfig(STREAM2, streamConfiguration2))); - - final Set changedSd = connectionsHandler.getConfigurationDiff(catalog1, catalog2); - assertFalse(changedSd.isEmpty()); - assertEquals(1, changedSd.size()); - assertEquals(Set.of(new StreamDescriptor().name(STREAM1)), changedSd); - } - - @Test - void testDiffDifferentDestinationSyncMode() { - final 
AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP); - - final AirbyteStreamConfiguration streamConfiguration1CursorDiff = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR2), - List.of(List.of(PK2)), - SyncMode.FULL_REFRESH, - DestinationSyncMode.OVERWRITE); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1CursorDiff), - getStreamAndConfig(STREAM2, streamConfiguration2))); - - final Set changedSd = connectionsHandler.getConfigurationDiff(catalog1, catalog2); - assertFalse(changedSd.isEmpty()); - assertEquals(1, changedSd.size()); - assertEquals(Set.of(new StreamDescriptor().name(STREAM1)), changedSd); - } - - private AirbyteStreamAndConfiguration getStreamAndConfig(final String name, final AirbyteStreamConfiguration config) { - return new AirbyteStreamAndConfiguration() - .config(config) - .stream(new AirbyteStream().name(name)); - } - - private AirbyteStreamConfiguration getStreamConfiguration(final List cursors, - final List> primaryKeys, - final SyncMode syncMode, - final DestinationSyncMode destinationSyncMode) { - return new AirbyteStreamConfiguration() - .cursorField(cursors) - .primaryKey(primaryKeys) - .syncMode(syncMode) - .destinationSyncMode(destinationSyncMode); - - } - - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandlerTest.java 
b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandlerTest.java deleted file mode 100644 index 27201fb73305..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandlerTest.java +++ /dev/null @@ -1,673 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.CustomDestinationDefinitionCreate; -import io.airbyte.api.model.generated.DestinationDefinitionCreate; -import io.airbyte.api.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionRead; -import io.airbyte.api.model.generated.DestinationDefinitionReadList; -import io.airbyte.api.model.generated.DestinationDefinitionUpdate; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationReadList; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionRead; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionReadList; -import io.airbyte.api.model.generated.ReleaseStage; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.errors.IdNotFoundKnownException; -import 
io.airbyte.commons.server.errors.UnsupportedProtocolVersionException; -import io.airbyte.commons.server.scheduler.SynchronousJobMetadata; -import io.airbyte.commons.server.scheduler.SynchronousResponse; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.commons.server.services.AirbyteGithubStore; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.ActorType; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.NormalizationDestinationDefinitionConfig; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.time.LocalDate; -import java.util.Collections; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -class DestinationDefinitionsHandlerTest { - - private static final String TODAY_DATE_STRING = LocalDate.now().toString(); - private static final String DEFAULT_PROTOCOL_VERSION = "0.2.0"; - - private ConfigRepository configRepository; - private StandardDestinationDefinition destinationDefinition; - private StandardDestinationDefinition destinationDefinitionWithNormalization; - - private DestinationDefinitionsHandler destinationDefinitionsHandler; - private Supplier uuidSupplier; - private SynchronousSchedulerClient schedulerSynchronousClient; - private AirbyteGithubStore githubStore; - private 
DestinationHandler destinationHandler; - private UUID workspaceId; - private AirbyteProtocolVersionRange protocolVersionRange; - - @SuppressWarnings("unchecked") - @BeforeEach - void setUp() { - configRepository = mock(ConfigRepository.class); - uuidSupplier = mock(Supplier.class); - destinationDefinition = generateDestinationDefinition(); - destinationDefinitionWithNormalization = generateDestinationDefinitionWithNormalization(); - schedulerSynchronousClient = spy(SynchronousSchedulerClient.class); - githubStore = mock(AirbyteGithubStore.class); - destinationHandler = mock(DestinationHandler.class); - workspaceId = UUID.randomUUID(); - protocolVersionRange = new AirbyteProtocolVersionRange(new Version("0.0.0"), new Version("0.3.0")); - - destinationDefinitionsHandler = new DestinationDefinitionsHandler( - configRepository, - uuidSupplier, - schedulerSynchronousClient, - githubStore, - destinationHandler, - protocolVersionRange); - } - - private StandardDestinationDefinition generateDestinationDefinition() { - final ConnectorSpecification spec = new ConnectorSpecification() - .withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo", "bar"))); - - return new StandardDestinationDefinition() - .withDestinationDefinitionId(UUID.randomUUID()) - .withName("presto") - .withDockerImageTag("12.3") - .withDockerRepository("repo") - .withDocumentationUrl("https://hulu.com") - .withIcon("http.svg") - .withSpec(spec) - .withProtocolVersion("0.2.2") - .withTombstone(false) - .withReleaseStage(StandardDestinationDefinition.ReleaseStage.ALPHA) - .withReleaseDate(TODAY_DATE_STRING) - .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(new ResourceRequirements().withCpuRequest("2"))); - } - - private StandardDestinationDefinition generateDestinationDefinitionWithNormalization() { - final StandardDestinationDefinition definition = generateDestinationDefinition(); - return definition - .withSupportsDbt(true) - .withNormalizationConfig(new 
NormalizationDestinationDefinitionConfig() - .withNormalizationRepository("repository") - .withNormalizationTag("dev") - .withNormalizationIntegrationType("integration-type")); - } - - @Test - @DisplayName("listDestinationDefinition should return the right list") - void testListDestinations() throws JsonValidationException, IOException, URISyntaxException { - - when(configRepository.listStandardDestinationDefinitions(false)) - .thenReturn(Lists.newArrayList(destinationDefinition, destinationDefinitionWithNormalization)); - - final DestinationDefinitionRead expectedDestinationDefinitionRead1 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .name(destinationDefinition.getName()) - .dockerRepository(destinationDefinition.getDockerRepository()) - .dockerImageTag(destinationDefinition.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinition.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinition.getIcon())) - .protocolVersion(destinationDefinition.getProtocolVersion()) - .releaseStage(ReleaseStage.fromValue(destinationDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinition.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final DestinationDefinitionRead expectedDestinationDefinitionRead2 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinitionWithNormalization.getDestinationDefinitionId()) - .name(destinationDefinitionWithNormalization.getName()) - 
.dockerRepository(destinationDefinitionWithNormalization.getDockerRepository()) - .dockerImageTag(destinationDefinitionWithNormalization.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinitionWithNormalization.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinitionWithNormalization.getIcon())) - .protocolVersion(destinationDefinitionWithNormalization.getProtocolVersion()) - .releaseStage(ReleaseStage.fromValue(destinationDefinitionWithNormalization.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinitionWithNormalization.getReleaseDate())) - .supportsDbt(destinationDefinitionWithNormalization.getSupportsDbt()) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(true) - .normalizationRepository(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationRepository()) - .normalizationTag(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationTag()) - .normalizationIntegrationType(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationIntegrationType())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinitionWithNormalization.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final DestinationDefinitionReadList actualDestinationDefinitionReadList = destinationDefinitionsHandler.listDestinationDefinitions(); - - assertEquals( - Lists.newArrayList(expectedDestinationDefinitionRead1, expectedDestinationDefinitionRead2), - actualDestinationDefinitionReadList.getDestinationDefinitions()); - } - - @Test - @DisplayName("listDestinationDefinitionsForWorkspace should return the right list") - void testListDestinationDefinitionsForWorkspace() throws IOException, URISyntaxException 
{ - - when(configRepository.listPublicDestinationDefinitions(false)).thenReturn(Lists.newArrayList(destinationDefinition)); - when(configRepository.listGrantedDestinationDefinitions(workspaceId, false)) - .thenReturn(Lists.newArrayList(destinationDefinitionWithNormalization)); - - final DestinationDefinitionRead expectedDestinationDefinitionRead1 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .name(destinationDefinition.getName()) - .dockerRepository(destinationDefinition.getDockerRepository()) - .dockerImageTag(destinationDefinition.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinition.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinition.getIcon())) - .protocolVersion(destinationDefinition.getProtocolVersion()) - .releaseStage(ReleaseStage.fromValue(destinationDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinition.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final DestinationDefinitionRead expectedDestinationDefinitionRead2 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinitionWithNormalization.getDestinationDefinitionId()) - .name(destinationDefinitionWithNormalization.getName()) - .dockerRepository(destinationDefinitionWithNormalization.getDockerRepository()) - .dockerImageTag(destinationDefinitionWithNormalization.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinitionWithNormalization.getDocumentationUrl())) - 
.icon(DestinationDefinitionsHandler.loadIcon(destinationDefinitionWithNormalization.getIcon())) - .protocolVersion(destinationDefinitionWithNormalization.getProtocolVersion()) - .releaseStage(ReleaseStage.fromValue(destinationDefinitionWithNormalization.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinitionWithNormalization.getReleaseDate())) - .supportsDbt(destinationDefinitionWithNormalization.getSupportsDbt()) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(true) - .normalizationRepository(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationRepository()) - .normalizationTag(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationTag()) - .normalizationIntegrationType(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationIntegrationType())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinitionWithNormalization.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final DestinationDefinitionReadList actualDestinationDefinitionReadList = destinationDefinitionsHandler - .listDestinationDefinitionsForWorkspace(new WorkspaceIdRequestBody().workspaceId(workspaceId)); - - assertEquals( - Lists.newArrayList(expectedDestinationDefinitionRead1, expectedDestinationDefinitionRead2), - actualDestinationDefinitionReadList.getDestinationDefinitions()); - } - - @Test - @DisplayName("listPrivateDestinationDefinitions should return the right list") - void testListPrivateDestinationDefinitions() throws IOException, URISyntaxException { - - when(configRepository.listGrantableDestinationDefinitions(workspaceId, false)).thenReturn( - Lists.newArrayList( - Map.entry(destinationDefinition, false), - 
Map.entry(destinationDefinitionWithNormalization, true))); - - final DestinationDefinitionRead expectedDestinationDefinitionRead1 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .name(destinationDefinition.getName()) - .dockerRepository(destinationDefinition.getDockerRepository()) - .dockerImageTag(destinationDefinition.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinition.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinition.getIcon())) - .protocolVersion(destinationDefinition.getProtocolVersion()) - .releaseStage(ReleaseStage.fromValue(destinationDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinition.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final DestinationDefinitionRead expectedDestinationDefinitionRead2 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinitionWithNormalization.getDestinationDefinitionId()) - .name(destinationDefinitionWithNormalization.getName()) - .dockerRepository(destinationDefinitionWithNormalization.getDockerRepository()) - .dockerImageTag(destinationDefinitionWithNormalization.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinitionWithNormalization.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinitionWithNormalization.getIcon())) - .protocolVersion(destinationDefinitionWithNormalization.getProtocolVersion()) - 
.releaseStage(ReleaseStage.fromValue(destinationDefinitionWithNormalization.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinitionWithNormalization.getReleaseDate())) - .supportsDbt(destinationDefinitionWithNormalization.getSupportsDbt()) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(true) - .normalizationRepository(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationRepository()) - .normalizationTag(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationTag()) - .normalizationIntegrationType(destinationDefinitionWithNormalization.getNormalizationConfig().getNormalizationIntegrationType())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinitionWithNormalization.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final PrivateDestinationDefinitionRead expectedDestinationDefinitionOptInRead1 = - new PrivateDestinationDefinitionRead().destinationDefinition(expectedDestinationDefinitionRead1).granted(false); - - final PrivateDestinationDefinitionRead expectedDestinationDefinitionOptInRead2 = - new PrivateDestinationDefinitionRead().destinationDefinition(expectedDestinationDefinitionRead2).granted(true); - - final PrivateDestinationDefinitionReadList actualDestinationDefinitionOptInReadList = - destinationDefinitionsHandler.listPrivateDestinationDefinitions( - new WorkspaceIdRequestBody().workspaceId(workspaceId)); - - assertEquals( - Lists.newArrayList(expectedDestinationDefinitionOptInRead1, expectedDestinationDefinitionOptInRead2), - actualDestinationDefinitionOptInReadList.getDestinationDefinitions()); - } - - @Test - @DisplayName("getDestinationDefinition should return the right destination") - void testGetDestination() throws 
JsonValidationException, ConfigNotFoundException, IOException, URISyntaxException { - when(configRepository.getStandardDestinationDefinition(destinationDefinition.getDestinationDefinitionId())) - .thenReturn(destinationDefinition); - - final DestinationDefinitionRead expectedDestinationDefinitionRead = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .name(destinationDefinition.getName()) - .dockerRepository(destinationDefinition.getDockerRepository()) - .dockerImageTag(destinationDefinition.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinition.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinition.getIcon())) - .protocolVersion(destinationDefinition.getProtocolVersion()) - .releaseStage(ReleaseStage.fromValue(destinationDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinition.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody = new DestinationDefinitionIdRequestBody() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()); - - final DestinationDefinitionRead actualDestinationDefinitionRead = - destinationDefinitionsHandler.getDestinationDefinition(destinationDefinitionIdRequestBody); - - assertEquals(expectedDestinationDefinitionRead, actualDestinationDefinitionRead); - } - - @Test - @DisplayName("getDestinationDefinitionForWorkspace should throw an exception for a missing grant") - void 
testGetDefinitionWithoutGrantForWorkspace() throws IOException { - when(configRepository.workspaceCanUseDefinition(destinationDefinition.getDestinationDefinitionId(), workspaceId)) - .thenReturn(false); - - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId = new DestinationDefinitionIdWithWorkspaceId() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .workspaceId(workspaceId); - - assertThrows(IdNotFoundKnownException.class, - () -> destinationDefinitionsHandler.getDestinationDefinitionForWorkspace(destinationDefinitionIdWithWorkspaceId)); - } - - @Test - @DisplayName("getDestinationDefinitionForWorkspace should return the destination if the grant exists") - void testGetDefinitionWithGrantForWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException, URISyntaxException { - when(configRepository.workspaceCanUseDefinition(destinationDefinition.getDestinationDefinitionId(), workspaceId)) - .thenReturn(true); - when(configRepository.getStandardDestinationDefinition(destinationDefinition.getDestinationDefinitionId())) - .thenReturn(destinationDefinition); - - final DestinationDefinitionRead expectedDestinationDefinitionRead = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .name(destinationDefinition.getName()) - .dockerRepository(destinationDefinition.getDockerRepository()) - .dockerImageTag(destinationDefinition.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinition.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinition.getIcon())) - .protocolVersion(destinationDefinition.getProtocolVersion()) - .releaseStage(ReleaseStage.fromValue(destinationDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinition.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new 
io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId = new DestinationDefinitionIdWithWorkspaceId() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .workspaceId(workspaceId); - - final DestinationDefinitionRead actualDestinationDefinitionRead = destinationDefinitionsHandler - .getDestinationDefinitionForWorkspace(destinationDefinitionIdWithWorkspaceId); - - assertEquals(expectedDestinationDefinitionRead, actualDestinationDefinitionRead); - } - - @Test - @DisplayName("createDestinationDefinition should not create a destinationDefinition with unsupported protocol version") - void testCreateDestinationDefinitionShouldCheckProtocolVersion() throws URISyntaxException, IOException, JsonValidationException { - final String invalidProtocolVersion = "121.5.6"; - final StandardDestinationDefinition destination = generateDestinationDefinition(); - destination.getSpec().setProtocolVersion(invalidProtocolVersion); - final String imageName = destination.getDockerRepository() + ":" + destination.getDockerImageTag(); - - when(uuidSupplier.get()).thenReturn(destination.getDestinationDefinitionId()); - when(schedulerSynchronousClient.createGetSpecJob(imageName, true)).thenReturn(new SynchronousResponse<>( - destination.getSpec(), - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final DestinationDefinitionCreate create = new DestinationDefinitionCreate() - .name(destination.getName()) - .dockerRepository(destination.getDockerRepository()) - .dockerImageTag(destination.getDockerImageTag()) - .documentationUrl(new 
URI(destination.getDocumentationUrl())) - .icon(destination.getIcon()) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destination.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final CustomDestinationDefinitionCreate customCreate = new CustomDestinationDefinitionCreate() - .destinationDefinition(create) - .workspaceId(workspaceId); - - assertThrows(UnsupportedProtocolVersionException.class, () -> destinationDefinitionsHandler.createCustomDestinationDefinition(customCreate)); - - verify(schedulerSynchronousClient).createGetSpecJob(imageName, true); - verify(configRepository, never()).writeStandardDestinationDefinition(destination - .withProtocolVersion(DEFAULT_PROTOCOL_VERSION) - .withReleaseDate(null) - .withReleaseStage(StandardDestinationDefinition.ReleaseStage.CUSTOM)); - } - - @Test - @DisplayName("createCustomDestinationDefinition should correctly create a destinationDefinition") - void testCreateCustomDestinationDefinition() throws URISyntaxException, IOException, JsonValidationException { - final StandardDestinationDefinition destination = generateDestinationDefinition(); - final String imageName = destination.getDockerRepository() + ":" + destination.getDockerImageTag(); - - when(uuidSupplier.get()).thenReturn(destination.getDestinationDefinitionId()); - when(schedulerSynchronousClient.createGetSpecJob(imageName, true)).thenReturn(new SynchronousResponse<>( - destination.getSpec(), - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final DestinationDefinitionCreate create = new DestinationDefinitionCreate() - .name(destination.getName()) - .dockerRepository(destination.getDockerRepository()) - .dockerImageTag(destination.getDockerImageTag()) - .documentationUrl(new URI(destination.getDocumentationUrl())) - .icon(destination.getIcon()) - .resourceRequirements(new 
io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destination.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final CustomDestinationDefinitionCreate customCreate = new CustomDestinationDefinitionCreate() - .destinationDefinition(create) - .workspaceId(workspaceId); - - final DestinationDefinitionRead expectedRead = new DestinationDefinitionRead() - .name(destination.getName()) - .dockerRepository(destination.getDockerRepository()) - .dockerImageTag(destination.getDockerImageTag()) - .documentationUrl(new URI(destination.getDocumentationUrl())) - .destinationDefinitionId(destination.getDestinationDefinitionId()) - .icon(DestinationDefinitionsHandler.loadIcon(destination.getIcon())) - .protocolVersion(DEFAULT_PROTOCOL_VERSION) - .releaseStage(ReleaseStage.CUSTOM) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destination.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final DestinationDefinitionRead actualRead = destinationDefinitionsHandler.createCustomDestinationDefinition(customCreate); - - assertEquals(expectedRead, actualRead); - verify(schedulerSynchronousClient).createGetSpecJob(imageName, true); - verify(configRepository).writeCustomDestinationDefinition( - destination - .withProtocolVersion(DEFAULT_PROTOCOL_VERSION) - .withReleaseDate(null) - .withReleaseStage(StandardDestinationDefinition.ReleaseStage.CUSTOM) - .withCustom(true), - workspaceId); - } - - @Test - @DisplayName("createCustomDestinationDefinition should not create a destinationDefinition with unsupported protocol 
range") - void testCreateCustomDestinationDefinitionWithInvalidProtocol() throws URISyntaxException, IOException, JsonValidationException { - final String invalidProtocol = "122.1.22"; - final StandardDestinationDefinition destination = generateDestinationDefinition(); - destination.getSpec().setProtocolVersion(invalidProtocol); - final String imageName = destination.getDockerRepository() + ":" + destination.getDockerImageTag(); - - when(uuidSupplier.get()).thenReturn(destination.getDestinationDefinitionId()); - when(schedulerSynchronousClient.createGetSpecJob(imageName, true)).thenReturn(new SynchronousResponse<>( - destination.getSpec(), - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final DestinationDefinitionCreate create = new DestinationDefinitionCreate() - .name(destination.getName()) - .dockerRepository(destination.getDockerRepository()) - .dockerImageTag(destination.getDockerImageTag()) - .documentationUrl(new URI(destination.getDocumentationUrl())) - .icon(destination.getIcon()) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destination.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final CustomDestinationDefinitionCreate customCreate = new CustomDestinationDefinitionCreate() - .destinationDefinition(create) - .workspaceId(workspaceId); - - assertThrows(UnsupportedProtocolVersionException.class, () -> destinationDefinitionsHandler.createCustomDestinationDefinition(customCreate)); - - verify(schedulerSynchronousClient).createGetSpecJob(imageName, true); - verify(configRepository, never()).writeCustomDestinationDefinition( - destination - .withProtocolVersion(invalidProtocol) - .withReleaseDate(null) - .withReleaseStage(StandardDestinationDefinition.ReleaseStage.CUSTOM) - .withCustom(true), - workspaceId); - } - - @Test - 
@DisplayName("updateDestinationDefinition should correctly update a destinationDefinition") - void testUpdateDestination() throws ConfigNotFoundException, IOException, JsonValidationException { - when(configRepository.getStandardDestinationDefinition(destinationDefinition.getDestinationDefinitionId())).thenReturn(destinationDefinition); - final DestinationDefinitionRead currentDestination = destinationDefinitionsHandler - .getDestinationDefinition( - new DestinationDefinitionIdRequestBody().destinationDefinitionId(destinationDefinition.getDestinationDefinitionId())); - final String currentTag = currentDestination.getDockerImageTag(); - final String newDockerImageTag = "averydifferenttag"; - final String newProtocolVersion = "0.2.4"; - assertNotEquals(newDockerImageTag, currentTag); - assertNotEquals(newProtocolVersion, currentDestination.getProtocolVersion()); - - final String newImageName = destinationDefinition.getDockerRepository() + ":" + newDockerImageTag; - final ConnectorSpecification newSpec = new ConnectorSpecification() - .withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo2", "bar2"))) - .withProtocolVersion(newProtocolVersion); - when(schedulerSynchronousClient.createGetSpecJob(newImageName, false)).thenReturn(new SynchronousResponse<>( - newSpec, - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final StandardDestinationDefinition updatedDestination = - Jsons.clone(destinationDefinition).withDockerImageTag(newDockerImageTag).withSpec(newSpec).withProtocolVersion(newProtocolVersion); - - final DestinationDefinitionRead destinationRead = destinationDefinitionsHandler.updateDestinationDefinition( - new DestinationDefinitionUpdate().destinationDefinitionId(this.destinationDefinition.getDestinationDefinitionId()) - .dockerImageTag(newDockerImageTag)); - - assertEquals(newDockerImageTag, destinationRead.getDockerImageTag()); - verify(schedulerSynchronousClient).createGetSpecJob(newImageName, false); - 
verify(configRepository).writeStandardDestinationDefinition(updatedDestination); - - verify(configRepository).clearUnsupportedProtocolVersionFlag(updatedDestination.getDestinationDefinitionId(), ActorType.DESTINATION, - protocolVersionRange); - } - - @Test - @DisplayName("updateDestinationDefinition should not update a destinationDefinition if protocol version is out of range") - void testOutOfProtocolRangeUpdateDestination() throws ConfigNotFoundException, IOException, JsonValidationException { - when(configRepository.getStandardDestinationDefinition(destinationDefinition.getDestinationDefinitionId())).thenReturn(destinationDefinition); - final DestinationDefinitionRead currentDestination = destinationDefinitionsHandler - .getDestinationDefinition( - new DestinationDefinitionIdRequestBody().destinationDefinitionId(destinationDefinition.getDestinationDefinitionId())); - final String currentTag = currentDestination.getDockerImageTag(); - final String newDockerImageTag = "averydifferenttagforprotocolversion"; - final String newProtocolVersion = "120.2.4"; - assertNotEquals(newDockerImageTag, currentTag); - assertNotEquals(newProtocolVersion, currentDestination.getProtocolVersion()); - - final String newImageName = destinationDefinition.getDockerRepository() + ":" + newDockerImageTag; - final ConnectorSpecification newSpec = new ConnectorSpecification() - .withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo2", "bar2"))) - .withProtocolVersion(newProtocolVersion); - when(schedulerSynchronousClient.createGetSpecJob(newImageName, false)).thenReturn(new SynchronousResponse<>( - newSpec, - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final StandardDestinationDefinition updatedDestination = - Jsons.clone(destinationDefinition).withDockerImageTag(newDockerImageTag).withSpec(newSpec).withProtocolVersion(newProtocolVersion); - - assertThrows(UnsupportedProtocolVersionException.class, () -> destinationDefinitionsHandler.updateDestinationDefinition( - new 
DestinationDefinitionUpdate().destinationDefinitionId(this.destinationDefinition.getDestinationDefinitionId()) - .dockerImageTag(newDockerImageTag))); - - verify(schedulerSynchronousClient).createGetSpecJob(newImageName, false); - verify(configRepository, never()).writeStandardDestinationDefinition(updatedDestination); - } - - @Test - @DisplayName("deleteDestinationDefinition should correctly delete a sourceDefinition") - void testDeleteDestinationDefinition() throws ConfigNotFoundException, IOException, JsonValidationException { - final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody = - new DestinationDefinitionIdRequestBody().destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()); - final StandardDestinationDefinition updatedDestinationDefinition = Jsons.clone(this.destinationDefinition).withTombstone(true); - final DestinationRead destination = new DestinationRead(); - - when(configRepository.getStandardDestinationDefinition(destinationDefinition.getDestinationDefinitionId())) - .thenReturn(destinationDefinition); - when(destinationHandler.listDestinationsForDestinationDefinition(destinationDefinitionIdRequestBody)) - .thenReturn(new DestinationReadList().destinations(Collections.singletonList(destination))); - - assertFalse(destinationDefinition.getTombstone()); - - destinationDefinitionsHandler.deleteDestinationDefinition(destinationDefinitionIdRequestBody); - - verify(destinationHandler).deleteDestination(destination); - verify(configRepository).writeStandardDestinationDefinition(updatedDestinationDefinition); - } - - @Test - @DisplayName("grantDestinationDefinitionToWorkspace should correctly create a workspace grant") - void testGrantDestinationDefinitionToWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException, URISyntaxException { - when(configRepository.getStandardDestinationDefinition(destinationDefinition.getDestinationDefinitionId())) - .thenReturn(destinationDefinition); - - final 
DestinationDefinitionRead expectedDestinationDefinitionRead = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .name(destinationDefinition.getName()) - .dockerRepository(destinationDefinition.getDockerRepository()) - .dockerImageTag(destinationDefinition.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinition.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinition.getIcon())) - .protocolVersion(destinationDefinition.getProtocolVersion()) - .releaseStage(ReleaseStage.fromValue(destinationDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinition.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final PrivateDestinationDefinitionRead expectedPrivateDestinationDefinitionRead = - new PrivateDestinationDefinitionRead().destinationDefinition(expectedDestinationDefinitionRead).granted(true); - - final PrivateDestinationDefinitionRead actualPrivateDestinationDefinitionRead = - destinationDefinitionsHandler.grantDestinationDefinitionToWorkspace( - new DestinationDefinitionIdWithWorkspaceId() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .workspaceId(workspaceId)); - - assertEquals(expectedPrivateDestinationDefinitionRead, actualPrivateDestinationDefinitionRead); - verify(configRepository).writeActorDefinitionWorkspaceGrant( - destinationDefinition.getDestinationDefinitionId(), - workspaceId); - } - - @Test - @DisplayName("revokeDestinationDefinitionFromWorkspace should correctly delete 
a workspace grant") - void testRevokeDestinationDefinitionFromWorkspace() throws IOException { - destinationDefinitionsHandler.revokeDestinationDefinitionFromWorkspace(new DestinationDefinitionIdWithWorkspaceId() - .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .workspaceId(workspaceId)); - verify(configRepository).deleteActorDefinitionWorkspaceGrant( - destinationDefinition.getDestinationDefinitionId(), - workspaceId); - } - - @Nested - @DisplayName("listLatest") - class listLatest { - - @Test - @DisplayName("should return the latest list") - void testCorrect() throws InterruptedException { - final StandardDestinationDefinition destinationDefinition = generateDestinationDefinition(); - when(githubStore.getLatestDestinations()).thenReturn(Collections.singletonList(destinationDefinition)); - - final var destinationDefinitionReadList = destinationDefinitionsHandler.listLatestDestinationDefinitions().getDestinationDefinitions(); - assertEquals(1, destinationDefinitionReadList.size()); - - final var destinationDefinitionRead = destinationDefinitionReadList.get(0); - assertEquals(DestinationDefinitionsHandler.buildDestinationDefinitionRead(destinationDefinition), destinationDefinitionRead); - } - - @Test - @DisplayName("returns empty collection if cannot find latest definitions") - void testHttpTimeout() { - assertEquals(0, destinationDefinitionsHandler.listLatestDestinationDefinitions().getDestinationDefinitions().size()); - } - - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java deleted file mode 100644 index 390d7a8b7e1a..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java +++ /dev/null @@ -1,370 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.DestinationCloneConfiguration; -import io.airbyte.api.model.generated.DestinationCloneRequestBody; -import io.airbyte.api.model.generated.DestinationCreate; -import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationReadList; -import io.airbyte.api.model.generated.DestinationSearch; -import io.airbyte.api.model.generated.DestinationUpdate; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.converters.ConfigurationUpdate; -import io.airbyte.commons.server.helpers.ConnectorSpecificationHelpers; -import io.airbyte.commons.server.helpers.DestinationHelpers; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; -import io.airbyte.persistence.job.factory.OAuthConfigSupplier; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import 
java.io.IOException; -import java.util.UUID; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DestinationHandlerTest { - - private ConfigRepository configRepository; - private SecretsRepositoryReader secretsRepositoryReader; - private SecretsRepositoryWriter secretsRepositoryWriter; - private StandardDestinationDefinition standardDestinationDefinition; - private DestinationDefinitionSpecificationRead destinationDefinitionSpecificationRead; - private DestinationConnection destinationConnection; - private DestinationHandler destinationHandler; - private ConnectionsHandler connectionsHandler; - private ConfigurationUpdate configurationUpdate; - private JsonSchemaValidator validator; - private Supplier uuidGenerator; - private JsonSecretsProcessor secretsProcessor; - private ConnectorSpecification connectorSpecification; - private OAuthConfigSupplier oAuthConfigSupplier; - - // needs to match name of file in src/test/resources/icons - private static final String ICON = "test-destination.svg"; - private static final String LOADED_ICON = DestinationDefinitionsHandler.loadIcon(ICON); - - @SuppressWarnings("unchecked") - @BeforeEach - void setUp() throws IOException { - configRepository = mock(ConfigRepository.class); - secretsRepositoryReader = mock(SecretsRepositoryReader.class); - secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); - validator = mock(JsonSchemaValidator.class); - uuidGenerator = mock(Supplier.class); - connectionsHandler = mock(ConnectionsHandler.class); - configurationUpdate = mock(ConfigurationUpdate.class); - secretsProcessor = mock(JsonSecretsProcessor.class); - oAuthConfigSupplier = mock(OAuthConfigSupplier.class); - - connectorSpecification = ConnectorSpecificationHelpers.generateConnectorSpecification(); - - standardDestinationDefinition = new StandardDestinationDefinition() - .withDestinationDefinitionId(UUID.randomUUID()) - .withName("db2") - 
.withDockerRepository("thebestrepo") - .withDockerImageTag("thelatesttag") - .withDocumentationUrl("https://wikipedia.org") - .withSpec(connectorSpecification) - .withIcon(ICON); - - destinationDefinitionSpecificationRead = new DestinationDefinitionSpecificationRead() - .connectionSpecification(connectorSpecification.getConnectionSpecification()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .documentationUrl(connectorSpecification.getDocumentationUrl().toString()); - - destinationConnection = DestinationHelpers.generateDestination(standardDestinationDefinition.getDestinationDefinitionId()); - - destinationHandler = - new DestinationHandler(configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - validator, - connectionsHandler, - uuidGenerator, - secretsProcessor, - configurationUpdate, - oAuthConfigSupplier); - } - - @Test - void testCreateDestination() throws JsonValidationException, ConfigNotFoundException, IOException { - when(uuidGenerator.get()) - .thenReturn(destinationConnection.getDestinationId()); - when(configRepository.getDestinationConnection(destinationConnection.getDestinationId())) - .thenReturn(destinationConnection); - when(configRepository.getStandardDestinationDefinition(standardDestinationDefinition.getDestinationDefinitionId())) - .thenReturn(standardDestinationDefinition); - when(oAuthConfigSupplier.maskDestinationOAuthParameters(destinationDefinitionSpecificationRead.getDestinationDefinitionId(), - destinationConnection.getWorkspaceId(), - destinationConnection.getConfiguration())).thenReturn(destinationConnection.getConfiguration()); - when(secretsProcessor.prepareSecretsForOutput(destinationConnection.getConfiguration(), - destinationDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(destinationConnection.getConfiguration()); - - final DestinationCreate destinationCreate = new DestinationCreate() - .name(destinationConnection.getName()) - 
.workspaceId(destinationConnection.getWorkspaceId()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .connectionConfiguration(DestinationHelpers.getTestDestinationJson()); - - final DestinationRead actualDestinationRead = - destinationHandler.createDestination(destinationCreate); - - final DestinationRead expectedDestinationRead = new DestinationRead() - .name(destinationConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(destinationConnection.getWorkspaceId()) - .destinationId(destinationConnection.getDestinationId()) - .connectionConfiguration(DestinationHelpers.getTestDestinationJson()) - .destinationName(standardDestinationDefinition.getName()) - .icon(LOADED_ICON); - - assertEquals(expectedDestinationRead, actualDestinationRead); - - verify(validator).ensure(destinationDefinitionSpecificationRead.getConnectionSpecification(), destinationConnection.getConfiguration()); - verify(secretsRepositoryWriter).writeDestinationConnection(destinationConnection, connectorSpecification); - verify(oAuthConfigSupplier).maskDestinationOAuthParameters(destinationDefinitionSpecificationRead.getDestinationDefinitionId(), - destinationConnection.getWorkspaceId(), destinationConnection.getConfiguration()); - verify(secretsProcessor) - .prepareSecretsForOutput(destinationConnection.getConfiguration(), destinationDefinitionSpecificationRead.getConnectionSpecification()); - } - - @Test - void testUpdateDestination() throws JsonValidationException, ConfigNotFoundException, IOException { - final String updatedDestName = "my updated dest name"; - final JsonNode newConfiguration = destinationConnection.getConfiguration(); - ((ObjectNode) newConfiguration).put("apiKey", "987-xyz"); - - final DestinationConnection expectedDestinationConnection = Jsons.clone(destinationConnection) - .withName(updatedDestName) - .withConfiguration(newConfiguration) - .withTombstone(false); - - 
final DestinationUpdate destinationUpdate = new DestinationUpdate() - .name(updatedDestName) - .destinationId(destinationConnection.getDestinationId()) - .connectionConfiguration(newConfiguration); - - when(secretsProcessor - .copySecrets(destinationConnection.getConfiguration(), newConfiguration, destinationDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(newConfiguration); - when(secretsProcessor.prepareSecretsForOutput(newConfiguration, destinationDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(newConfiguration); - when(oAuthConfigSupplier.maskDestinationOAuthParameters(destinationDefinitionSpecificationRead.getDestinationDefinitionId(), - destinationConnection.getWorkspaceId(), - newConfiguration)).thenReturn(newConfiguration); - when(configRepository.getStandardDestinationDefinition(standardDestinationDefinition.getDestinationDefinitionId())) - .thenReturn(standardDestinationDefinition); - when(configRepository.getDestinationDefinitionFromDestination(destinationConnection.getDestinationId())) - .thenReturn(standardDestinationDefinition); - when(configRepository.getDestinationConnection(destinationConnection.getDestinationId())) - .thenReturn(expectedDestinationConnection); - when(configurationUpdate.destination(destinationConnection.getDestinationId(), updatedDestName, newConfiguration)) - .thenReturn(expectedDestinationConnection); - - final DestinationRead actualDestinationRead = destinationHandler.updateDestination(destinationUpdate); - - final DestinationRead expectedDestinationRead = DestinationHelpers - .getDestinationRead(expectedDestinationConnection, standardDestinationDefinition).connectionConfiguration(newConfiguration); - - assertEquals(expectedDestinationRead, actualDestinationRead); - - verify(secretsProcessor).prepareSecretsForOutput(newConfiguration, destinationDefinitionSpecificationRead.getConnectionSpecification()); - 
verify(secretsRepositoryWriter).writeDestinationConnection(expectedDestinationConnection, connectorSpecification); - verify(oAuthConfigSupplier).maskDestinationOAuthParameters(destinationDefinitionSpecificationRead.getDestinationDefinitionId(), - destinationConnection.getWorkspaceId(), newConfiguration); - verify(validator).ensure(destinationDefinitionSpecificationRead.getConnectionSpecification(), newConfiguration); - } - - @Test - void testGetDestination() throws JsonValidationException, ConfigNotFoundException, IOException { - final DestinationRead expectedDestinationRead = new DestinationRead() - .name(destinationConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(destinationConnection.getWorkspaceId()) - .destinationId(destinationConnection.getDestinationId()) - .connectionConfiguration(destinationConnection.getConfiguration()) - .destinationName(standardDestinationDefinition.getName()) - .icon(LOADED_ICON); - final DestinationIdRequestBody destinationIdRequestBody = - new DestinationIdRequestBody().destinationId(expectedDestinationRead.getDestinationId()); - - when(secretsProcessor.prepareSecretsForOutput(destinationConnection.getConfiguration(), - destinationDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(destinationConnection.getConfiguration()); - when(configRepository.getDestinationConnection(destinationConnection.getDestinationId())).thenReturn(destinationConnection); - when(configRepository.getStandardDestinationDefinition(standardDestinationDefinition.getDestinationDefinitionId())) - .thenReturn(standardDestinationDefinition); - - final DestinationRead actualDestinationRead = destinationHandler.getDestination(destinationIdRequestBody); - - assertEquals(expectedDestinationRead, actualDestinationRead); - - // make sure the icon was loaded into actual svg content - assertTrue(expectedDestinationRead.getIcon().startsWith("")); - - verify(secretsProcessor) - 
.prepareSecretsForOutput(destinationConnection.getConfiguration(), destinationDefinitionSpecificationRead.getConnectionSpecification()); - } - - @Test - void testListDestinationForWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - final DestinationRead expectedDestinationRead = new DestinationRead() - .name(destinationConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(destinationConnection.getWorkspaceId()) - .destinationId(destinationConnection.getDestinationId()) - .connectionConfiguration(destinationConnection.getConfiguration()) - .destinationName(standardDestinationDefinition.getName()) - .icon(LOADED_ICON); - final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(destinationConnection.getWorkspaceId()); - - when(configRepository.getDestinationConnection(destinationConnection.getDestinationId())).thenReturn(destinationConnection); - when(configRepository.listWorkspaceDestinationConnection(destinationConnection.getWorkspaceId())) - .thenReturn(Lists.newArrayList(destinationConnection)); - when(configRepository.getStandardDestinationDefinition(standardDestinationDefinition.getDestinationDefinitionId())) - .thenReturn(standardDestinationDefinition); - when(secretsProcessor.prepareSecretsForOutput(destinationConnection.getConfiguration(), - destinationDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(destinationConnection.getConfiguration()); - - final DestinationReadList actualDestinationRead = destinationHandler.listDestinationsForWorkspace(workspaceIdRequestBody); - - assertEquals(expectedDestinationRead, actualDestinationRead.getDestinations().get(0)); - verify(secretsProcessor) - .prepareSecretsForOutput(destinationConnection.getConfiguration(), destinationDefinitionSpecificationRead.getConnectionSpecification()); - } - - @Test - void testSearchDestinations() throws JsonValidationException, 
ConfigNotFoundException, IOException { - final DestinationRead expectedDestinationRead = new DestinationRead() - .name(destinationConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(destinationConnection.getWorkspaceId()) - .destinationId(destinationConnection.getDestinationId()) - .connectionConfiguration(destinationConnection.getConfiguration()) - .destinationName(standardDestinationDefinition.getName()) - .icon(LOADED_ICON); - - when(configRepository.getDestinationConnection(destinationConnection.getDestinationId())).thenReturn(destinationConnection); - when(configRepository.listDestinationConnection()).thenReturn(Lists.newArrayList(destinationConnection)); - when(configRepository.getStandardDestinationDefinition(standardDestinationDefinition.getDestinationDefinitionId())) - .thenReturn(standardDestinationDefinition); - when(secretsProcessor.prepareSecretsForOutput(destinationConnection.getConfiguration(), - destinationDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(destinationConnection.getConfiguration()); - - when(connectionsHandler.matchSearch(new DestinationSearch(), expectedDestinationRead)).thenReturn(true); - DestinationReadList actualDestinationRead = destinationHandler.searchDestinations(new DestinationSearch()); - assertEquals(1, actualDestinationRead.getDestinations().size()); - assertEquals(expectedDestinationRead, actualDestinationRead.getDestinations().get(0)); - verify(secretsProcessor) - .prepareSecretsForOutput(destinationConnection.getConfiguration(), destinationDefinitionSpecificationRead.getConnectionSpecification()); - - when(connectionsHandler.matchSearch(new DestinationSearch(), expectedDestinationRead)).thenReturn(false); - actualDestinationRead = destinationHandler.searchDestinations(new DestinationSearch()); - assertEquals(0, actualDestinationRead.getDestinations().size()); - } - - @Test - void testCloneDestinationWithConfiguration() throws 
JsonValidationException, ConfigNotFoundException, IOException { - final DestinationConnection clonedConnection = DestinationHelpers.generateDestination(standardDestinationDefinition.getDestinationDefinitionId()); - final DestinationRead expectedDestinationRead = new DestinationRead() - .name(clonedConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(clonedConnection.getWorkspaceId()) - .destinationId(clonedConnection.getDestinationId()) - .connectionConfiguration(clonedConnection.getConfiguration()) - .destinationName(standardDestinationDefinition.getName()) - .icon(LOADED_ICON); - final DestinationRead destinationRead = new DestinationRead() - .name(destinationConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(destinationConnection.getWorkspaceId()) - .destinationId(destinationConnection.getDestinationId()) - .connectionConfiguration(destinationConnection.getConfiguration()) - .destinationName(standardDestinationDefinition.getName()); - - final DestinationCloneConfiguration destinationCloneConfiguration = new DestinationCloneConfiguration().name("Copy Name"); - final DestinationCloneRequestBody destinationCloneRequestBody = new DestinationCloneRequestBody() - .destinationCloneId(destinationRead.getDestinationId()).destinationConfiguration(destinationCloneConfiguration); - - when(uuidGenerator.get()).thenReturn(clonedConnection.getDestinationId()); - when(secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationConnection.getDestinationId())).thenReturn(destinationConnection); - when(configRepository.getDestinationConnection(clonedConnection.getDestinationId())).thenReturn(clonedConnection); - - when(configRepository.getStandardDestinationDefinition(destinationDefinitionSpecificationRead.getDestinationDefinitionId())) - .thenReturn(standardDestinationDefinition); - 
when(configRepository.getDestinationDefinitionFromDestination(destinationConnection.getDestinationId())) - .thenReturn(standardDestinationDefinition); - when(secretsProcessor.prepareSecretsForOutput(destinationConnection.getConfiguration(), - destinationDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(destinationConnection.getConfiguration()); - - final DestinationRead actualDestinationRead = destinationHandler.cloneDestination(destinationCloneRequestBody); - - assertEquals(expectedDestinationRead, actualDestinationRead); - } - - @Test - void testCloneDestinationWithoutConfiguration() throws JsonValidationException, ConfigNotFoundException, IOException { - final DestinationConnection clonedConnection = DestinationHelpers.generateDestination(standardDestinationDefinition.getDestinationDefinitionId()); - final DestinationRead expectedDestinationRead = new DestinationRead() - .name(clonedConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(clonedConnection.getWorkspaceId()) - .destinationId(clonedConnection.getDestinationId()) - .connectionConfiguration(clonedConnection.getConfiguration()) - .destinationName(standardDestinationDefinition.getName()) - .icon(LOADED_ICON); - final DestinationRead destinationRead = new DestinationRead() - .name(destinationConnection.getName()) - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(destinationConnection.getWorkspaceId()) - .destinationId(destinationConnection.getDestinationId()) - .connectionConfiguration(destinationConnection.getConfiguration()) - .destinationName(standardDestinationDefinition.getName()); - - final DestinationCloneRequestBody destinationCloneRequestBody = - new DestinationCloneRequestBody().destinationCloneId(destinationRead.getDestinationId()); - - when(uuidGenerator.get()).thenReturn(clonedConnection.getDestinationId()); - 
when(secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationConnection.getDestinationId())).thenReturn(destinationConnection); - when(configRepository.getDestinationConnection(clonedConnection.getDestinationId())).thenReturn(clonedConnection); - - when(configRepository.getStandardDestinationDefinition(destinationDefinitionSpecificationRead.getDestinationDefinitionId())) - .thenReturn(standardDestinationDefinition); - when(configRepository.getDestinationDefinitionFromDestination(destinationConnection.getDestinationId())) - .thenReturn(standardDestinationDefinition); - when(secretsProcessor.prepareSecretsForOutput(destinationConnection.getConfiguration(), - destinationDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(destinationConnection.getConfiguration()); - - final DestinationRead actualDestinationRead = destinationHandler.cloneDestination(destinationCloneRequestBody); - - assertEquals(expectedDestinationRead, actualDestinationRead); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/HealthCheckHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/HealthCheckHandlerTest.java deleted file mode 100644 index 8b010efa63ec..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/HealthCheckHandlerTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.HealthCheckRead; -import io.airbyte.config.persistence.ConfigRepository; -import org.junit.jupiter.api.Test; - -class HealthCheckHandlerTest { - - @Test - void testDbHealthSucceed() { - final var mRepository = mock(ConfigRepository.class); - when(mRepository.healthCheck()).thenReturn(true); - - final HealthCheckHandler healthCheckHandler = new HealthCheckHandler(mRepository); - assertEquals(new HealthCheckRead().available(true), healthCheckHandler.health()); - } - - @Test - void testDbHealthFail() { - final var mRepository = mock(ConfigRepository.class); - when(mRepository.healthCheck()).thenReturn(false); - - final HealthCheckHandler healthCheckHandler = new HealthCheckHandler(mRepository); - assertEquals(new HealthCheckRead().available(false), healthCheckHandler.health()); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java deleted file mode 100644 index 40465731df84..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java +++ /dev/null @@ -1,456 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.google.common.collect.ImmutableList; -import io.airbyte.api.model.generated.AttemptInfoRead; -import io.airbyte.api.model.generated.AttemptNormalizationStatusRead; -import io.airbyte.api.model.generated.AttemptNormalizationStatusReadList; -import io.airbyte.api.model.generated.AttemptRead; -import io.airbyte.api.model.generated.AttemptStreamStats; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.JobConfigType; -import io.airbyte.api.model.generated.JobDebugInfoRead; -import io.airbyte.api.model.generated.JobDebugRead; -import io.airbyte.api.model.generated.JobIdRequestBody; -import io.airbyte.api.model.generated.JobInfoLightRead; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.JobListRequestBody; -import io.airbyte.api.model.generated.JobRead; -import io.airbyte.api.model.generated.JobReadList; -import io.airbyte.api.model.generated.JobWithAttemptsRead; -import io.airbyte.api.model.generated.LogRead; -import io.airbyte.api.model.generated.Pagination; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.converters.JobConverter; -import io.airbyte.commons.server.helpers.ConnectionHelpers; -import io.airbyte.commons.server.helpers.DestinationDefinitionHelpers; -import io.airbyte.commons.server.helpers.DestinationHelpers; -import 
io.airbyte.commons.server.helpers.SourceDefinitionHelpers; -import io.airbyte.commons.server.helpers.SourceHelpers; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobCheckConnectionConfig; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.JobPersistence.AttemptStats; -import io.airbyte.persistence.job.JobPersistence.JobAttemptPair; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptNormalizationStatus; -import io.airbyte.persistence.job.models.AttemptStatus; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.URISyntaxException; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -@DisplayName("Job History Handler") -class JobHistoryHandlerTest { - - private static final long JOB_ID = 100L; - private static final int ATTEMPT_NUMBER = 0; - private static 
final String JOB_CONFIG_ID = "ef296385-6796-413f-ac1b-49c4caba3f2b"; - private static final JobStatus JOB_STATUS = JobStatus.SUCCEEDED; - private static final JobConfig.ConfigType CONFIG_TYPE = JobConfig.ConfigType.CHECK_CONNECTION_SOURCE; - private static final JobConfigType CONFIG_TYPE_FOR_API = JobConfigType.CHECK_CONNECTION_SOURCE; - private static final JobConfig JOB_CONFIG = new JobConfig() - .withConfigType(CONFIG_TYPE) - .withCheckConnection(new JobCheckConnectionConfig()); - private static final Path LOG_PATH = Path.of("log_path"); - private static final LogRead EMPTY_LOG_READ = new LogRead().logLines(new ArrayList<>()); - private static final long CREATED_AT = System.currentTimeMillis() / 1000; - - private static final AttemptStats ATTEMPT_STATS = new AttemptStats(new SyncStats().withBytesEmitted(10L).withRecordsEmitted(10L), - List.of( - new StreamSyncStats().withStreamNamespace("ns1").withStreamName("stream1") - .withStats(new SyncStats().withRecordsEmitted(5L).withBytesEmitted(5L)), - new StreamSyncStats().withStreamName("stream2") - .withStats(new SyncStats().withRecordsEmitted(5L).withBytesEmitted(5L)))); - - private static final io.airbyte.api.model.generated.AttemptStats ATTEMPT_STATS_API = new io.airbyte.api.model.generated.AttemptStats() - .bytesEmitted(10L).recordsEmitted(10L); - - private static final List ATTEMPT_STREAM_STATS = List.of( - new AttemptStreamStats().streamNamespace("ns1").streamName("stream1") - .stats(new io.airbyte.api.model.generated.AttemptStats().recordsEmitted(5L).bytesEmitted(5L)), - new AttemptStreamStats().streamName("stream2").stats(new io.airbyte.api.model.generated.AttemptStats().recordsEmitted(5L).bytesEmitted(5L))); - - private ConnectionsHandler connectionsHandler; - private SourceHandler sourceHandler; - private DestinationHandler destinationHandler; - private Job testJob; - private Attempt testJobAttempt; - private JobPersistence jobPersistence; - private JobHistoryHandler jobHistoryHandler; - - private static 
JobRead toJobInfo(final Job job) { - return new JobRead().id(job.getId()) - .configId(job.getScope()) - .status(Enums.convertTo(job.getStatus(), io.airbyte.api.model.generated.JobStatus.class)) - .configType(Enums.convertTo(job.getConfigType(), io.airbyte.api.model.generated.JobConfigType.class)) - .createdAt(job.getCreatedAtInSecond()) - .updatedAt(job.getUpdatedAtInSecond()); - - } - - private static JobDebugRead toDebugJobInfo(final Job job) { - return new JobDebugRead().id(job.getId()) - .configId(job.getScope()) - .status(Enums.convertTo(job.getStatus(), io.airbyte.api.model.generated.JobStatus.class)) - .configType(Enums.convertTo(job.getConfigType(), io.airbyte.api.model.generated.JobConfigType.class)) - .sourceDefinition(null) - .destinationDefinition(null); - - } - - private static List toAttemptInfoList(final List attempts) { - final List attemptReads = attempts.stream().map(JobHistoryHandlerTest::toAttemptRead).collect(Collectors.toList()); - - final Function toAttemptInfoRead = (AttemptRead a) -> new AttemptInfoRead().attempt(a).logs(EMPTY_LOG_READ); - return attemptReads.stream().map(toAttemptInfoRead).collect(Collectors.toList()); - } - - private static AttemptRead toAttemptRead(final Attempt a) { - return new AttemptRead() - .id((long) a.getAttemptNumber()) - .status(Enums.convertTo(a.getStatus(), io.airbyte.api.model.generated.AttemptStatus.class)) - .createdAt(a.getCreatedAtInSecond()) - .updatedAt(a.getUpdatedAtInSecond()) - .endedAt(a.getEndedAtInSecond().orElse(null)); - } - - private static Attempt createAttempt(final long jobId, final long timestamps, final AttemptStatus status) { - return new Attempt(ATTEMPT_NUMBER, jobId, LOG_PATH, null, null, status, null, null, timestamps, timestamps, timestamps); - } - - @BeforeEach - void setUp() { - testJobAttempt = createAttempt(JOB_ID, CREATED_AT, AttemptStatus.SUCCEEDED); - testJob = new Job(JOB_ID, JOB_CONFIG.getConfigType(), JOB_CONFIG_ID, JOB_CONFIG, ImmutableList.of(testJobAttempt), JOB_STATUS, 
null, CREATED_AT, - CREATED_AT); - - connectionsHandler = mock(ConnectionsHandler.class); - sourceHandler = mock(SourceHandler.class); - destinationHandler = mock(DestinationHandler.class); - jobPersistence = mock(JobPersistence.class); - final SourceDefinitionsHandler sourceDefinitionsHandler = mock(SourceDefinitionsHandler.class); - final DestinationDefinitionsHandler destinationDefinitionsHandler = mock(DestinationDefinitionsHandler.class); - final AirbyteVersion airbyteVersion = mock(AirbyteVersion.class); - jobHistoryHandler = new JobHistoryHandler(jobPersistence, WorkerEnvironment.DOCKER, LogConfigs.EMPTY, connectionsHandler, sourceHandler, - sourceDefinitionsHandler, destinationHandler, destinationDefinitionsHandler, airbyteVersion); - } - - @Nested - @DisplayName("When listing jobs") - class ListJobs { - - @Test - @DisplayName("Should return jobs with/without attempts in descending order") - void testListJobs() throws IOException { - final var successfulJob = testJob; - final int pagesize = 25; - final int rowOffset = 0; - - final var jobId2 = JOB_ID + 100; - final var createdAt2 = CREATED_AT + 1000; - final var latestJobNoAttempt = - new Job(jobId2, JOB_CONFIG.getConfigType(), JOB_CONFIG_ID, JOB_CONFIG, Collections.emptyList(), JobStatus.PENDING, - null, createdAt2, createdAt2); - - when(jobPersistence.listJobs(Set.of(Enums.convertTo(CONFIG_TYPE_FOR_API, ConfigType.class)), JOB_CONFIG_ID, pagesize, rowOffset)) - .thenReturn(List.of(latestJobNoAttempt, successfulJob)); - when(jobPersistence.getJobCount(Set.of(Enums.convertTo(CONFIG_TYPE_FOR_API, ConfigType.class)), JOB_CONFIG_ID)).thenReturn(2L); - when(jobPersistence.getAttemptStats(List.of(200L, 100L))).thenReturn(Map.of( - new JobAttemptPair(100, 0), ATTEMPT_STATS, - new JobAttemptPair(jobId2, 0), ATTEMPT_STATS)); - - final var requestBody = new JobListRequestBody() - .configTypes(Collections.singletonList(CONFIG_TYPE_FOR_API)) - .configId(JOB_CONFIG_ID) - .pagination(new 
Pagination().pageSize(pagesize).rowOffset(rowOffset)); - final var jobReadList = jobHistoryHandler.listJobsFor(requestBody); - - final var expAttemptRead = toAttemptRead(testJobAttempt).totalStats(ATTEMPT_STATS_API).streamStats(ATTEMPT_STREAM_STATS); - final var successfulJobWithAttemptRead = new JobWithAttemptsRead().job(toJobInfo(successfulJob)).attempts(ImmutableList.of(expAttemptRead)); - final var latestJobWithAttemptRead = new JobWithAttemptsRead().job(toJobInfo(latestJobNoAttempt)).attempts(Collections.emptyList()); - final JobReadList expectedJobReadList = - new JobReadList().jobs(List.of(latestJobWithAttemptRead, successfulJobWithAttemptRead)).totalJobCount(2L); - - assertEquals(expectedJobReadList, jobReadList); - } - - @Test - @DisplayName("Should return jobs in descending order regardless of type") - void testListJobsFor() throws IOException { - final var firstJob = testJob; - final int pagesize = 25; - final int rowOffset = 0; - - final var secondJobId = JOB_ID + 100; - final var createdAt2 = CREATED_AT + 1000; - final var secondJobAttempt = createAttempt(secondJobId, createdAt2, AttemptStatus.SUCCEEDED); - final var secondJob = new Job(secondJobId, ConfigType.DISCOVER_SCHEMA, JOB_CONFIG_ID, JOB_CONFIG, ImmutableList.of(secondJobAttempt), - JobStatus.SUCCEEDED, null, createdAt2, createdAt2); - - final Set configTypes = Set.of( - Enums.convertTo(CONFIG_TYPE_FOR_API, ConfigType.class), - Enums.convertTo(JobConfigType.SYNC, ConfigType.class), - Enums.convertTo(JobConfigType.DISCOVER_SCHEMA, ConfigType.class)); - - final var latestJobId = secondJobId + 100; - final var createdAt3 = createdAt2 + 1000; - final var latestJob = - new Job(latestJobId, ConfigType.SYNC, JOB_CONFIG_ID, JOB_CONFIG, Collections.emptyList(), JobStatus.PENDING, null, createdAt3, createdAt3); - - when(jobPersistence.listJobs(configTypes, JOB_CONFIG_ID, pagesize, rowOffset)).thenReturn(List.of(latestJob, secondJob, firstJob)); - when(jobPersistence.getJobCount(configTypes, 
JOB_CONFIG_ID)).thenReturn(3L); - when(jobPersistence.getAttemptStats(List.of(300L, 200L, 100L))).thenReturn(Map.of( - new JobAttemptPair(100, 0), ATTEMPT_STATS, - new JobAttemptPair(secondJobId, 0), ATTEMPT_STATS, - new JobAttemptPair(latestJobId, 0), ATTEMPT_STATS)); - - final JobListRequestBody requestBody = new JobListRequestBody() - .configTypes(List.of(CONFIG_TYPE_FOR_API, JobConfigType.SYNC, JobConfigType.DISCOVER_SCHEMA)) - .configId(JOB_CONFIG_ID) - .pagination(new Pagination().pageSize(pagesize).rowOffset(rowOffset)); - final JobReadList jobReadList = jobHistoryHandler.listJobsFor(requestBody); - - final var firstJobWithAttemptRead = - new JobWithAttemptsRead().job(toJobInfo(firstJob)) - .attempts(ImmutableList.of(toAttemptRead(testJobAttempt).totalStats(ATTEMPT_STATS_API).streamStats(ATTEMPT_STREAM_STATS))); - final var secondJobWithAttemptRead = - new JobWithAttemptsRead().job(toJobInfo(secondJob)) - .attempts(ImmutableList.of(toAttemptRead(secondJobAttempt).totalStats(ATTEMPT_STATS_API).streamStats(ATTEMPT_STREAM_STATS))); - final var latestJobWithAttemptRead = new JobWithAttemptsRead().job(toJobInfo(latestJob)).attempts(Collections.emptyList()); - final JobReadList expectedJobReadList = - new JobReadList().jobs(List.of(latestJobWithAttemptRead, secondJobWithAttemptRead, firstJobWithAttemptRead)).totalJobCount(3L); - - assertEquals(expectedJobReadList, jobReadList); - } - - @Test - @DisplayName("Should return jobs including specified job id") - void testListJobsIncludingJobId() throws IOException { - final var successfulJob = testJob; - final int pagesize = 25; - final int rowOffset = 0; - - final var jobId2 = JOB_ID + 100; - final var createdAt2 = CREATED_AT + 1000; - final var latestJobNoAttempt = - new Job(jobId2, JOB_CONFIG.getConfigType(), JOB_CONFIG_ID, JOB_CONFIG, Collections.emptyList(), JobStatus.PENDING, - null, createdAt2, createdAt2); - - when(jobPersistence.listJobsIncludingId(Set.of(Enums.convertTo(CONFIG_TYPE_FOR_API, ConfigType.class)), 
JOB_CONFIG_ID, jobId2, pagesize)) - .thenReturn(List.of(latestJobNoAttempt, successfulJob)); - when(jobPersistence.getJobCount(Set.of(Enums.convertTo(CONFIG_TYPE_FOR_API, ConfigType.class)), JOB_CONFIG_ID)).thenReturn(2L); - when(jobPersistence.getAttemptStats(List.of(200L, 100L))).thenReturn(Map.of( - new JobAttemptPair(100, 0), ATTEMPT_STATS, - new JobAttemptPair(jobId2, 0), ATTEMPT_STATS)); - - final var requestBody = new JobListRequestBody() - .configTypes(Collections.singletonList(CONFIG_TYPE_FOR_API)) - .configId(JOB_CONFIG_ID) - .includingJobId(jobId2) - .pagination(new Pagination().pageSize(pagesize).rowOffset(rowOffset)); - final var jobReadList = jobHistoryHandler.listJobsFor(requestBody); - - final var successfulJobWithAttemptRead = new JobWithAttemptsRead().job(toJobInfo(successfulJob)).attempts(ImmutableList.of(toAttemptRead( - testJobAttempt).totalStats(ATTEMPT_STATS_API).streamStats(ATTEMPT_STREAM_STATS))); - final var latestJobWithAttemptRead = new JobWithAttemptsRead().job(toJobInfo(latestJobNoAttempt)).attempts(Collections.emptyList()); - final JobReadList expectedJobReadList = - new JobReadList().jobs(List.of(latestJobWithAttemptRead, successfulJobWithAttemptRead)).totalJobCount(2L); - - assertEquals(expectedJobReadList, jobReadList); - } - - } - - @Test - @DisplayName("Should return the right job info") - void testGetJobInfo() throws IOException { - when(jobPersistence.getJob(JOB_ID)).thenReturn(testJob); - - final JobIdRequestBody requestBody = new JobIdRequestBody().id(JOB_ID); - final JobInfoRead jobInfoActual = jobHistoryHandler.getJobInfo(requestBody); - - final JobInfoRead exp = new JobInfoRead().job(toJobInfo(testJob)).attempts(toAttemptInfoList(ImmutableList.of(testJobAttempt))); - - assertEquals(exp, jobInfoActual); - } - - @Test - @DisplayName("Should return the right job info without attempt information") - void testGetJobInfoLight() throws IOException { - when(jobPersistence.getJob(JOB_ID)).thenReturn(testJob); - - final 
JobIdRequestBody requestBody = new JobIdRequestBody().id(JOB_ID); - final JobInfoLightRead jobInfoLightActual = jobHistoryHandler.getJobInfoLight(requestBody); - - final JobInfoLightRead exp = new JobInfoLightRead().job(toJobInfo(testJob)); - - assertEquals(exp, jobInfoLightActual); - } - - @Test - @DisplayName("Should return the right info to debug this job") - void testGetDebugJobInfo() throws IOException, JsonValidationException, ConfigNotFoundException, URISyntaxException { - final StandardSourceDefinition standardSourceDefinition = SourceDefinitionHelpers.generateSourceDefinition(); - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final SourceRead sourceRead = SourceHelpers.getSourceRead(source, standardSourceDefinition); - - final StandardDestinationDefinition standardDestinationDefinition = DestinationDefinitionHelpers.generateDestination(); - final DestinationConnection destination = DestinationHelpers.generateDestination(UUID.randomUUID()); - final DestinationRead destinationRead = DestinationHelpers.getDestinationRead(destination, standardDestinationDefinition); - - final StandardSync standardSync = ConnectionHelpers.generateSyncWithSourceId(source.getSourceId()); - final ConnectionRead connectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); - when(connectionsHandler.getConnection(UUID.fromString(testJob.getScope()))).thenReturn(connectionRead); - - final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody(); - sourceIdRequestBody.setSourceId(connectionRead.getSourceId()); - when(sourceHandler.getSource(sourceIdRequestBody)).thenReturn(sourceRead); - - final DestinationIdRequestBody destinationIdRequestBody = new DestinationIdRequestBody(); - destinationIdRequestBody.setDestinationId(connectionRead.getDestinationId()); - when(destinationHandler.getDestination(destinationIdRequestBody)).thenReturn(destinationRead); - when(jobPersistence.getJob(JOB_ID)).thenReturn(testJob); - 
when(jobPersistence.getAttemptStats(anyLong(), anyInt())).thenReturn(ATTEMPT_STATS); - - final JobIdRequestBody requestBody = new JobIdRequestBody().id(JOB_ID); - final JobDebugInfoRead jobDebugInfoActual = jobHistoryHandler.getJobDebugInfo(requestBody); - final List attemptInfoReads = toAttemptInfoList(ImmutableList.of(testJobAttempt)); - attemptInfoReads.forEach(read -> read.getAttempt().totalStats(ATTEMPT_STATS_API).streamStats(ATTEMPT_STREAM_STATS)); - final JobDebugInfoRead exp = new JobDebugInfoRead().job(toDebugJobInfo(testJob)).attempts(attemptInfoReads); - - assertEquals(exp, jobDebugInfoActual); - } - - @Test - @DisplayName("Should return the latest running sync job") - void testGetLatestRunningSyncJob() throws IOException { - final var connectionId = UUID.randomUUID(); - - final var olderRunningJobId = JOB_ID + 100; - final var olderRunningCreatedAt = CREATED_AT + 1000; - final var olderRunningJobAttempt = createAttempt(olderRunningJobId, olderRunningCreatedAt, AttemptStatus.RUNNING); - final var olderRunningJob = new Job(olderRunningJobId, ConfigType.SYNC, JOB_CONFIG_ID, - JOB_CONFIG, ImmutableList.of(olderRunningJobAttempt), - JobStatus.RUNNING, null, olderRunningCreatedAt, olderRunningCreatedAt); - - // expect that we return the newer of the two running jobs. this should not happen in the real - // world but might as - // well test that we handle it properly. 
- final var newerRunningJobId = JOB_ID + 200; - final var newerRunningCreatedAt = CREATED_AT + 2000; - final var newerRunningJobAttempt = createAttempt(newerRunningJobId, newerRunningCreatedAt, AttemptStatus.RUNNING); - final var newerRunningJob = new Job(newerRunningJobId, ConfigType.SYNC, JOB_CONFIG_ID, - JOB_CONFIG, ImmutableList.of(newerRunningJobAttempt), - JobStatus.RUNNING, null, newerRunningCreatedAt, newerRunningCreatedAt); - - when(jobPersistence.listJobsForConnectionWithStatuses( - connectionId, - Collections.singleton(ConfigType.SYNC), - JobStatus.NON_TERMINAL_STATUSES)).thenReturn(List.of(newerRunningJob, olderRunningJob)); - - final Optional expectedJob = Optional.of(JobConverter.getJobRead(newerRunningJob)); - final Optional actualJob = jobHistoryHandler.getLatestRunningSyncJob(connectionId); - - assertEquals(expectedJob, actualJob); - } - - @Test - @DisplayName("Should return an empty optional if no running sync job") - void testGetLatestRunningSyncJobWhenNone() throws IOException { - final var connectionId = UUID.randomUUID(); - - when(jobPersistence.listJobsForConnectionWithStatuses( - connectionId, - Collections.singleton(ConfigType.SYNC), - JobStatus.NON_TERMINAL_STATUSES)).thenReturn(Collections.emptyList()); - - final Optional actual = jobHistoryHandler.getLatestRunningSyncJob(connectionId); - - assertTrue(actual.isEmpty()); - } - - @Test - @DisplayName("Should return the latest sync job") - void testGetLatestSyncJob() throws IOException { - final var connectionId = UUID.randomUUID(); - - // expect the newest job overall to be returned, even if it is failed - final var newerFailedJobId = JOB_ID + 200; - final var newerFailedCreatedAt = CREATED_AT + 2000; - final var newerFailedJobAttempt = createAttempt(newerFailedJobId, newerFailedCreatedAt, AttemptStatus.FAILED); - final var newerFailedJob = new Job(newerFailedJobId, ConfigType.SYNC, JOB_CONFIG_ID, - JOB_CONFIG, ImmutableList.of(newerFailedJobAttempt), - JobStatus.RUNNING, null, 
newerFailedCreatedAt, newerFailedCreatedAt); - - when(jobPersistence.getLastSyncJob(connectionId)).thenReturn(Optional.of(newerFailedJob)); - - final Optional expectedJob = Optional.of(JobConverter.getJobRead(newerFailedJob)); - final Optional actualJob = jobHistoryHandler.getLatestSyncJob(connectionId); - - assertEquals(expectedJob, actualJob); - } - - @Test - @DisplayName("Should have compatible config enums") - void testEnumConversion() { - assertTrue(Enums.isCompatible(JobConfig.ConfigType.class, JobConfigType.class)); - } - - @Test - @DisplayName("Should return attempt normalization info for the job") - void testGetAttemptNormalizationStatuses() throws IOException { - - final AttemptNormalizationStatus databaseReadResult = new AttemptNormalizationStatus(1, Optional.of(10L), /* hasNormalizationFailed= */ false); - - when(jobPersistence.getAttemptNormalizationStatusesForJob(JOB_ID)).thenReturn(List.of(databaseReadResult)); - - final AttemptNormalizationStatusReadList expectedStatus = new AttemptNormalizationStatusReadList().attemptNormalizationStatuses( - List.of(new AttemptNormalizationStatusRead().attemptNumber(1).hasRecordsCommitted(true).hasNormalizationFailed(false).recordsCommitted(10L))); - - assertEquals(expectedStatus, jobHistoryHandler.getAttemptNormalizationStatuses(new JobIdRequestBody().id(JOB_ID))); - - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/LogsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/LogsHandlerTest.java deleted file mode 100644 index eb19d78fba13..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/LogsHandlerTest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.LogType; -import io.airbyte.api.model.generated.LogsRequestBody; -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import java.io.File; -import java.nio.file.Path; -import org.junit.jupiter.api.Test; - -class LogsHandlerTest { - - @Test - void testServerLogs() { - final Configs configs = mock(Configs.class); - when(configs.getWorkspaceRoot()).thenReturn(Path.of("/workspace")); - when(configs.getWorkerEnvironment()).thenReturn(WorkerEnvironment.DOCKER); - when(configs.getLogConfigs()).thenReturn(LogConfigs.EMPTY); - - final File expected = Path.of(String.format("/workspace/server/logs/%s", LogClientSingleton.LOG_FILENAME)).toFile(); - final File actual = new LogsHandler(configs).getLogs(new LogsRequestBody().logType(LogType.SERVER)); - - assertEquals(expected, actual); - } - - @Test - void testSchedulerLogs() { - final Configs configs = mock(Configs.class); - when(configs.getWorkspaceRoot()).thenReturn(Path.of("/workspace")); - when(configs.getWorkerEnvironment()).thenReturn(WorkerEnvironment.DOCKER); - when(configs.getLogConfigs()).thenReturn(LogConfigs.EMPTY); - - final File expected = Path.of(String.format("/workspace/scheduler/logs/%s", LogClientSingleton.LOG_FILENAME)).toFile(); - final File actual = new LogsHandler(configs).getLogs(new LogsRequestBody().logType(LogType.SCHEDULER)); - - assertEquals(expected, actual); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java deleted file mode 100644 index a3ca6b98aa82..000000000000 --- 
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody; -import io.airbyte.api.model.generated.SetInstancewideSourceOauthParamsRequestBody; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; - -class OAuthHandlerTest { - - private ConfigRepository configRepository; - private OAuthHandler handler; - private TrackingClient trackingClient; - private HttpClient httpClient; - private SecretsRepositoryReader secretsRepositoryReader; - private static final String CLIENT_ID = "123"; - private static final String CLIENT_ID_KEY = "client_id"; - private static final String CLIENT_SECRET_KEY = "client_secret"; - private static final String CLIENT_SECRET = "hunter2"; - - @BeforeEach - public void init() { - configRepository = Mockito.mock(ConfigRepository.class); - trackingClient = mock(TrackingClient.class); - httpClient = Mockito.mock(HttpClient.class); - secretsRepositoryReader 
= mock(SecretsRepositoryReader.class); - handler = new OAuthHandler(configRepository, httpClient, trackingClient, secretsRepositoryReader); - } - - @Test - void setSourceInstancewideOauthParams() throws JsonValidationException, IOException { - final UUID sourceDefId = UUID.randomUUID(); - final Map params = new HashMap<>(); - params.put(CLIENT_ID_KEY, CLIENT_ID); - params.put(CLIENT_SECRET_KEY, CLIENT_SECRET); - - final SetInstancewideSourceOauthParamsRequestBody actualRequest = new SetInstancewideSourceOauthParamsRequestBody() - .sourceDefinitionId(sourceDefId) - .params(params); - - handler.setSourceInstancewideOauthParams(actualRequest); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(SourceOAuthParameter.class); - Mockito.verify(configRepository).writeSourceOAuthParam(argument.capture()); - assertEquals(Jsons.jsonNode(params), argument.getValue().getConfiguration()); - assertEquals(sourceDefId, argument.getValue().getSourceDefinitionId()); - } - - @Test - void resetSourceInstancewideOauthParams() throws JsonValidationException, IOException { - final UUID sourceDefId = UUID.randomUUID(); - final Map firstParams = new HashMap<>(); - firstParams.put(CLIENT_ID_KEY, CLIENT_ID); - firstParams.put(CLIENT_SECRET_KEY, CLIENT_SECRET); - final SetInstancewideSourceOauthParamsRequestBody firstRequest = new SetInstancewideSourceOauthParamsRequestBody() - .sourceDefinitionId(sourceDefId) - .params(firstParams); - handler.setSourceInstancewideOauthParams(firstRequest); - - final UUID oauthParameterId = UUID.randomUUID(); - when(configRepository.getSourceOAuthParamByDefinitionIdOptional(null, sourceDefId)) - .thenReturn(Optional.of(new SourceOAuthParameter().withOauthParameterId(oauthParameterId))); - - final Map secondParams = new HashMap<>(); - secondParams.put(CLIENT_ID_KEY, "456"); - secondParams.put(CLIENT_SECRET_KEY, "hunter3"); - final SetInstancewideSourceOauthParamsRequestBody secondRequest = new SetInstancewideSourceOauthParamsRequestBody() - 
.sourceDefinitionId(sourceDefId) - .params(secondParams); - handler.setSourceInstancewideOauthParams(secondRequest); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(SourceOAuthParameter.class); - Mockito.verify(configRepository, Mockito.times(2)).writeSourceOAuthParam(argument.capture()); - final List capturedValues = argument.getAllValues(); - assertEquals(Jsons.jsonNode(firstParams), capturedValues.get(0).getConfiguration()); - assertEquals(Jsons.jsonNode(secondParams), capturedValues.get(1).getConfiguration()); - assertEquals(sourceDefId, capturedValues.get(0).getSourceDefinitionId()); - assertEquals(sourceDefId, capturedValues.get(1).getSourceDefinitionId()); - assertEquals(oauthParameterId, capturedValues.get(1).getOauthParameterId()); - } - - @Test - void setDestinationInstancewideOauthParams() throws JsonValidationException, IOException { - final UUID destinationDefId = UUID.randomUUID(); - final Map params = new HashMap<>(); - params.put(CLIENT_ID_KEY, CLIENT_ID); - params.put(CLIENT_SECRET_KEY, CLIENT_SECRET); - - final SetInstancewideDestinationOauthParamsRequestBody actualRequest = new SetInstancewideDestinationOauthParamsRequestBody() - .destinationDefinitionId(destinationDefId) - .params(params); - - handler.setDestinationInstancewideOauthParams(actualRequest); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(DestinationOAuthParameter.class); - Mockito.verify(configRepository).writeDestinationOAuthParam(argument.capture()); - assertEquals(Jsons.jsonNode(params), argument.getValue().getConfiguration()); - assertEquals(destinationDefId, argument.getValue().getDestinationDefinitionId()); - } - - @Test - void resetDestinationInstancewideOauthParams() throws JsonValidationException, IOException { - final UUID destinationDefId = UUID.randomUUID(); - final Map firstParams = new HashMap<>(); - firstParams.put(CLIENT_ID_KEY, CLIENT_ID); - firstParams.put(CLIENT_SECRET_KEY, CLIENT_SECRET); - final 
SetInstancewideDestinationOauthParamsRequestBody firstRequest = new SetInstancewideDestinationOauthParamsRequestBody() - .destinationDefinitionId(destinationDefId) - .params(firstParams); - handler.setDestinationInstancewideOauthParams(firstRequest); - - final UUID oauthParameterId = UUID.randomUUID(); - when(configRepository.getDestinationOAuthParamByDefinitionIdOptional(null, destinationDefId)) - .thenReturn(Optional.of(new DestinationOAuthParameter().withOauthParameterId(oauthParameterId))); - - final Map secondParams = new HashMap<>(); - secondParams.put(CLIENT_ID_KEY, "456"); - secondParams.put(CLIENT_SECRET_KEY, "hunter3"); - final SetInstancewideDestinationOauthParamsRequestBody secondRequest = new SetInstancewideDestinationOauthParamsRequestBody() - .destinationDefinitionId(destinationDefId) - .params(secondParams); - handler.setDestinationInstancewideOauthParams(secondRequest); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(DestinationOAuthParameter.class); - Mockito.verify(configRepository, Mockito.times(2)).writeDestinationOAuthParam(argument.capture()); - final List capturedValues = argument.getAllValues(); - assertEquals(Jsons.jsonNode(firstParams), capturedValues.get(0).getConfiguration()); - assertEquals(Jsons.jsonNode(secondParams), capturedValues.get(1).getConfiguration()); - assertEquals(destinationDefId, capturedValues.get(0).getDestinationDefinitionId()); - assertEquals(destinationDefId, capturedValues.get(1).getDestinationDefinitionId()); - assertEquals(oauthParameterId, capturedValues.get(1).getOauthParameterId()); - } - - @Test - void testBuildJsonPathFromOAuthFlowInitParameters() { - final Map> input = Map.ofEntries( - Map.entry("field1", List.of("1")), - Map.entry("field2", List.of("2", "3"))); - - final Map expected = Map.ofEntries( - Map.entry("field1", "$.1"), - Map.entry("field2", "$.2.3")); - - assertEquals(expected, handler.buildJsonPathFromOAuthFlowInitParameters(input)); - } - - @Test - void 
testGetOAuthInputConfiguration() { - final JsonNode hydratedConfig = Jsons.deserialize( - """ - { - "field1": "1", - "field2": "2", - "field3": { - "field3_1": "3_1", - "field3_2": "3_2" - } - } - """); - - final Map pathsToGet = Map.ofEntries( - Map.entry("field1", "$.field1"), - Map.entry("field3_1", "$.field3.field3_1"), - Map.entry("field3_2", "$.field3.field3_2"), - Map.entry("field4", "$.someNonexistentField")); - - final JsonNode expected = Jsons.deserialize( - """ - { - "field1": "1", - "field3_1": "3_1", - "field3_2": "3_2" - } - """); - - assertEquals(expected, handler.getOAuthInputConfiguration(hydratedConfig, pathsToGet)); - } - - @Test - void testGetOauthFromDBIfNeeded() { - final JsonNode fromInput = Jsons.deserialize( - """ - { - "testMask": "**********", - "testNotMask": "this", - "testOtherType": true - } - """); - - final JsonNode fromDb = Jsons.deserialize( - """ - { - "testMask": "mask", - "testNotMask": "notThis", - "testOtherType": true - } - """); - - final JsonNode expected = Jsons.deserialize( - """ - { - "testMask": "mask", - "testNotMask": "this", - "testOtherType": true - } - """); - - assertEquals(expected, handler.getOauthFromDBIfNeeded(fromDb, fromInput)); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OpenApiConfigHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OpenApiConfigHandlerTest.java deleted file mode 100644 index 14b2e734ed2e..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OpenApiConfigHandlerTest.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.*; - -import com.google.common.io.Files; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.List; -import org.junit.jupiter.api.Test; - -class OpenApiConfigHandlerTest { - - @Test - void testGetFile() throws IOException { - final List lines = Files.readLines(new OpenApiConfigHandler().getFile(), Charset.defaultCharset()); - assertTrue(lines.get(0).contains("openapi")); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OperationsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OperationsHandlerTest.java deleted file mode 100644 index 84fc0f8280fb..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OperationsHandlerTest.java +++ /dev/null @@ -1,386 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.OperationCreate; -import io.airbyte.api.model.generated.OperationIdRequestBody; -import io.airbyte.api.model.generated.OperationRead; -import io.airbyte.api.model.generated.OperationReadList; -import io.airbyte.api.model.generated.OperationUpdate; -import io.airbyte.api.model.generated.OperatorConfiguration; -import io.airbyte.api.model.generated.OperatorDbt; -import io.airbyte.api.model.generated.OperatorNormalization; -import 
io.airbyte.api.model.generated.OperatorNormalization.OptionEnum; -import io.airbyte.api.model.generated.OperatorType; -import io.airbyte.api.model.generated.OperatorWebhook; -import io.airbyte.api.model.generated.OperatorWebhook.WebhookTypeEnum; -import io.airbyte.api.model.generated.OperatorWebhookDbtCloud; -import io.airbyte.commons.enums.Enums; -import io.airbyte.config.OperatorNormalization.Option; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.UUID; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class OperationsHandlerTest { - - private static final String WEBHOOK_OPERATION_NAME = "fake-operation-name"; - private static final UUID WEBHOOK_CONFIG_ID = UUID.randomUUID(); - private static final UUID WEBHOOK_OPERATION_ID = UUID.randomUUID(); - private static final Integer DBT_CLOUD_WEBHOOK_ACCOUNT_ID = 123; - private static final Integer DBT_CLOUD_WEBHOOK_JOB_ID = 456; - private static final Integer NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID = 789; - public static final String EXECUTION_BODY = "{\"cause\": \"airbyte\"}"; - public static final String EXECUTION_URL_TEMPLATE = "https://cloud.getdbt.com/api/v2/accounts/%d/jobs/%d/run/"; - private ConfigRepository configRepository; - private Supplier uuidGenerator; - private OperationsHandler operationsHandler; - private StandardSyncOperation standardSyncOperation; - - @SuppressWarnings("unchecked") - @BeforeEach - void setUp() throws IOException { - configRepository = mock(ConfigRepository.class); - uuidGenerator = mock(Supplier.class); - - operationsHandler = new 
OperationsHandler(configRepository, uuidGenerator); - standardSyncOperation = new StandardSyncOperation() - .withWorkspaceId(UUID.randomUUID()) - .withOperationId(UUID.randomUUID()) - .withName("presto to hudi") - .withOperatorType(io.airbyte.config.StandardSyncOperation.OperatorType.NORMALIZATION) - .withOperatorNormalization(new io.airbyte.config.OperatorNormalization().withOption(Option.BASIC)) - .withOperatorDbt(null) - .withTombstone(false); - } - - @Test - void testCreateOperation() throws JsonValidationException, ConfigNotFoundException, IOException { - when(uuidGenerator.get()).thenReturn(standardSyncOperation.getOperationId()); - - when(configRepository.getStandardSyncOperation(standardSyncOperation.getOperationId())).thenReturn(standardSyncOperation); - - final OperationCreate operationCreate = new OperationCreate() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION) - .normalization(new OperatorNormalization().option(OptionEnum.BASIC))); - - final OperationRead actualOperationRead = operationsHandler.createOperation(operationCreate); - - final OperationRead expectedOperationRead = new OperationRead() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION) - .normalization(new OperatorNormalization().option(OptionEnum.BASIC))); - - assertEquals(expectedOperationRead, actualOperationRead); - - verify(configRepository).writeStandardSyncOperation(standardSyncOperation); - } - - @Test - void testCreateWebhookOperation() throws JsonValidationException, ConfigNotFoundException, IOException { - when(uuidGenerator.get()).thenReturn(WEBHOOK_OPERATION_ID); - final OperatorWebhook webhookConfig = new OperatorWebhook() - 
.webhookConfigId(WEBHOOK_CONFIG_ID) - .webhookType(WebhookTypeEnum.DBTCLOUD) - .dbtCloud(new OperatorWebhookDbtCloud() - .accountId(DBT_CLOUD_WEBHOOK_ACCOUNT_ID) - .jobId(DBT_CLOUD_WEBHOOK_JOB_ID)); - final OperationCreate operationCreate = new OperationCreate() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .name(WEBHOOK_OPERATION_NAME) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.WEBHOOK).webhook(webhookConfig)); - - final StandardSyncOperation expectedPersistedOperation = new StandardSyncOperation() - .withWorkspaceId(standardSyncOperation.getWorkspaceId()) - .withOperationId(WEBHOOK_OPERATION_ID) - .withName(WEBHOOK_OPERATION_NAME) - .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) - .withOperatorWebhook(new io.airbyte.config.OperatorWebhook() - .withWebhookConfigId(WEBHOOK_CONFIG_ID) - .withExecutionUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, - DBT_CLOUD_WEBHOOK_JOB_ID)) - .withExecutionBody(EXECUTION_BODY)) - .withTombstone(false); - - when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(expectedPersistedOperation); - - final OperationRead actualOperationRead = operationsHandler.createOperation(operationCreate); - - assertEquals(operationCreate.getWorkspaceId(), actualOperationRead.getWorkspaceId()); - assertEquals(WEBHOOK_OPERATION_ID, actualOperationRead.getOperationId()); - assertEquals(WEBHOOK_OPERATION_NAME, actualOperationRead.getName()); - assertEquals(OperatorType.WEBHOOK, actualOperationRead.getOperatorConfiguration().getOperatorType()); - - // NOTE: we expect the server to dual-write on read until the frontend moves to the new format. 
- final OperatorWebhook expectedWebhookConfigRead = - webhookConfig.executionUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, - DBT_CLOUD_WEBHOOK_JOB_ID)).executionBody(EXECUTION_BODY); - assertEquals(expectedWebhookConfigRead, actualOperationRead.getOperatorConfiguration().getWebhook()); - - verify(configRepository).writeStandardSyncOperation(eq(expectedPersistedOperation)); - } - - @Test - void testUpdateOperation() throws JsonValidationException, ConfigNotFoundException, IOException { - final OperationUpdate operationUpdate = new OperationUpdate() - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.DBT) - .dbt(new OperatorDbt() - .gitRepoUrl("git_repo_url") - .gitRepoBranch("git_repo_branch") - .dockerImage("docker") - .dbtArguments("--full-refresh"))); - - final StandardSyncOperation updatedStandardSyncOperation = new StandardSyncOperation() - .withWorkspaceId(standardSyncOperation.getWorkspaceId()) - .withOperationId(standardSyncOperation.getOperationId()) - .withName(standardSyncOperation.getName()) - .withOperatorType(io.airbyte.config.StandardSyncOperation.OperatorType.DBT) - .withOperatorDbt(new io.airbyte.config.OperatorDbt() - .withGitRepoUrl("git_repo_url") - .withGitRepoBranch("git_repo_branch") - .withDockerImage("docker") - .withDbtArguments("--full-refresh")) - .withOperatorNormalization(null) - .withTombstone(false); - - when(configRepository.getStandardSyncOperation(standardSyncOperation.getOperationId())).thenReturn(standardSyncOperation) - .thenReturn(updatedStandardSyncOperation); - - final OperationRead actualOperationRead = operationsHandler.updateOperation(operationUpdate); - - final OperationRead expectedOperationRead = new OperationRead() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()) - 
.operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.DBT) - .dbt(new OperatorDbt() - .gitRepoUrl("git_repo_url") - .gitRepoBranch("git_repo_branch") - .dockerImage("docker") - .dbtArguments("--full-refresh"))); - - assertEquals(expectedOperationRead, actualOperationRead); - - verify(configRepository).writeStandardSyncOperation(updatedStandardSyncOperation); - } - - @Test - void testUpdateWebhookOperation() throws JsonValidationException, ConfigNotFoundException, IOException { - when(uuidGenerator.get()).thenReturn(WEBHOOK_OPERATION_ID); - final OperatorWebhook webhookConfig = new OperatorWebhook() - .webhookConfigId(WEBHOOK_CONFIG_ID) - .webhookType(WebhookTypeEnum.DBTCLOUD) - .dbtCloud(new OperatorWebhookDbtCloud() - .accountId(NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID) - .jobId(DBT_CLOUD_WEBHOOK_JOB_ID)); - final OperationUpdate operationUpdate = new OperationUpdate() - .name(WEBHOOK_OPERATION_NAME) - .operationId(WEBHOOK_OPERATION_ID) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.WEBHOOK).webhook(webhookConfig)); - - final var persistedWebhook = new io.airbyte.config.OperatorWebhook() - .withWebhookConfigId(WEBHOOK_CONFIG_ID) - .withExecutionUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, - DBT_CLOUD_WEBHOOK_JOB_ID)) - .withExecutionBody(EXECUTION_BODY); - - final var updatedWebhook = new io.airbyte.config.OperatorWebhook() - .withWebhookConfigId(WEBHOOK_CONFIG_ID) - .withExecutionUrl(String.format(EXECUTION_URL_TEMPLATE, NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID, - DBT_CLOUD_WEBHOOK_JOB_ID)) - .withExecutionBody(EXECUTION_BODY); - - final StandardSyncOperation persistedOperation = new StandardSyncOperation() - .withWorkspaceId(standardSyncOperation.getWorkspaceId()) - .withOperationId(WEBHOOK_OPERATION_ID) - .withName(WEBHOOK_OPERATION_NAME) - .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) - .withOperatorWebhook(persistedWebhook); - - final StandardSyncOperation updatedOperation 
= new StandardSyncOperation() - .withWorkspaceId(standardSyncOperation.getWorkspaceId()) - .withOperationId(WEBHOOK_OPERATION_ID) - .withName(WEBHOOK_OPERATION_NAME) - .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) - .withOperatorWebhook(updatedWebhook); - - when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(persistedOperation).thenReturn(updatedOperation); - - final OperationRead actualOperationRead = operationsHandler.updateOperation(operationUpdate); - - assertEquals(WEBHOOK_OPERATION_ID, actualOperationRead.getOperationId()); - assertEquals(WEBHOOK_OPERATION_NAME, actualOperationRead.getName()); - assertEquals(OperatorType.WEBHOOK, actualOperationRead.getOperatorConfiguration().getOperatorType()); - final OperatorWebhook expectedWebhookConfigRead = - webhookConfig.executionUrl(String.format(EXECUTION_URL_TEMPLATE, NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID, - DBT_CLOUD_WEBHOOK_JOB_ID)).executionBody(EXECUTION_BODY); - assertEquals(expectedWebhookConfigRead, actualOperationRead.getOperatorConfiguration().getWebhook()); - - verify(configRepository) - .writeStandardSyncOperation(persistedOperation.withOperatorWebhook(persistedOperation.getOperatorWebhook().withExecutionUrl( - String.format(EXECUTION_URL_TEMPLATE, NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID, - DBT_CLOUD_WEBHOOK_JOB_ID)))); - } - - @Test - void testGetOperation() throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.getStandardSyncOperation(standardSyncOperation.getOperationId())).thenReturn(standardSyncOperation); - - final OperationIdRequestBody operationIdRequestBody = new OperationIdRequestBody().operationId(standardSyncOperation.getOperationId()); - final OperationRead actualOperationRead = operationsHandler.getOperation(operationIdRequestBody); - - final OperationRead expectedOperationRead = generateOperationRead(); - - assertEquals(expectedOperationRead, actualOperationRead); - } - - private OperationRead generateOperationRead() 
{ - return new OperationRead() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION) - .normalization(new OperatorNormalization().option(OptionEnum.BASIC))); - } - - @Test - void testListOperationsForConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID connectionId = UUID.randomUUID(); - - when(configRepository.getStandardSync(connectionId)) - .thenReturn(new StandardSync() - .withOperationIds(List.of(standardSyncOperation.getOperationId()))); - - when(configRepository.getStandardSyncOperation(standardSyncOperation.getOperationId())) - .thenReturn(standardSyncOperation); - - when(configRepository.listStandardSyncOperations()) - .thenReturn(List.of(standardSyncOperation)); - - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(connectionId); - final OperationReadList actualOperationReadList = operationsHandler.listOperationsForConnection(connectionIdRequestBody); - - assertEquals(generateOperationRead(), actualOperationReadList.getOperations().get(0)); - } - - @Test - void testDeleteOperation() throws IOException { - final OperationIdRequestBody operationIdRequestBody = new OperationIdRequestBody().operationId(standardSyncOperation.getOperationId()); - - final OperationsHandler spiedOperationsHandler = spy(operationsHandler); - - spiedOperationsHandler.deleteOperation(operationIdRequestBody); - - verify(configRepository).deleteStandardSyncOperation(standardSyncOperation.getOperationId()); - } - - @Test - void testDeleteOperationsForConnection() throws JsonValidationException, IOException, ConfigNotFoundException { - final UUID syncConnectionId = UUID.randomUUID(); - final UUID otherConnectionId = UUID.randomUUID(); - final UUID operationId = UUID.randomUUID(); - final UUID 
remainingOperationId = UUID.randomUUID(); - final List toDelete = Stream.of(standardSyncOperation.getOperationId(), operationId).collect(Collectors.toList()); - final StandardSync sync = new StandardSync() - .withConnectionId(syncConnectionId) - .withOperationIds(List.of(standardSyncOperation.getOperationId(), operationId, remainingOperationId)); - when(configRepository.listStandardSyncs()).thenReturn(List.of( - sync, - new StandardSync() - .withConnectionId(otherConnectionId) - .withOperationIds(List.of(standardSyncOperation.getOperationId())))); - final StandardSyncOperation operation = new StandardSyncOperation().withOperationId(operationId); - final StandardSyncOperation remainingOperation = new StandardSyncOperation().withOperationId(remainingOperationId); - when(configRepository.getStandardSyncOperation(operationId)).thenReturn(operation); - when(configRepository.getStandardSyncOperation(remainingOperationId)).thenReturn(remainingOperation); - when(configRepository.getStandardSyncOperation(standardSyncOperation.getOperationId())).thenReturn(standardSyncOperation); - - // first, test that a remaining operation results in proper call - operationsHandler.deleteOperationsForConnection(sync, toDelete); - verify(configRepository).writeStandardSyncOperation(operation.withTombstone(true)); - verify(configRepository).updateConnectionOperationIds(syncConnectionId, Collections.singleton(remainingOperationId)); - - // next, test that removing all operations results in proper call - toDelete.add(remainingOperationId); - operationsHandler.deleteOperationsForConnection(sync, toDelete); - verify(configRepository).updateConnectionOperationIds(syncConnectionId, Collections.emptySet()); - } - - @Test - void testEnumConversion() { - assertTrue(Enums.isCompatible(io.airbyte.api.model.generated.OperatorType.class, io.airbyte.config.StandardSyncOperation.OperatorType.class)); - assertTrue(Enums.isCompatible(io.airbyte.api.model.generated.OperatorNormalization.OptionEnum.class, - 
io.airbyte.config.OperatorNormalization.Option.class)); - } - - @Test - void testDbtCloudRegex() { - // Validate that a non-url is rejected. - assertThrows(IllegalArgumentException.class, () -> checkDbtCloudUrl("not-a-url")); - // Validate that the URL is anchored to the beginning. - assertThrows(IllegalArgumentException.class, - () -> checkDbtCloudUrl("some-nonsense-" + String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, - DBT_CLOUD_WEBHOOK_JOB_ID))); - // Validate that the URL is anchored to the end. - assertThrows(IllegalArgumentException.class, - () -> checkDbtCloudUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, - DBT_CLOUD_WEBHOOK_JOB_ID) + "-some-nonsense")); - // Validate that the account id must be an integer. - assertThrows(IllegalArgumentException.class, () -> checkDbtCloudUrl("https://cloud.getdbt.com/api/v2/accounts/abc/jobs/123/run/")); - // Validate that the job id must be an integer. - assertThrows(IllegalArgumentException.class, () -> checkDbtCloudUrl("https://cloud.getdbt.com/api/v2/accounts/123/jobs/abc/run/")); - } - - private void checkDbtCloudUrl(final String urlToCheck) throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSyncOperation persistedOperation = new StandardSyncOperation() - .withWorkspaceId(standardSyncOperation.getWorkspaceId()) - .withOperationId(WEBHOOK_OPERATION_ID) - .withName(WEBHOOK_OPERATION_NAME) - .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) - .withOperatorWebhook(new io.airbyte.config.OperatorWebhook() - .withWebhookConfigId(WEBHOOK_CONFIG_ID) - .withExecutionUrl(urlToCheck) - .withExecutionBody(EXECUTION_BODY)) - .withTombstone(false); - when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(persistedOperation); - - final OperationIdRequestBody operationIdRequestBody = new OperationIdRequestBody().operationId(WEBHOOK_OPERATION_ID); - operationsHandler.getOperation(operationIdRequestBody); - } - -} diff 
--git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java deleted file mode 100644 index be7a71f93adf..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java +++ /dev/null @@ -1,1248 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.RETURNS_DEEP_STUBS; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -import com.google.common.collect.Lists; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.model.generated.CatalogDiff; -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionReadList; -import io.airbyte.api.model.generated.ConnectionStatus; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.DestinationCoreConfig; -import 
io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationUpdate; -import io.airbyte.api.model.generated.FieldTransform; -import io.airbyte.api.model.generated.JobIdRequestBody; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.NonBreakingChangesPreference; -import io.airbyte.api.model.generated.SourceCoreConfig; -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionSpecificationRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceUpdate; -import io.airbyte.api.model.generated.StreamTransform; -import io.airbyte.api.model.generated.StreamTransform.TransformTypeEnum; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.server.converters.ConfigurationUpdate; -import io.airbyte.commons.server.converters.JobConverter; -import io.airbyte.commons.server.errors.ValueConflictKnownException; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.commons.server.helpers.DestinationHelpers; -import io.airbyte.commons.server.helpers.SourceHelpers; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.commons.server.scheduler.SynchronousJobMetadata; -import io.airbyte.commons.server.scheduler.SynchronousResponse; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.commons.temporal.ErrorCode; -import 
io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardCheckConnectionOutput; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WebUrlHelper; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.URI; -import java.util.HashMap; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -@SuppressWarnings("unchecked") -class SchedulerHandlerTest { - - private static final String SOURCE_DOCKER_REPO = "srcimage"; - private static final String SOURCE_DOCKER_TAG = "tag"; - private static final String SOURCE_DOCKER_IMAGE = SOURCE_DOCKER_REPO + ":" + SOURCE_DOCKER_TAG; - private static final String SOURCE_PROTOCOL_VERSION = "0.4.5"; - - private static 
final String DESTINATION_DOCKER_REPO = "dstimage"; - private static final String DESTINATION_DOCKER_TAG = "tag"; - private static final String DESTINATION_DOCKER_IMAGE = DESTINATION_DOCKER_REPO + ":" + DESTINATION_DOCKER_TAG; - private static final String DESTINATION_PROTOCOL_VERSION = "0.7.9"; - private static final String NAME = "name"; - private static final String DOGS = "dogs"; - private static final String SHOES = "shoes"; - private static final String SKU = "sku"; - private static final String CONNECTION_URL = "connection_url"; - - private static final AirbyteCatalog airbyteCatalog = CatalogHelpers.createAirbyteCatalog(SHOES, - Field.of(SKU, JsonSchemaType.STRING)); - - private static final SourceConnection SOURCE = new SourceConnection() - .withName("my postgres db") - .withWorkspaceId(UUID.randomUUID()) - .withSourceDefinitionId(UUID.randomUUID()) - .withSourceId(UUID.randomUUID()) - .withConfiguration(Jsons.emptyObject()) - .withTombstone(false); - - private static final DestinationConnection DESTINATION = new DestinationConnection() - .withName("my db2 instance") - .withWorkspaceId(UUID.randomUUID()) - .withDestinationDefinitionId(UUID.randomUUID()) - .withDestinationId(UUID.randomUUID()) - .withConfiguration(Jsons.emptyObject()) - .withTombstone(false); - - private static final ConnectorSpecification CONNECTOR_SPECIFICATION = new ConnectorSpecification() - .withDocumentationUrl(Exceptions.toRuntime(() -> new URI("https://google.com"))) - .withChangelogUrl(Exceptions.toRuntime(() -> new URI("https://google.com"))) - .withConnectionSpecification(Jsons.jsonNode(new HashMap<>())); - - private static final ConnectorSpecification CONNECTOR_SPECIFICATION_WITHOUT_DOCS_URL = new ConnectorSpecification() - .withChangelogUrl(Exceptions.toRuntime(() -> new URI("https://google.com"))) - .withConnectionSpecification(Jsons.jsonNode(new HashMap<>())); - - private static final StreamDescriptor STREAM_DESCRIPTOR = new StreamDescriptor().withName("1"); - - private 
SchedulerHandler schedulerHandler; - private ConfigRepository configRepository; - private SecretsRepositoryWriter secretsRepositoryWriter; - private Job completedJob; - private SynchronousSchedulerClient synchronousSchedulerClient; - private SynchronousResponse jobResponse; - private ConfigurationUpdate configurationUpdate; - private JsonSchemaValidator jsonSchemaValidator; - private JobPersistence jobPersistence; - private EventRunner eventRunner; - private JobConverter jobConverter; - private ConnectionsHandler connectionsHandler; - private EnvVariableFeatureFlags envVariableFeatureFlags; - private WebUrlHelper webUrlHelper; - - @BeforeEach - void setup() { - completedJob = mock(Job.class, RETURNS_DEEP_STUBS); - jobResponse = mock(SynchronousResponse.class, RETURNS_DEEP_STUBS); - final SynchronousJobMetadata synchronousJobMetadata = mock(SynchronousJobMetadata.class); - when(synchronousJobMetadata.getConfigType()) - .thenReturn(ConfigType.SYNC); - when(jobResponse.getMetadata()) - .thenReturn(synchronousJobMetadata); - configurationUpdate = mock(ConfigurationUpdate.class); - jsonSchemaValidator = mock(JsonSchemaValidator.class); - when(completedJob.getStatus()).thenReturn(JobStatus.SUCCEEDED); - when(completedJob.getConfig().getConfigType()).thenReturn(ConfigType.SYNC); - when(completedJob.getScope()).thenReturn("sync:123"); - - synchronousSchedulerClient = mock(SynchronousSchedulerClient.class); - configRepository = mock(ConfigRepository.class); - secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); - jobPersistence = mock(JobPersistence.class); - eventRunner = mock(EventRunner.class); - connectionsHandler = mock(ConnectionsHandler.class); - envVariableFeatureFlags = mock(EnvVariableFeatureFlags.class); - webUrlHelper = mock(WebUrlHelper.class); - - jobConverter = spy(new JobConverter(WorkerEnvironment.DOCKER, LogConfigs.EMPTY)); - - schedulerHandler = new SchedulerHandler( - configRepository, - secretsRepositoryWriter, - synchronousSchedulerClient, - 
configurationUpdate, - jsonSchemaValidator, - jobPersistence, - eventRunner, - jobConverter, - connectionsHandler, - envVariableFeatureFlags, - webUrlHelper); - } - - @Test - void testCheckSourceConnectionFromSourceId() throws JsonValidationException, IOException, ConfigNotFoundException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final SourceIdRequestBody request = new SourceIdRequestBody().sourceId(source.getSourceId()); - final Version protocolVersion = new Version(SOURCE_PROTOCOL_VERSION); - - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId())); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(synchronousSchedulerClient.createSourceCheckConnectionJob(source, SOURCE_DOCKER_IMAGE, protocolVersion, false)) - .thenReturn((SynchronousResponse) jobResponse); - - schedulerHandler.checkSourceConnectionFromSourceId(request); - - verify(configRepository).getSourceConnection(source.getSourceId()); - verify(synchronousSchedulerClient).createSourceCheckConnectionJob(source, SOURCE_DOCKER_IMAGE, protocolVersion, false); - } - - @Test - void testCheckSourceConnectionFromSourceCreate() throws JsonValidationException, IOException, ConfigNotFoundException { - final SourceConnection source = new SourceConnection() - .withSourceDefinitionId(SOURCE.getSourceDefinitionId()) - .withConfiguration(SOURCE.getConfiguration()); - - final SourceCoreConfig sourceCoreConfig = new SourceCoreConfig() - .sourceDefinitionId(source.getSourceDefinitionId()) - .connectionConfiguration(source.getConfiguration()) - .workspaceId(source.getWorkspaceId()); - - final Version protocolVersion = new Version(SOURCE_PROTOCOL_VERSION); - - 
when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId())); - when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( - eq(source.getConfiguration()), - any())).thenReturn(source.getConfiguration()); - when(synchronousSchedulerClient.createSourceCheckConnectionJob(source, SOURCE_DOCKER_IMAGE, protocolVersion, false)) - .thenReturn((SynchronousResponse) jobResponse); - - schedulerHandler.checkSourceConnectionFromSourceCreate(sourceCoreConfig); - - verify(synchronousSchedulerClient).createSourceCheckConnectionJob(source, SOURCE_DOCKER_IMAGE, protocolVersion, false); - } - - @Test - void testCheckSourceConnectionFromUpdate() throws IOException, JsonValidationException, ConfigNotFoundException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final SourceUpdate sourceUpdate = new SourceUpdate() - .name(source.getName()) - .sourceId(source.getSourceId()) - .connectionConfiguration(source.getConfiguration()); - final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() - .withDockerRepository(DESTINATION_DOCKER_REPO) - .withDockerImageTag(DESTINATION_DOCKER_TAG) - .withProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId()) - .withSpec(CONNECTOR_SPECIFICATION); - final Version protocolVersion = new Version(DESTINATION_PROTOCOL_VERSION); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDefinition); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(configurationUpdate.source(source.getSourceId(), source.getName(), sourceUpdate.getConnectionConfiguration())).thenReturn(source); - final SourceConnection 
submittedSource = new SourceConnection() - .withSourceId(source.getSourceId()) - .withSourceDefinitionId(source.getSourceDefinitionId()) - .withConfiguration(source.getConfiguration()) - .withWorkspaceId(source.getWorkspaceId()); - when(synchronousSchedulerClient.createSourceCheckConnectionJob(submittedSource, DESTINATION_DOCKER_IMAGE, protocolVersion, false)) - .thenReturn((SynchronousResponse) jobResponse); - when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( - eq(source.getConfiguration()), - any())).thenReturn(source.getConfiguration()); - schedulerHandler.checkSourceConnectionFromSourceIdForUpdate(sourceUpdate); - - verify(jsonSchemaValidator).ensure(CONNECTOR_SPECIFICATION.getConnectionSpecification(), source.getConfiguration()); - verify(synchronousSchedulerClient).createSourceCheckConnectionJob(submittedSource, DESTINATION_DOCKER_IMAGE, protocolVersion, false); - } - - @Test - void testGetSourceSpec() throws JsonValidationException, IOException, ConfigNotFoundException { - final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId = - new SourceDefinitionIdWithWorkspaceId().sourceDefinitionId(UUID.randomUUID()).workspaceId(UUID.randomUUID()); - - final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() - .withName(NAME) - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withSourceDefinitionId(sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId()) - .withSpec(CONNECTOR_SPECIFICATION); - when(configRepository.getStandardSourceDefinition(sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId())) - .thenReturn(sourceDefinition); - - final SourceDefinitionSpecificationRead response = schedulerHandler.getSourceDefinitionSpecification(sourceDefinitionIdWithWorkspaceId); - - verify(configRepository).getStandardSourceDefinition(sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId()); - assertEquals(CONNECTOR_SPECIFICATION.getConnectionSpecification(), 
response.getConnectionSpecification()); - } - - @Test - void testGetSourceSpecWithoutDocs() throws JsonValidationException, IOException, ConfigNotFoundException { - final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId = - new SourceDefinitionIdWithWorkspaceId().sourceDefinitionId(UUID.randomUUID()).workspaceId(UUID.randomUUID()); - - final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() - .withName(NAME) - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withSourceDefinitionId(sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId()) - .withSpec(CONNECTOR_SPECIFICATION_WITHOUT_DOCS_URL); - when(configRepository.getStandardSourceDefinition(sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId())) - .thenReturn(sourceDefinition); - - final SourceDefinitionSpecificationRead response = schedulerHandler.getSourceDefinitionSpecification(sourceDefinitionIdWithWorkspaceId); - - verify(configRepository).getStandardSourceDefinition(sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId()); - assertEquals(CONNECTOR_SPECIFICATION_WITHOUT_DOCS_URL.getConnectionSpecification(), response.getConnectionSpecification()); - } - - @Test - void testGetDestinationSpec() throws JsonValidationException, IOException, ConfigNotFoundException { - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId = - new DestinationDefinitionIdWithWorkspaceId().destinationDefinitionId(UUID.randomUUID()).workspaceId(UUID.randomUUID()); - - final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() - .withName(NAME) - .withDockerRepository(DESTINATION_DOCKER_REPO) - .withDockerImageTag(DESTINATION_DOCKER_TAG) - .withDestinationDefinitionId(destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId()) - .withSpec(CONNECTOR_SPECIFICATION); - 
when(configRepository.getStandardDestinationDefinition(destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId())) - .thenReturn(destinationDefinition); - - final DestinationDefinitionSpecificationRead response = schedulerHandler.getDestinationSpecification(destinationDefinitionIdWithWorkspaceId); - - verify(configRepository).getStandardDestinationDefinition(destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId()); - assertEquals(CONNECTOR_SPECIFICATION.getConnectionSpecification(), response.getConnectionSpecification()); - } - - @Test - void testCheckDestinationConnectionFromDestinationId() throws IOException, JsonValidationException, ConfigNotFoundException { - final DestinationConnection destination = DestinationHelpers.generateDestination(UUID.randomUUID()); - final DestinationIdRequestBody request = new DestinationIdRequestBody().destinationId(destination.getDestinationId()); - - when(configRepository.getStandardDestinationDefinition(destination.getDestinationDefinitionId())) - .thenReturn(new StandardDestinationDefinition() - .withDockerRepository(DESTINATION_DOCKER_REPO) - .withDockerImageTag(DESTINATION_DOCKER_TAG) - .withProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withDestinationDefinitionId(destination.getDestinationDefinitionId())); - when(configRepository.getDestinationConnection(destination.getDestinationId())).thenReturn(destination); - when(synchronousSchedulerClient.createDestinationCheckConnectionJob(destination, DESTINATION_DOCKER_IMAGE, - new Version(DESTINATION_PROTOCOL_VERSION), false)) - .thenReturn((SynchronousResponse) jobResponse); - - schedulerHandler.checkDestinationConnectionFromDestinationId(request); - - verify(configRepository).getDestinationConnection(destination.getDestinationId()); - verify(synchronousSchedulerClient).createDestinationCheckConnectionJob(destination, DESTINATION_DOCKER_IMAGE, - new Version(DESTINATION_PROTOCOL_VERSION), false); - } - - @Test - void 
testCheckDestinationConnectionFromDestinationCreate() throws JsonValidationException, IOException, ConfigNotFoundException { - final DestinationConnection destination = new DestinationConnection() - .withDestinationDefinitionId(DESTINATION.getDestinationDefinitionId()) - .withConfiguration(DESTINATION.getConfiguration()); - - final DestinationCoreConfig destinationCoreConfig = new DestinationCoreConfig() - .destinationDefinitionId(destination.getDestinationDefinitionId()) - .connectionConfiguration(destination.getConfiguration()); - - when(configRepository.getStandardDestinationDefinition(destination.getDestinationDefinitionId())) - .thenReturn(new StandardDestinationDefinition() - .withDockerRepository(DESTINATION_DOCKER_REPO) - .withDockerImageTag(DESTINATION_DOCKER_TAG) - .withProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withDestinationDefinitionId(destination.getDestinationDefinitionId())); - - when(synchronousSchedulerClient.createDestinationCheckConnectionJob(destination, DESTINATION_DOCKER_IMAGE, - new Version(DESTINATION_PROTOCOL_VERSION), false)) - .thenReturn((SynchronousResponse) jobResponse); - when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( - eq(destination.getConfiguration()), - any())).thenReturn(destination.getConfiguration()); - schedulerHandler.checkDestinationConnectionFromDestinationCreate(destinationCoreConfig); - - verify(synchronousSchedulerClient).createDestinationCheckConnectionJob(destination, DESTINATION_DOCKER_IMAGE, - new Version(DESTINATION_PROTOCOL_VERSION), false); - } - - @Test - void testCheckDestinationConnectionFromUpdate() throws IOException, JsonValidationException, ConfigNotFoundException { - final DestinationConnection destination = DestinationHelpers.generateDestination(UUID.randomUUID()); - final DestinationUpdate destinationUpdate = new DestinationUpdate() - .name(destination.getName()) - .destinationId(destination.getDestinationId()) - .connectionConfiguration(destination.getConfiguration()); - final 
StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() - .withDockerRepository(DESTINATION_DOCKER_REPO) - .withDockerImageTag(DESTINATION_DOCKER_TAG) - .withProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withDestinationDefinitionId(destination.getDestinationDefinitionId()) - .withSpec(CONNECTOR_SPECIFICATION); - when(configRepository.getStandardDestinationDefinition(destination.getDestinationDefinitionId())) - .thenReturn(destinationDefinition); - when(configRepository.getDestinationConnection(destination.getDestinationId())).thenReturn(destination); - when(configurationUpdate.destination(destination.getDestinationId(), destination.getName(), destinationUpdate.getConnectionConfiguration())) - .thenReturn(destination); - final DestinationConnection submittedDestination = new DestinationConnection() - .withDestinationId(destination.getDestinationId()) - .withDestinationDefinitionId(destination.getDestinationDefinitionId()) - .withConfiguration(destination.getConfiguration()) - .withWorkspaceId(destination.getWorkspaceId()); - when(synchronousSchedulerClient.createDestinationCheckConnectionJob(submittedDestination, DESTINATION_DOCKER_IMAGE, - new Version(DESTINATION_PROTOCOL_VERSION), false)) - .thenReturn((SynchronousResponse) jobResponse); - when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( - eq(destination.getConfiguration()), - any())).thenReturn(destination.getConfiguration()); - schedulerHandler.checkDestinationConnectionFromDestinationIdForUpdate(destinationUpdate); - - verify(jsonSchemaValidator).ensure(CONNECTOR_SPECIFICATION.getConnectionSpecification(), destination.getConfiguration()); - verify(synchronousSchedulerClient).createDestinationCheckConnectionJob(submittedDestination, DESTINATION_DOCKER_IMAGE, - new Version(DESTINATION_PROTOCOL_VERSION), false); - } - - @Test - void testDiscoverSchemaForSourceFromSourceId() throws IOException, JsonValidationException, ConfigNotFoundException { - final 
SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); - - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SynchronousJobMetadata metadata = mock(SynchronousJobMetadata.class); - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(UUID.randomUUID()); - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(UUID.randomUUID()); - when(configRepository.getActorCatalogById(any())).thenReturn(actorCatalog); - when(discoverResponse.getMetadata()).thenReturn(metadata); - when(metadata.isSucceeded()).thenReturn(true); - - final ConnectionRead connectionRead = new ConnectionRead(); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead)); - when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); - - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId())); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(configRepository.getActorCatalog(any(), any(), any())).thenReturn(Optional.empty()); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - - assertNotNull(actual.getCatalog()); - assertEquals(actual.getCatalogId(), 
discoverResponse.getOutput()); - assertNotNull(actual.getJobInfo()); - assertTrue(actual.getJobInfo().getSucceeded()); - verify(configRepository).getSourceConnection(source.getSourceId()); - verify(configRepository).getActorCatalog(eq(request.getSourceId()), eq(SOURCE_DOCKER_TAG), any()); - verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false); - } - - @Test - void testDiscoverSchemaForSourceFromSourceIdCachedCatalog() throws IOException, JsonValidationException, ConfigNotFoundException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); - - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SynchronousJobMetadata metadata = mock(SynchronousJobMetadata.class); - final UUID thisCatalogId = UUID.randomUUID(); - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(thisCatalogId); - when(discoverResponse.getMetadata()).thenReturn(metadata); - when(metadata.isSucceeded()).thenReturn(true); - - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withSourceDefinitionId(source.getSourceDefinitionId())); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(thisCatalogId); - when(configRepository.getActorCatalog(any(), any(), any())).thenReturn(Optional.of(actorCatalog)); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - 
false)) - .thenReturn(discoverResponse); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - - assertNotNull(actual.getCatalog()); - assertNotNull(actual.getJobInfo()); - assertEquals(actual.getCatalogId(), discoverResponse.getOutput()); - assertTrue(actual.getJobInfo().getSucceeded()); - verify(configRepository).getSourceConnection(source.getSourceId()); - verify(configRepository).getActorCatalog(eq(request.getSourceId()), any(), any()); - verify(configRepository, never()).writeActorCatalogFetchEvent(any(), any(), any(), any()); - verify(synchronousSchedulerClient, never()).createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, - new Version(SOURCE_PROTOCOL_VERSION), false); - } - - @Test - void testDiscoverSchemaForSourceFromSourceIdDisableCache() throws IOException, JsonValidationException, ConfigNotFoundException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).disableCache(true); - - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SynchronousJobMetadata metadata = mock(SynchronousJobMetadata.class); - when(discoverResponse.isSuccess()).thenReturn(true); - final UUID discoveredCatalogId = UUID.randomUUID(); - when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); - when(discoverResponse.getMetadata()).thenReturn(metadata); - when(metadata.isSucceeded()).thenReturn(true); - - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId())); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - final 
ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - - assertNotNull(actual.getCatalog()); - assertNotNull(actual.getJobInfo()); - assertTrue(actual.getJobInfo().getSucceeded()); - verify(configRepository).getSourceConnection(source.getSourceId()); - verify(configRepository).getActorCatalog(eq(request.getSourceId()), any(), any()); - verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false); - } - - @Test - void testDiscoverSchemaForSourceFromSourceIdFailed() throws IOException, JsonValidationException, ConfigNotFoundException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); - - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId())); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn((SynchronousResponse) jobResponse); - 
when(completedJob.getSuccessOutput()).thenReturn(Optional.empty()); - when(completedJob.getStatus()).thenReturn(JobStatus.FAILED); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - - assertNull(actual.getCatalog()); - assertNotNull(actual.getJobInfo()); - assertFalse(actual.getJobInfo().getSucceeded()); - verify(configRepository).getSourceConnection(source.getSourceId()); - verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false); - } - - @Test - void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreaking() - throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException, ApiException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final UUID connectionId = UUID.randomUUID(); - final UUID discoveredCatalogId = UUID.randomUUID(); - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SourceDiscoverSchemaRequestBody request = - new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).connectionId(connectionId).disableCache(true).notifySchemaChange(true); - final StreamTransform streamTransform = new StreamTransform().transformType(TransformTypeEnum.REMOVE_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name(DOGS)); - final CatalogDiff catalogDiff = new CatalogDiff().addTransformsItem(streamTransform); - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId()); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDef); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - 
when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - when(webUrlHelper.getConnectionUrl(source.getWorkspaceId(), connectionId)).thenReturn(CONNECTION_URL); - - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); - - final AirbyteCatalog airbyteCatalogCurrent = new AirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createAirbyteStream(SHOES, Field.of(SKU, JsonSchemaType.STRING)), - CatalogHelpers.createAirbyteStream(DOGS, Field.of(NAME, JsonSchemaType.STRING)))); - - final ConnectionRead connectionRead = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).connectionId(connectionId) - .notifySchemaChanges(true); - when(connectionsHandler.getConnection(request.getConnectionId())).thenReturn(connectionRead); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead)); - when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); - - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); - - final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), - io.airbyte.protocol.models.AirbyteCatalog.class); - final io.airbyte.api.model.generated.AirbyteCatalog expectedActorCatalog = CatalogConverter.toApi(persistenceCatalog, sourceDef); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - assertEquals(actual.getCatalogDiff(), catalogDiff); - assertEquals(actual.getCatalog(), 
expectedActorCatalog); - verify(eventRunner).sendSchemaChangeNotification(connectionId, CONNECTION_URL); - } - - @Test - void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceNoFeatureFlag() - throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException, ApiException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final UUID connectionId = UUID.randomUUID(); - final UUID discoveredCatalogId = UUID.randomUUID(); - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SourceDiscoverSchemaRequestBody request = - new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).connectionId(connectionId).disableCache(true).notifySchemaChange(true); - final StreamTransform streamTransform = new StreamTransform().transformType(TransformTypeEnum.REMOVE_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name(DOGS)); - final CatalogDiff catalogDiff = new CatalogDiff().addTransformsItem(streamTransform); - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId()); - when(envVariableFeatureFlags.autoDetectSchema()).thenReturn(false); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDef); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - when(webUrlHelper.getConnectionUrl(source.getWorkspaceId(), connectionId)).thenReturn(CONNECTION_URL); - - when(discoverResponse.isSuccess()).thenReturn(true); - 
when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); - - final AirbyteCatalog airbyteCatalogCurrent = new AirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createAirbyteStream(SHOES, Field.of(SKU, JsonSchemaType.STRING)), - CatalogHelpers.createAirbyteStream(DOGS, Field.of(NAME, JsonSchemaType.STRING)))); - - final ConnectionRead connectionRead = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).nonBreakingChangesPreference( - NonBreakingChangesPreference.DISABLE).status(ConnectionStatus.ACTIVE).connectionId(connectionId).notifySchemaChanges(true); - when(connectionsHandler.getConnection(request.getConnectionId())).thenReturn(connectionRead); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead)); - when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); - - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); - - final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), - io.airbyte.protocol.models.AirbyteCatalog.class); - final io.airbyte.api.model.generated.AirbyteCatalog expectedActorCatalog = CatalogConverter.toApi(persistenceCatalog, sourceDef); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - assertEquals(actual.getCatalogDiff(), catalogDiff); - assertEquals(actual.getCatalog(), expectedActorCatalog); - assertEquals(actual.getConnectionStatus(), ConnectionStatus.ACTIVE); - verify(eventRunner).sendSchemaChangeNotification(connectionId, CONNECTION_URL); - } - - @Test - void 
testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceFeatureFlag() - throws IOException, JsonValidationException, ConfigNotFoundException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final UUID connectionId = UUID.randomUUID(); - final UUID discoveredCatalogId = UUID.randomUUID(); - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SourceDiscoverSchemaRequestBody request = - new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).connectionId(connectionId).disableCache(true).notifySchemaChange(true); - final StreamTransform streamTransform = new StreamTransform().transformType(TransformTypeEnum.REMOVE_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name(DOGS)); - final CatalogDiff catalogDiff = new CatalogDiff().addTransformsItem(streamTransform); - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId()); - when(envVariableFeatureFlags.autoDetectSchema()).thenReturn(true); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDef); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); - - final AirbyteCatalog airbyteCatalogCurrent = new AirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createAirbyteStream(SHOES, Field.of(SKU, JsonSchemaType.STRING)), - CatalogHelpers.createAirbyteStream(DOGS, 
Field.of(NAME, JsonSchemaType.STRING)))); - - final ConnectionRead connectionRead = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).nonBreakingChangesPreference( - NonBreakingChangesPreference.DISABLE).connectionId(connectionId).notifySchemaChanges(false); - when(connectionsHandler.getConnection(request.getConnectionId())).thenReturn(connectionRead); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead)); - when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); - - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); - - final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), - io.airbyte.protocol.models.AirbyteCatalog.class); - final io.airbyte.api.model.generated.AirbyteCatalog expectedActorCatalog = CatalogConverter.toApi(persistenceCatalog, sourceDef); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - assertEquals(actual.getCatalogDiff(), catalogDiff); - assertEquals(actual.getCatalog(), expectedActorCatalog); - assertEquals(actual.getConnectionStatus(), ConnectionStatus.INACTIVE); - verifyNoInteractions(eventRunner); - } - - @Test - void testDiscoverSchemaFromSourceIdWithConnectionIdBreaking() - throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException, ApiException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final UUID connectionId = UUID.randomUUID(); - final UUID discoveredCatalogId = UUID.randomUUID(); - final SynchronousResponse discoverResponse = (SynchronousResponse) 
jobResponse; - final SourceDiscoverSchemaRequestBody request = - new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).connectionId(connectionId).disableCache(true).notifySchemaChange(true); - final StreamTransform streamTransform = new StreamTransform().transformType(TransformTypeEnum.UPDATE_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name(DOGS)).addUpdateStreamItem(new FieldTransform().transformType( - FieldTransform.TransformTypeEnum.REMOVE_FIELD).breaking(true)); - final CatalogDiff catalogDiff = new CatalogDiff().addTransformsItem(streamTransform); - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId()); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDef); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - when(webUrlHelper.getConnectionUrl(source.getWorkspaceId(), connectionId)).thenReturn(CONNECTION_URL); - - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); - - final AirbyteCatalog airbyteCatalogCurrent = new AirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createAirbyteStream(SHOES, Field.of(SKU, JsonSchemaType.STRING)), - CatalogHelpers.createAirbyteStream(DOGS, Field.of(NAME, JsonSchemaType.STRING)))); - - final ConnectionRead connectionRead = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).status(ConnectionStatus.ACTIVE) - .connectionId(connectionId) - .notifySchemaChanges(true); - 
when(connectionsHandler.getConnection(request.getConnectionId())).thenReturn(connectionRead); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead)); - when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); - - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); - - final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), - io.airbyte.protocol.models.AirbyteCatalog.class); - final io.airbyte.api.model.generated.AirbyteCatalog expectedActorCatalog = CatalogConverter.toApi(persistenceCatalog, sourceDef); - final ConnectionUpdate expectedConnectionUpdate = - new ConnectionUpdate().connectionId(connectionId).breakingChange(true).status(ConnectionStatus.ACTIVE); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - assertEquals(actual.getCatalogDiff(), catalogDiff); - assertEquals(actual.getCatalog(), expectedActorCatalog); - assertEquals(actual.getConnectionStatus(), ConnectionStatus.ACTIVE); - verify(connectionsHandler).updateConnection(expectedConnectionUpdate); - verify(eventRunner).sendSchemaChangeNotification(connectionId, CONNECTION_URL); - } - - @Test - void testDiscoverSchemaFromSourceIdWithConnectionIdBreakingFeatureFlagOn() - throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException, ApiException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final UUID connectionId = UUID.randomUUID(); - final UUID discoveredCatalogId = UUID.randomUUID(); - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final 
SourceDiscoverSchemaRequestBody request = - new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).connectionId(connectionId).disableCache(true).notifySchemaChange(true); - final StreamTransform streamTransform = new StreamTransform().transformType(TransformTypeEnum.UPDATE_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name(DOGS)).addUpdateStreamItem(new FieldTransform().transformType( - FieldTransform.TransformTypeEnum.REMOVE_FIELD).breaking(true)); - final CatalogDiff catalogDiff = new CatalogDiff().addTransformsItem(streamTransform); - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId()); - when(envVariableFeatureFlags.autoDetectSchema()).thenReturn(true); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDef); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - when(webUrlHelper.getConnectionUrl(source.getWorkspaceId(), connectionId)).thenReturn(CONNECTION_URL); - - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); - - final AirbyteCatalog airbyteCatalogCurrent = new AirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createAirbyteStream(SHOES, Field.of(SKU, JsonSchemaType.STRING)), - CatalogHelpers.createAirbyteStream(DOGS, Field.of(NAME, JsonSchemaType.STRING)))); - - final ConnectionRead connectionRead = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).connectionId(connectionId) - 
.notifySchemaChanges(true); - when(connectionsHandler.getConnection(request.getConnectionId())).thenReturn(connectionRead); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead)); - when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); - - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); - - final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), - io.airbyte.protocol.models.AirbyteCatalog.class); - final io.airbyte.api.model.generated.AirbyteCatalog expectedActorCatalog = CatalogConverter.toApi(persistenceCatalog, sourceDef); - final ConnectionUpdate expectedConnectionUpdate = - new ConnectionUpdate().connectionId(connectionId).breakingChange(true).status(ConnectionStatus.INACTIVE); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - assertEquals(actual.getCatalogDiff(), catalogDiff); - assertEquals(actual.getCatalog(), expectedActorCatalog); - assertEquals(actual.getConnectionStatus(), ConnectionStatus.INACTIVE); - verify(connectionsHandler).updateConnection(expectedConnectionUpdate); - verify(eventRunner).sendSchemaChangeNotification(connectionId, CONNECTION_URL); - } - - @Test - void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceFeatureFlagNoDiff() - throws IOException, JsonValidationException, ConfigNotFoundException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final UUID connectionId = UUID.randomUUID(); - final UUID discoveredCatalogId = UUID.randomUUID(); - final SynchronousResponse discoverResponse = 
(SynchronousResponse) jobResponse; - final SourceDiscoverSchemaRequestBody request = - new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).connectionId(connectionId).disableCache(true).notifySchemaChange(true); - final CatalogDiff catalogDiff = new CatalogDiff(); - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId()); - when(envVariableFeatureFlags.autoDetectSchema()).thenReturn(true); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDef); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); - - final AirbyteCatalog airbyteCatalogCurrent = new AirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createAirbyteStream(SHOES, Field.of(SKU, JsonSchemaType.STRING)), - CatalogHelpers.createAirbyteStream(DOGS, Field.of(NAME, JsonSchemaType.STRING)))); - - final ConnectionRead connectionRead = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).nonBreakingChangesPreference( - NonBreakingChangesPreference.DISABLE).status(ConnectionStatus.INACTIVE).connectionId(connectionId).notifySchemaChanges(false); - when(connectionsHandler.getConnection(request.getConnectionId())).thenReturn(connectionRead); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead)); - 
when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); - - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); - - final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), - io.airbyte.protocol.models.AirbyteCatalog.class); - final io.airbyte.api.model.generated.AirbyteCatalog expectedActorCatalog = CatalogConverter.toApi(persistenceCatalog, sourceDef); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - assertEquals(actual.getCatalogDiff(), catalogDiff); - assertEquals(actual.getCatalog(), expectedActorCatalog); - assertEquals(actual.getConnectionStatus(), ConnectionStatus.INACTIVE); - // notification preferences are turned on, but there is no schema diff detected - verifyNoInteractions(eventRunner); - } - - @Test - void testDiscoverSchemaForSourceMultipleConnectionsFeatureFlagOn() - throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException, ApiException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final UUID connectionId = UUID.randomUUID(); - final UUID connectionId2 = UUID.randomUUID(); - final UUID connectionId3 = UUID.randomUUID(); - final UUID discoveredCatalogId = UUID.randomUUID(); - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SourceDiscoverSchemaRequestBody request = - new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).connectionId(connectionId).disableCache(true).notifySchemaChange(true); - - // 3 connections use the same source. 
2 will generate catalog diffs that are non-breaking, 1 will - // generate a breaking catalog diff - final StreamTransform nonBreakingStreamTransform = new StreamTransform().transformType(TransformTypeEnum.UPDATE_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name(DOGS)).addUpdateStreamItem(new FieldTransform().transformType( - FieldTransform.TransformTypeEnum.REMOVE_FIELD).breaking(false)); - final StreamTransform breakingStreamTransform = new StreamTransform().transformType(TransformTypeEnum.UPDATE_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name(DOGS)).addUpdateStreamItem(new FieldTransform().transformType( - FieldTransform.TransformTypeEnum.REMOVE_FIELD).breaking(true)); - - final CatalogDiff catalogDiff1 = new CatalogDiff().addTransformsItem(nonBreakingStreamTransform); - final CatalogDiff catalogDiff2 = new CatalogDiff().addTransformsItem(nonBreakingStreamTransform); - final CatalogDiff catalogDiff3 = new CatalogDiff().addTransformsItem(breakingStreamTransform); - final StandardSourceDefinition sourceDef = new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId()); - - when(envVariableFeatureFlags.autoDetectSchema()).thenReturn(true); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(sourceDef); - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - when(webUrlHelper.getConnectionUrl(source.getWorkspaceId(), connectionId)).thenReturn(CONNECTION_URL); - when(webUrlHelper.getConnectionUrl(source.getWorkspaceId(), connectionId2)).thenReturn(CONNECTION_URL); - 
when(webUrlHelper.getConnectionUrl(source.getWorkspaceId(), connectionId3)).thenReturn(CONNECTION_URL); - - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); - - final AirbyteCatalog airbyteCatalogCurrent = new AirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createAirbyteStream(SHOES, Field.of(SKU, JsonSchemaType.STRING)), - CatalogHelpers.createAirbyteStream(DOGS, Field.of(NAME, JsonSchemaType.STRING)))); - - final ConnectionRead connectionRead = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).nonBreakingChangesPreference( - NonBreakingChangesPreference.IGNORE).status(ConnectionStatus.ACTIVE).connectionId(connectionId).notifySchemaChanges(true); - - final ConnectionRead connectionRead2 = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).nonBreakingChangesPreference( - NonBreakingChangesPreference.IGNORE).status(ConnectionStatus.ACTIVE).connectionId(connectionId2).notifySchemaChanges(true); - - final ConnectionRead connectionRead3 = - new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceDef)).nonBreakingChangesPreference( - NonBreakingChangesPreference.DISABLE).status(ConnectionStatus.ACTIVE).connectionId(connectionId3).notifySchemaChanges(false); - - when(connectionsHandler.getConnection(request.getConnectionId())).thenReturn(connectionRead, connectionRead2, connectionRead3); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff1, catalogDiff2, catalogDiff3); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead, connectionRead2, connectionRead3)); - when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); - - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - 
.withCatalogHash("") - .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); - - final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), - io.airbyte.protocol.models.AirbyteCatalog.class); - final io.airbyte.api.model.generated.AirbyteCatalog expectedActorCatalog = CatalogConverter.toApi(persistenceCatalog, sourceDef); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - assertEquals(catalogDiff1, actual.getCatalogDiff()); - assertEquals(expectedActorCatalog, actual.getCatalog()); - assertEquals(ConnectionStatus.ACTIVE, actual.getConnectionStatus()); - - final ArgumentCaptor expectedArgumentCaptor = ArgumentCaptor.forClass(ConnectionUpdate.class); - verify(connectionsHandler, times(3)).updateConnection(expectedArgumentCaptor.capture()); - final List connectionUpdateValues = expectedArgumentCaptor.getAllValues(); - assertEquals(ConnectionStatus.ACTIVE, connectionUpdateValues.get(0).getStatus()); - assertEquals(ConnectionStatus.ACTIVE, connectionUpdateValues.get(1).getStatus()); - assertEquals(ConnectionStatus.INACTIVE, connectionUpdateValues.get(2).getStatus()); - verify(eventRunner).sendSchemaChangeNotification(connectionId, CONNECTION_URL); - verify(eventRunner).sendSchemaChangeNotification(connectionId2, CONNECTION_URL); - verify(eventRunner, times(0)).sendSchemaChangeNotification(connectionId3, CONNECTION_URL); - } - - @Test - void testDiscoverSchemaFromSourceIdWithConnectionUpdateNonSuccessResponse() throws IOException, JsonValidationException, ConfigNotFoundException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()) - .connectionId(UUID.randomUUID()).notifySchemaChange(true); - - // Mock the source definition. 
- when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId())); - // Mock the source itself. - when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - // Mock the Discover job results. - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SynchronousJobMetadata metadata = mock(SynchronousJobMetadata.class); - when(discoverResponse.isSuccess()).thenReturn(false); - when(discoverResponse.getMetadata()).thenReturn(metadata); - when(metadata.isSucceeded()).thenReturn(false); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); - - assertNull(actual.getCatalog()); - assertNotNull(actual.getJobInfo()); - assertFalse(actual.getJobInfo().getSucceeded()); - verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false); - } - - @Test - void testDiscoverSchemaForSourceFromSourceCreate() throws JsonValidationException, IOException, ConfigNotFoundException { - final SourceConnection source = new SourceConnection() - .withSourceDefinitionId(SOURCE.getSourceDefinitionId()) - .withConfiguration(SOURCE.getConfiguration()); - - final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; - final SynchronousJobMetadata metadata = mock(SynchronousJobMetadata.class); - when(discoverResponse.isSuccess()).thenReturn(true); - when(discoverResponse.getOutput()).thenReturn(UUID.randomUUID()); - 
when(discoverResponse.getMetadata()).thenReturn(metadata); - when(metadata.isSucceeded()).thenReturn(true); - - final SourceCoreConfig sourceCoreConfig = new SourceCoreConfig() - .sourceDefinitionId(source.getSourceDefinitionId()) - .connectionConfiguration(source.getConfiguration()) - .workspaceId(source.getWorkspaceId()); - final ActorCatalog actorCatalog = new ActorCatalog() - .withCatalog(Jsons.jsonNode(airbyteCatalog)) - .withCatalogHash("") - .withId(UUID.randomUUID()); - when(configRepository.getActorCatalogById(any())).thenReturn(actorCatalog); - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId())); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn(discoverResponse); - when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( - eq(source.getConfiguration()), - any())).thenReturn(source.getConfiguration()); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceCreate(sourceCoreConfig); - - assertNotNull(actual.getCatalog()); - assertNotNull(actual.getJobInfo()); - assertEquals(actual.getCatalogId(), discoverResponse.getOutput()); - assertTrue(actual.getJobInfo().getSucceeded()); - verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false); - } - - @Test - void testDiscoverSchemaForSourceFromSourceCreateFailed() throws JsonValidationException, IOException, ConfigNotFoundException { - final SourceConnection source = new SourceConnection() - .withSourceDefinitionId(SOURCE.getSourceDefinitionId()) - .withConfiguration(SOURCE.getConfiguration()); - - 
final SourceCoreConfig sourceCoreConfig = new SourceCoreConfig() - .sourceDefinitionId(source.getSourceDefinitionId()) - .connectionConfiguration(source.getConfiguration()) - .workspaceId(source.getWorkspaceId()); - - when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPO) - .withDockerImageTag(SOURCE_DOCKER_TAG) - .withProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withSourceDefinitionId(source.getSourceDefinitionId())); - when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false)) - .thenReturn((SynchronousResponse) jobResponse); - when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( - eq(source.getConfiguration()), - any())).thenReturn(source.getConfiguration()); - when(completedJob.getSuccessOutput()).thenReturn(Optional.empty()); - when(completedJob.getStatus()).thenReturn(JobStatus.FAILED); - - final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceCreate(sourceCoreConfig); - - assertNull(actual.getCatalog()); - assertNotNull(actual.getJobInfo()); - assertFalse(actual.getJobInfo().getSucceeded()); - verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE, SOURCE_DOCKER_TAG, new Version(SOURCE_PROTOCOL_VERSION), - false); - } - - @Test - void testEnumConversion() { - assertTrue(Enums.isCompatible(StandardCheckConnectionOutput.Status.class, CheckConnectionRead.StatusEnum.class)); - assertTrue(Enums.isCompatible(JobStatus.class, io.airbyte.api.model.generated.JobStatus.class)); - } - - @Test - void testSyncConnection() throws IOException, JsonValidationException, ConfigNotFoundException { - final UUID connectionId = UUID.randomUUID(); - - final long jobId = 123L; - final ManualOperationResult manualOperationResult = ManualOperationResult - .builder() - .failingReason(Optional.empty()) - 
.jobId(Optional.of(jobId)) - .build(); - - when(eventRunner.startNewManualSync(connectionId)) - .thenReturn(manualOperationResult); - - doReturn(new JobInfoRead()) - .when(jobConverter).getJobInfoRead(any()); - - schedulerHandler.syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - verify(eventRunner).startNewManualSync(connectionId); - } - - @Test - void testSyncConnectionFailWithOtherSyncRunning() throws IOException { - final UUID connectionId = UUID.randomUUID(); - - final ManualOperationResult manualOperationResult = ManualOperationResult - .builder() - .failingReason(Optional.of("another sync running")) - .jobId(Optional.empty()) - .errorCode(Optional.of(ErrorCode.WORKFLOW_RUNNING)) - .build(); - - when(eventRunner.startNewManualSync(connectionId)) - .thenReturn(manualOperationResult); - - assertThrows(ValueConflictKnownException.class, - () -> schedulerHandler.syncConnection(new ConnectionIdRequestBody().connectionId(connectionId))); - - } - - @Test - void testResetConnection() throws IOException, JsonValidationException, ConfigNotFoundException { - final UUID connectionId = UUID.randomUUID(); - - final long jobId = 123L; - final ManualOperationResult manualOperationResult = ManualOperationResult - .builder() - .failingReason(Optional.empty()) - .jobId(Optional.of(jobId)) - .build(); - - final List streamDescriptors = List.of(STREAM_DESCRIPTOR); - when(configRepository.getAllStreamsForConnection(connectionId)) - .thenReturn(streamDescriptors); - - when(eventRunner.resetConnection(connectionId, streamDescriptors, false)) - .thenReturn(manualOperationResult); - - doReturn(new JobInfoRead()) - .when(jobConverter).getJobInfoRead(any()); - - schedulerHandler.resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - verify(eventRunner).resetConnection(connectionId, streamDescriptors, false); - } - - @Test - void testCancelJob() throws IOException { - final UUID connectionId = UUID.randomUUID(); - final long jobId = 
123L; - final Job job = mock(Job.class); - when(job.getScope()).thenReturn(connectionId.toString()); - when(jobPersistence.getJob(jobId)).thenReturn(job); - - final ManualOperationResult manualOperationResult = ManualOperationResult - .builder() - .failingReason(Optional.empty()) - .jobId(Optional.of(jobId)) - .build(); - - when(eventRunner.startNewCancellation(connectionId)) - .thenReturn(manualOperationResult); - - doReturn(new JobInfoRead()) - .when(jobConverter).getJobInfoRead(any()); - - schedulerHandler.cancelJob(new JobIdRequestBody().id(jobId)); - - verify(eventRunner).startNewCancellation(connectionId); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandlerTest.java deleted file mode 100644 index c4a98f1e85f8..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandlerTest.java +++ /dev/null @@ -1,622 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.CustomSourceDefinitionCreate; -import io.airbyte.api.model.generated.PrivateSourceDefinitionRead; -import io.airbyte.api.model.generated.PrivateSourceDefinitionReadList; -import io.airbyte.api.model.generated.ReleaseStage; -import io.airbyte.api.model.generated.SourceDefinitionCreate; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionRead; -import io.airbyte.api.model.generated.SourceDefinitionReadList; -import io.airbyte.api.model.generated.SourceDefinitionUpdate; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceReadList; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.errors.IdNotFoundKnownException; -import io.airbyte.commons.server.errors.UnsupportedProtocolVersionException; -import io.airbyte.commons.server.scheduler.SynchronousJobMetadata; -import io.airbyte.commons.server.scheduler.SynchronousResponse; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import 
io.airbyte.commons.server.services.AirbyteGithubStore; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.ActorType; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.time.LocalDate; -import java.util.Collections; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -class SourceDefinitionsHandlerTest { - - private static final String TODAY_DATE_STRING = LocalDate.now().toString(); - private static final String DEFAULT_PROTOCOL_VERSION = "0.2.0"; - - private ConfigRepository configRepository; - private StandardSourceDefinition sourceDefinition; - private SourceDefinitionsHandler sourceDefinitionsHandler; - private Supplier uuidSupplier; - private SynchronousSchedulerClient schedulerSynchronousClient; - private AirbyteGithubStore githubStore; - private SourceHandler sourceHandler; - private UUID workspaceId; - private AirbyteProtocolVersionRange protocolVersionRange; - - @SuppressWarnings("unchecked") - @BeforeEach - void setUp() { - configRepository = mock(ConfigRepository.class); - uuidSupplier = mock(Supplier.class); - schedulerSynchronousClient = spy(SynchronousSchedulerClient.class); - githubStore = mock(AirbyteGithubStore.class); - sourceHandler = mock(SourceHandler.class); - workspaceId = 
UUID.randomUUID(); - - sourceDefinition = generateSourceDefinition(); - - protocolVersionRange = new AirbyteProtocolVersionRange(new Version("0.0.0"), new Version("0.3.0")); - - sourceDefinitionsHandler = new SourceDefinitionsHandler(configRepository, uuidSupplier, schedulerSynchronousClient, githubStore, sourceHandler, - protocolVersionRange); - } - - private StandardSourceDefinition generateSourceDefinition() { - final UUID sourceDefinitionId = UUID.randomUUID(); - final ConnectorSpecification spec = new ConnectorSpecification().withConnectionSpecification( - Jsons.jsonNode(ImmutableMap.of("foo", "bar"))); - - return new StandardSourceDefinition() - .withSourceDefinitionId(sourceDefinitionId) - .withName("presto") - .withDocumentationUrl("https://netflix.com") - .withDockerRepository("dockerstuff") - .withDockerImageTag("12.3") - .withIcon("rss.svg") - .withSpec(spec) - .withTombstone(false) - .withReleaseStage(StandardSourceDefinition.ReleaseStage.ALPHA) - .withReleaseDate(TODAY_DATE_STRING) - .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(new ResourceRequirements().withCpuRequest("2"))); - - } - - @Test - @DisplayName("listSourceDefinition should return the right list") - void testListSourceDefinitions() throws JsonValidationException, IOException, URISyntaxException { - final StandardSourceDefinition sourceDefinition2 = generateSourceDefinition(); - - when(configRepository.listStandardSourceDefinitions(false)).thenReturn(Lists.newArrayList(sourceDefinition, sourceDefinition2)); - - final SourceDefinitionRead expectedSourceDefinitionRead1 = new SourceDefinitionRead() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - 
.releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final SourceDefinitionRead expectedSourceDefinitionRead2 = new SourceDefinitionRead() - .sourceDefinitionId(sourceDefinition2.getSourceDefinitionId()) - .name(sourceDefinition2.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition2.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final SourceDefinitionReadList actualSourceDefinitionReadList = sourceDefinitionsHandler.listSourceDefinitions(); - - assertEquals( - Lists.newArrayList(expectedSourceDefinitionRead1, expectedSourceDefinitionRead2), - actualSourceDefinitionReadList.getSourceDefinitions()); - } - - @Test - @DisplayName("listSourceDefinitionsForWorkspace should return the right list") - void testListSourceDefinitionsForWorkspace() throws IOException, URISyntaxException { - final StandardSourceDefinition sourceDefinition2 = generateSourceDefinition(); - - when(configRepository.listPublicSourceDefinitions(false)).thenReturn(Lists.newArrayList(sourceDefinition)); - 
when(configRepository.listGrantedSourceDefinitions(workspaceId, false)).thenReturn(Lists.newArrayList(sourceDefinition2)); - - final SourceDefinitionRead expectedSourceDefinitionRead1 = new SourceDefinitionRead() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final SourceDefinitionRead expectedSourceDefinitionRead2 = new SourceDefinitionRead() - .sourceDefinitionId(sourceDefinition2.getSourceDefinitionId()) - .name(sourceDefinition2.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition2.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final SourceDefinitionReadList actualSourceDefinitionReadList = - 
sourceDefinitionsHandler.listSourceDefinitionsForWorkspace(new WorkspaceIdRequestBody().workspaceId(workspaceId)); - - assertEquals( - Lists.newArrayList(expectedSourceDefinitionRead1, expectedSourceDefinitionRead2), - actualSourceDefinitionReadList.getSourceDefinitions()); - } - - @Test - @DisplayName("listPrivateSourceDefinitions should return the right list") - void testListPrivateSourceDefinitions() throws IOException, URISyntaxException { - final StandardSourceDefinition sourceDefinition2 = generateSourceDefinition(); - - when(configRepository.listGrantableSourceDefinitions(workspaceId, false)).thenReturn( - Lists.newArrayList( - Map.entry(sourceDefinition, false), - Map.entry(sourceDefinition2, true))); - - final SourceDefinitionRead expectedSourceDefinitionRead1 = new SourceDefinitionRead() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final SourceDefinitionRead expectedSourceDefinitionRead2 = new SourceDefinitionRead() - .sourceDefinitionId(sourceDefinition2.getSourceDefinitionId()) - .name(sourceDefinition2.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - 
.icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition2.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final PrivateSourceDefinitionRead expectedSourceDefinitionOptInRead1 = - new PrivateSourceDefinitionRead().sourceDefinition(expectedSourceDefinitionRead1).granted(false); - - final PrivateSourceDefinitionRead expectedSourceDefinitionOptInRead2 = - new PrivateSourceDefinitionRead().sourceDefinition(expectedSourceDefinitionRead2).granted(true); - - final PrivateSourceDefinitionReadList actualSourceDefinitionOptInReadList = sourceDefinitionsHandler.listPrivateSourceDefinitions( - new WorkspaceIdRequestBody().workspaceId(workspaceId)); - - assertEquals( - Lists.newArrayList(expectedSourceDefinitionOptInRead1, expectedSourceDefinitionOptInRead2), - actualSourceDefinitionOptInReadList.getSourceDefinitions()); - } - - @Test - @DisplayName("getSourceDefinition should return the right source") - void testGetSourceDefinition() throws JsonValidationException, ConfigNotFoundException, IOException, URISyntaxException { - when(configRepository.getStandardSourceDefinition(sourceDefinition.getSourceDefinitionId())) - .thenReturn(sourceDefinition); - - final SourceDefinitionRead expectedSourceDefinitionRead = new SourceDefinitionRead() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - 
.icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody = - new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceDefinition.getSourceDefinitionId()); - - final SourceDefinitionRead actualSourceDefinitionRead = sourceDefinitionsHandler.getSourceDefinition(sourceDefinitionIdRequestBody); - - assertEquals(expectedSourceDefinitionRead, actualSourceDefinitionRead); - } - - @Test - @DisplayName("getSourceDefinitionForWorkspace should throw an exception for a missing grant") - void testGetDefinitionWithoutGrantForWorkspace() throws IOException { - when(configRepository.workspaceCanUseDefinition(sourceDefinition.getSourceDefinitionId(), workspaceId)) - .thenReturn(false); - - final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId = new SourceDefinitionIdWithWorkspaceId() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .workspaceId(workspaceId); - - assertThrows(IdNotFoundKnownException.class, () -> sourceDefinitionsHandler.getSourceDefinitionForWorkspace(sourceDefinitionIdWithWorkspaceId)); - } - - @Test - @DisplayName("getSourceDefinitionForWorkspace should return the source if the grant exists") - void testGetDefinitionWithGrantForWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException, URISyntaxException { - when(configRepository.workspaceCanUseDefinition(sourceDefinition.getSourceDefinitionId(), workspaceId)) - .thenReturn(true); - 
when(configRepository.getStandardSourceDefinition(sourceDefinition.getSourceDefinitionId())) - .thenReturn(sourceDefinition); - - final SourceDefinitionRead expectedSourceDefinitionRead = new SourceDefinitionRead() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId = new SourceDefinitionIdWithWorkspaceId() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .workspaceId(workspaceId); - - final SourceDefinitionRead actualSourceDefinitionRead = sourceDefinitionsHandler - .getSourceDefinitionForWorkspace(sourceDefinitionIdWithWorkspaceId); - - assertEquals(expectedSourceDefinitionRead, actualSourceDefinitionRead); - } - - @Test - @DisplayName("createSourceDefinition should not create a sourceDefinition with an unsupported protocol version") - void testCreateSourceDefinitionWithInvalidProtocol() throws URISyntaxException, IOException, JsonValidationException { - final String invalidProtocol = "131.1.2"; - final StandardSourceDefinition sourceDefinition = generateSourceDefinition(); - sourceDefinition.getSpec().setProtocolVersion(invalidProtocol); - final String imageName = sourceDefinition.getDockerRepository() + ":" + 
sourceDefinition.getDockerImageTag(); - - when(uuidSupplier.get()).thenReturn(sourceDefinition.getSourceDefinitionId()); - when(schedulerSynchronousClient.createGetSpecJob(imageName, true)).thenReturn(new SynchronousResponse<>( - sourceDefinition.getSpec(), - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final SourceDefinitionCreate create = new SourceDefinitionCreate() - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(sourceDefinition.getIcon()) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - final CustomSourceDefinitionCreate customCreate = new CustomSourceDefinitionCreate() - .sourceDefinition(create) - .workspaceId(workspaceId); - assertThrows(UnsupportedProtocolVersionException.class, () -> sourceDefinitionsHandler.createCustomSourceDefinition(customCreate)); - - verify(schedulerSynchronousClient).createGetSpecJob(imageName, true); - verify(configRepository, never()) - .writeStandardSourceDefinition( - sourceDefinition - .withReleaseDate(null) - .withReleaseStage(StandardSourceDefinition.ReleaseStage.CUSTOM) - .withProtocolVersion(DEFAULT_PROTOCOL_VERSION)); - } - - @Test - @DisplayName("createCustomSourceDefinition should correctly create a sourceDefinition") - void testCreateCustomSourceDefinition() throws URISyntaxException, IOException, JsonValidationException { - final StandardSourceDefinition sourceDefinition = generateSourceDefinition(); - final String imageName = sourceDefinition.getDockerRepository() + ":" + sourceDefinition.getDockerImageTag(); - - 
when(uuidSupplier.get()).thenReturn(sourceDefinition.getSourceDefinitionId()); - when(schedulerSynchronousClient.createGetSpecJob(imageName, true)).thenReturn(new SynchronousResponse<>( - sourceDefinition.getSpec(), - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final SourceDefinitionCreate create = new SourceDefinitionCreate() - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(sourceDefinition.getIcon()) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final CustomSourceDefinitionCreate customCreate = new CustomSourceDefinitionCreate() - .sourceDefinition(create) - .workspaceId(workspaceId); - - final SourceDefinitionRead expectedRead = new SourceDefinitionRead() - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .protocolVersion(DEFAULT_PROTOCOL_VERSION) - .releaseStage(ReleaseStage.CUSTOM) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final SourceDefinitionRead actualRead = sourceDefinitionsHandler.createCustomSourceDefinition(customCreate); - - assertEquals(expectedRead, 
actualRead); - verify(schedulerSynchronousClient).createGetSpecJob(imageName, true); - verify(configRepository).writeCustomSourceDefinition( - sourceDefinition - .withReleaseDate(null) - .withReleaseStage(StandardSourceDefinition.ReleaseStage.CUSTOM) - .withProtocolVersion(DEFAULT_PROTOCOL_VERSION) - .withCustom(true), - workspaceId); - } - - @Test - @DisplayName("createCustomSourceDefinition should not create a sourceDefinition with unspported protocol version") - void testCreateCustomSourceDefinitionWithInvalidProtocol() throws URISyntaxException, IOException, JsonValidationException { - final String invalidVersion = "130.0.0"; - final StandardSourceDefinition sourceDefinition = generateSourceDefinition(); - sourceDefinition.getSpec().setProtocolVersion(invalidVersion); - final String imageName = sourceDefinition.getDockerRepository() + ":" + sourceDefinition.getDockerImageTag(); - - when(uuidSupplier.get()).thenReturn(sourceDefinition.getSourceDefinitionId()); - when(schedulerSynchronousClient.createGetSpecJob(imageName, true)).thenReturn(new SynchronousResponse<>( - sourceDefinition.getSpec(), - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final SourceDefinitionCreate create = new SourceDefinitionCreate() - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(sourceDefinition.getIcon()) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final CustomSourceDefinitionCreate customCreate = new CustomSourceDefinitionCreate() - .sourceDefinition(create) - .workspaceId(workspaceId); - - assertThrows(UnsupportedProtocolVersionException.class, 
() -> sourceDefinitionsHandler.createCustomSourceDefinition(customCreate)); - - verify(schedulerSynchronousClient).createGetSpecJob(imageName, true); - verify(configRepository, never()).writeCustomSourceDefinition( - sourceDefinition - .withReleaseDate(null) - .withReleaseStage(StandardSourceDefinition.ReleaseStage.CUSTOM) - .withProtocolVersion(invalidVersion) - .withCustom(true), - workspaceId); - } - - @Test - @DisplayName("updateSourceDefinition should correctly update a sourceDefinition") - void testUpdateSourceDefinition() throws ConfigNotFoundException, IOException, JsonValidationException, URISyntaxException { - when(configRepository.getStandardSourceDefinition(sourceDefinition.getSourceDefinitionId())).thenReturn(sourceDefinition); - final String newDockerImageTag = "averydifferenttag"; - final String newProtocolVersion = "0.2.1"; - final SourceDefinitionRead sourceDefinition = sourceDefinitionsHandler - .getSourceDefinition(new SourceDefinitionIdRequestBody().sourceDefinitionId(this.sourceDefinition.getSourceDefinitionId())); - final String currentTag = sourceDefinition.getDockerImageTag(); - assertNotEquals(newDockerImageTag, currentTag); - - final String newImageName = this.sourceDefinition.getDockerRepository() + ":" + newDockerImageTag; - final ConnectorSpecification newSpec = new ConnectorSpecification() - .withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo2", "bar2"))) - .withProtocolVersion(newProtocolVersion); - when(schedulerSynchronousClient.createGetSpecJob(newImageName, false)).thenReturn(new SynchronousResponse<>( - newSpec, - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final StandardSourceDefinition updatedSource = Jsons.clone(this.sourceDefinition) - .withDockerImageTag(newDockerImageTag).withSpec(newSpec).withProtocolVersion(newProtocolVersion); - - final SourceDefinitionRead sourceDefinitionRead = sourceDefinitionsHandler - .updateSourceDefinition( - new 
SourceDefinitionUpdate().sourceDefinitionId(this.sourceDefinition.getSourceDefinitionId()).dockerImageTag(newDockerImageTag)); - - assertEquals(newDockerImageTag, sourceDefinitionRead.getDockerImageTag()); - verify(schedulerSynchronousClient).createGetSpecJob(newImageName, false); - verify(configRepository).writeStandardSourceDefinition(updatedSource); - - verify(configRepository).clearUnsupportedProtocolVersionFlag(updatedSource.getSourceDefinitionId(), ActorType.SOURCE, protocolVersionRange); - } - - @Test - @DisplayName("updateSourceDefinition should not update a sourceDefinition with an invalid protocol version") - void testUpdateSourceDefinitionWithInvalidProtocol() throws ConfigNotFoundException, IOException, JsonValidationException, URISyntaxException { - when(configRepository.getStandardSourceDefinition(sourceDefinition.getSourceDefinitionId())).thenReturn(sourceDefinition); - final String newDockerImageTag = "averydifferenttag"; - final String newProtocolVersion = "132.2.1"; - final SourceDefinitionRead sourceDefinition = sourceDefinitionsHandler - .getSourceDefinition(new SourceDefinitionIdRequestBody().sourceDefinitionId(this.sourceDefinition.getSourceDefinitionId())); - final String currentTag = sourceDefinition.getDockerImageTag(); - assertNotEquals(newDockerImageTag, currentTag); - - final String newImageName = this.sourceDefinition.getDockerRepository() + ":" + newDockerImageTag; - final ConnectorSpecification newSpec = new ConnectorSpecification() - .withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo2", "bar2"))) - .withProtocolVersion(newProtocolVersion); - when(schedulerSynchronousClient.createGetSpecJob(newImageName, false)).thenReturn(new SynchronousResponse<>( - newSpec, - SynchronousJobMetadata.mock(ConfigType.GET_SPEC))); - - final StandardSourceDefinition updatedSource = Jsons.clone(this.sourceDefinition) - .withDockerImageTag(newDockerImageTag).withSpec(newSpec).withProtocolVersion(newProtocolVersion); - - 
assertThrows(UnsupportedProtocolVersionException.class, () -> sourceDefinitionsHandler - .updateSourceDefinition( - new SourceDefinitionUpdate().sourceDefinitionId(this.sourceDefinition.getSourceDefinitionId()).dockerImageTag(newDockerImageTag))); - - verify(schedulerSynchronousClient).createGetSpecJob(newImageName, false); - verify(configRepository, never()).writeStandardSourceDefinition(updatedSource); - } - - @Test - @DisplayName("deleteSourceDefinition should correctly delete a sourceDefinition") - void testDeleteSourceDefinition() throws ConfigNotFoundException, IOException, JsonValidationException { - final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody = - new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceDefinition.getSourceDefinitionId()); - final StandardSourceDefinition updatedSourceDefinition = Jsons.clone(this.sourceDefinition).withTombstone(true); - final SourceRead source = new SourceRead(); - - when(configRepository.getStandardSourceDefinition(sourceDefinition.getSourceDefinitionId())) - .thenReturn(sourceDefinition); - when(sourceHandler.listSourcesForSourceDefinition(sourceDefinitionIdRequestBody)) - .thenReturn(new SourceReadList().sources(Collections.singletonList(source))); - - assertFalse(sourceDefinition.getTombstone()); - - sourceDefinitionsHandler.deleteSourceDefinition(sourceDefinitionIdRequestBody); - - verify(sourceHandler).deleteSource(source); - verify(configRepository).writeStandardSourceDefinition(updatedSourceDefinition); - } - - @Test - @DisplayName("grantSourceDefinitionToWorkspace should correctly create a workspace grant") - void testGrantSourceDefinitionToWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException, URISyntaxException { - when(configRepository.getStandardSourceDefinition(sourceDefinition.getSourceDefinitionId())) - .thenReturn(sourceDefinition); - - final SourceDefinitionRead expectedSourceDefinitionRead = new SourceDefinitionRead() - 
.sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .name(sourceDefinition.getName()) - .dockerRepository(sourceDefinition.getDockerRepository()) - .dockerImageTag(sourceDefinition.getDockerImageTag()) - .documentationUrl(new URI(sourceDefinition.getDocumentationUrl())) - .icon(SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon())) - .releaseStage(ReleaseStage.fromValue(sourceDefinition.getReleaseStage().value())) - .releaseDate(LocalDate.parse(sourceDefinition.getReleaseDate())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - - final PrivateSourceDefinitionRead expectedPrivateSourceDefinitionRead = - new PrivateSourceDefinitionRead().sourceDefinition(expectedSourceDefinitionRead).granted(true); - - final PrivateSourceDefinitionRead actualPrivateSourceDefinitionRead = - sourceDefinitionsHandler.grantSourceDefinitionToWorkspace( - new SourceDefinitionIdWithWorkspaceId() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .workspaceId(workspaceId)); - - assertEquals(expectedPrivateSourceDefinitionRead, actualPrivateSourceDefinitionRead); - verify(configRepository).writeActorDefinitionWorkspaceGrant( - sourceDefinition.getSourceDefinitionId(), - workspaceId); - } - - @Test - @DisplayName("revokeSourceDefinitionFromWorkspace should correctly delete a workspace grant") - void testRevokeSourceDefinitionFromWorkspace() throws IOException { - sourceDefinitionsHandler.revokeSourceDefinitionFromWorkspace(new SourceDefinitionIdWithWorkspaceId() - .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) - .workspaceId(workspaceId)); - verify(configRepository).deleteActorDefinitionWorkspaceGrant( - sourceDefinition.getSourceDefinitionId(), - workspaceId); - } - - @Nested - 
@DisplayName("listLatest") - class listLatest { - - @Test - @DisplayName("should return the latest list") - void testCorrect() throws IOException, InterruptedException { - final StandardSourceDefinition sourceDefinition = generateSourceDefinition(); - when(githubStore.getLatestSources()).thenReturn(Collections.singletonList(sourceDefinition)); - - final var sourceDefinitionReadList = sourceDefinitionsHandler.listLatestSourceDefinitions().getSourceDefinitions(); - assertEquals(1, sourceDefinitionReadList.size()); - - final var sourceDefinitionRead = sourceDefinitionReadList.get(0); - assertEquals(SourceDefinitionsHandler.buildSourceDefinitionRead(sourceDefinition), sourceDefinitionRead); - } - - @Test - @DisplayName("returns empty collection if cannot find latest definitions") - void testHttpTimeout() { - assertEquals(0, sourceDefinitionsHandler.listLatestSourceDefinitions().getSourceDefinitions().size()); - } - - @Test - @DisplayName("Icon should be an SVG icon") - void testIconHoldsData() { - final String icon = SourceDefinitionsHandler.loadIcon(sourceDefinition.getIcon()); - assertNotNull(icon); - assertTrue(icon.contains(" uuidGenerator; - private JsonSecretsProcessor secretsProcessor; - private ConnectorSpecification connectorSpecification; - private OAuthConfigSupplier oAuthConfigSupplier; - - private static final String SHOES = "shoes"; - private static final String SKU = "sku"; - private static final AirbyteCatalog airbyteCatalog = CatalogHelpers.createAirbyteCatalog(SHOES, - Field.of(SKU, JsonSchemaType.STRING)); - - // needs to match name of file in src/test/resources/icons - private static final String ICON = "test-source.svg"; - - @SuppressWarnings("unchecked") - @BeforeEach - void setUp() throws IOException { - configRepository = mock(ConfigRepository.class); - secretsRepositoryReader = mock(SecretsRepositoryReader.class); - secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); - validator = mock(JsonSchemaValidator.class); - connectionsHandler 
= mock(ConnectionsHandler.class); - configurationUpdate = mock(ConfigurationUpdate.class); - uuidGenerator = mock(Supplier.class); - secretsProcessor = mock(JsonSecretsProcessor.class); - oAuthConfigSupplier = mock(OAuthConfigSupplier.class); - - connectorSpecification = ConnectorSpecificationHelpers.generateConnectorSpecification(); - - standardSourceDefinition = new StandardSourceDefinition() - .withSourceDefinitionId(UUID.randomUUID()) - .withName("marketo") - .withDockerRepository("thebestrepo") - .withDockerImageTag("thelatesttag") - .withDocumentationUrl("https://wikipedia.org") - .withSpec(connectorSpecification) - .withIcon(ICON); - - sourceDefinitionSpecificationRead = new SourceDefinitionSpecificationRead() - .sourceDefinitionId(standardSourceDefinition.getSourceDefinitionId()) - .connectionSpecification(connectorSpecification.getConnectionSpecification()) - .documentationUrl(connectorSpecification.getDocumentationUrl().toString()); - - sourceConnection = SourceHelpers.generateSource(standardSourceDefinition.getSourceDefinitionId()); - - sourceHandler = new SourceHandler(configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - validator, - connectionsHandler, - uuidGenerator, - secretsProcessor, - configurationUpdate, - oAuthConfigSupplier); - } - - @Test - void testCreateSource() throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceCreate sourceCreate = new SourceCreate() - .name(sourceConnection.getName()) - .workspaceId(sourceConnection.getWorkspaceId()) - .sourceDefinitionId(standardSourceDefinition.getSourceDefinitionId()) - .connectionConfiguration(sourceConnection.getConfiguration()); - - when(uuidGenerator.get()).thenReturn(sourceConnection.getSourceId()); - when(configRepository.getSourceConnection(sourceConnection.getSourceId())).thenReturn(sourceConnection); - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - 
.thenReturn(standardSourceDefinition); - when(oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionSpecificationRead.getSourceDefinitionId(), sourceConnection.getWorkspaceId(), - sourceCreate.getConnectionConfiguration())).thenReturn(sourceCreate.getConnectionConfiguration()); - when(secretsProcessor.prepareSecretsForOutput(sourceCreate.getConnectionConfiguration(), - sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(sourceCreate.getConnectionConfiguration()); - - final SourceRead actualSourceRead = sourceHandler.createSource(sourceCreate); - - final SourceRead expectedSourceRead = SourceHelpers.getSourceRead(sourceConnection, standardSourceDefinition) - .connectionConfiguration(sourceConnection.getConfiguration()); - - assertEquals(expectedSourceRead, actualSourceRead); - - verify(secretsProcessor).prepareSecretsForOutput(sourceCreate.getConnectionConfiguration(), - sourceDefinitionSpecificationRead.getConnectionSpecification()); - verify(oAuthConfigSupplier).maskSourceOAuthParameters(sourceDefinitionSpecificationRead.getSourceDefinitionId(), - sourceConnection.getWorkspaceId(), sourceCreate.getConnectionConfiguration()); - verify(secretsRepositoryWriter).writeSourceConnection(sourceConnection, connectorSpecification); - verify(validator).ensure(sourceDefinitionSpecificationRead.getConnectionSpecification(), sourceConnection.getConfiguration()); - } - - @Test - void testUpdateSource() throws JsonValidationException, ConfigNotFoundException, IOException { - final String updatedSourceName = "my updated source name"; - final JsonNode newConfiguration = sourceConnection.getConfiguration(); - ((ObjectNode) newConfiguration).put("apiKey", "987-xyz"); - - final SourceConnection expectedSourceConnection = Jsons.clone(sourceConnection) - .withName(updatedSourceName) - .withConfiguration(newConfiguration) - .withTombstone(false); - - final SourceUpdate sourceUpdate = new SourceUpdate() - .name(updatedSourceName) - 
.sourceId(sourceConnection.getSourceId()) - .connectionConfiguration(newConfiguration); - - when(secretsProcessor - .copySecrets(sourceConnection.getConfiguration(), newConfiguration, sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(newConfiguration); - when(secretsProcessor.prepareSecretsForOutput(newConfiguration, sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(newConfiguration); - when(oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionSpecificationRead.getSourceDefinitionId(), sourceConnection.getWorkspaceId(), - newConfiguration)).thenReturn(newConfiguration); - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceConnection(sourceConnection.getSourceId())) - .thenReturn(sourceConnection) - .thenReturn(expectedSourceConnection); - when(configurationUpdate.source(sourceConnection.getSourceId(), updatedSourceName, newConfiguration)) - .thenReturn(expectedSourceConnection); - - final SourceRead actualSourceRead = sourceHandler.updateSource(sourceUpdate); - final SourceRead expectedSourceRead = - SourceHelpers.getSourceRead(expectedSourceConnection, standardSourceDefinition).connectionConfiguration(newConfiguration); - - assertEquals(expectedSourceRead, actualSourceRead); - - verify(secretsProcessor).prepareSecretsForOutput(newConfiguration, sourceDefinitionSpecificationRead.getConnectionSpecification()); - verify(oAuthConfigSupplier).maskSourceOAuthParameters(sourceDefinitionSpecificationRead.getSourceDefinitionId(), - sourceConnection.getWorkspaceId(), newConfiguration); - verify(secretsRepositoryWriter).writeSourceConnection(expectedSourceConnection, connectorSpecification); - 
verify(validator).ensure(sourceDefinitionSpecificationRead.getConnectionSpecification(), newConfiguration); - } - - @Test - void testGetSource() throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceRead expectedSourceRead = SourceHelpers.getSourceRead(sourceConnection, standardSourceDefinition); - final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody().sourceId(expectedSourceRead.getSourceId()); - - when(configRepository.getSourceConnection(sourceConnection.getSourceId())).thenReturn(sourceConnection); - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())).thenReturn(standardSourceDefinition); - when( - secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(sourceConnection.getConfiguration()); - - final SourceRead actualSourceRead = sourceHandler.getSource(sourceIdRequestBody); - - assertEquals(expectedSourceRead, actualSourceRead); - - // make sure the icon was loaded into actual svg content - assertTrue(expectedSourceRead.getIcon().startsWith("")); - - verify(secretsProcessor).prepareSecretsForOutput(sourceConnection.getConfiguration(), - sourceDefinitionSpecificationRead.getConnectionSpecification()); - } - - @Test - void testCloneSourceWithoutConfigChange() throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceConnection clonedConnection = SourceHelpers.generateSource(standardSourceDefinition.getSourceDefinitionId()); - final SourceRead expectedClonedSourceRead = SourceHelpers.getSourceRead(clonedConnection, standardSourceDefinition); - final SourceRead sourceRead = SourceHelpers.getSourceRead(sourceConnection, standardSourceDefinition); - - final SourceCloneRequestBody sourceCloneRequestBody = new 
SourceCloneRequestBody().sourceCloneId(sourceRead.getSourceId()); - - when(uuidGenerator.get()).thenReturn(clonedConnection.getSourceId()); - when(secretsRepositoryReader.getSourceConnectionWithSecrets(sourceConnection.getSourceId())).thenReturn(sourceConnection); - when(configRepository.getSourceConnection(clonedConnection.getSourceId())).thenReturn(clonedConnection); - - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())).thenReturn(standardSourceDefinition); - when( - secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(sourceConnection.getConfiguration()); - - final SourceRead actualSourceRead = sourceHandler.cloneSource(sourceCloneRequestBody); - - assertEquals(expectedClonedSourceRead, actualSourceRead); - } - - @Test - void testCloneSourceWithConfigChange() throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceConnection clonedConnection = SourceHelpers.generateSource(standardSourceDefinition.getSourceDefinitionId()); - final SourceRead expectedClonedSourceRead = SourceHelpers.getSourceRead(clonedConnection, standardSourceDefinition); - final SourceRead sourceRead = SourceHelpers.getSourceRead(sourceConnection, standardSourceDefinition); - - final SourceCloneConfiguration sourceCloneConfiguration = new SourceCloneConfiguration().name("Copy Name"); - final SourceCloneRequestBody sourceCloneRequestBody = - new SourceCloneRequestBody().sourceCloneId(sourceRead.getSourceId()).sourceConfiguration(sourceCloneConfiguration); - - when(uuidGenerator.get()).thenReturn(clonedConnection.getSourceId()); - when(secretsRepositoryReader.getSourceConnectionWithSecrets(sourceConnection.getSourceId())).thenReturn(sourceConnection); - 
when(configRepository.getSourceConnection(clonedConnection.getSourceId())).thenReturn(clonedConnection); - - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())).thenReturn(standardSourceDefinition); - when( - secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(sourceConnection.getConfiguration()); - - final SourceRead actualSourceRead = sourceHandler.cloneSource(sourceCloneRequestBody); - - assertEquals(expectedClonedSourceRead, actualSourceRead); - } - - @Test - void testListSourcesForWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceRead expectedSourceRead = SourceHelpers.getSourceRead(sourceConnection, standardSourceDefinition); - final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(sourceConnection.getWorkspaceId()); - - when(configRepository.getSourceConnection(sourceConnection.getSourceId())).thenReturn(sourceConnection); - when(configRepository.getSourceConnection(sourceConnection.getSourceId())).thenReturn(sourceConnection); - - when(configRepository.listWorkspaceSourceConnection(sourceConnection.getWorkspaceId())).thenReturn(Lists.newArrayList(sourceConnection)); - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())).thenReturn(standardSourceDefinition); - when( - secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(sourceConnection.getConfiguration()); - - final SourceReadList actualSourceReadList = 
sourceHandler.listSourcesForWorkspace(workspaceIdRequestBody); - - assertEquals(expectedSourceRead, actualSourceReadList.getSources().get(0)); - verify(secretsProcessor).prepareSecretsForOutput(sourceConnection.getConfiguration(), - sourceDefinitionSpecificationRead.getConnectionSpecification()); - } - - @Test - void testListSourcesForSourceDefinition() throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceRead expectedSourceRead = SourceHelpers.getSourceRead(sourceConnection, standardSourceDefinition); - final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody = - new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceConnection.getSourceDefinitionId()); - - when(configRepository.getSourceConnection(sourceConnection.getSourceId())).thenReturn(sourceConnection); - when(configRepository.listSourcesForDefinition(sourceConnection.getSourceDefinitionId())).thenReturn(Lists.newArrayList(sourceConnection)); - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())).thenReturn(standardSourceDefinition); - when( - secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(sourceConnection.getConfiguration()); - - final SourceReadList actualSourceReadList = sourceHandler.listSourcesForSourceDefinition(sourceDefinitionIdRequestBody); - - assertEquals(expectedSourceRead, actualSourceReadList.getSources().get(0)); - verify(secretsProcessor).prepareSecretsForOutput(sourceConnection.getConfiguration(), - sourceDefinitionSpecificationRead.getConnectionSpecification()); - } - - @Test - void testSearchSources() throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceRead expectedSourceRead = 
SourceHelpers.getSourceRead(sourceConnection, standardSourceDefinition); - - when(configRepository.getSourceConnection(sourceConnection.getSourceId())).thenReturn(sourceConnection); - when(configRepository.listSourceConnection()).thenReturn(Lists.newArrayList(sourceConnection)); - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())).thenReturn(standardSourceDefinition); - when( - secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(sourceConnection.getConfiguration()); - - when(connectionsHandler.matchSearch(new SourceSearch(), expectedSourceRead)).thenReturn(true); - SourceReadList actualSourceReadList = sourceHandler.searchSources(new SourceSearch()); - assertEquals(1, actualSourceReadList.getSources().size()); - assertEquals(expectedSourceRead, actualSourceReadList.getSources().get(0)); - - when(connectionsHandler.matchSearch(new SourceSearch(), expectedSourceRead)).thenReturn(false); - actualSourceReadList = sourceHandler.searchSources(new SourceSearch()); - assertEquals(0, actualSourceReadList.getSources().size()); - } - - @Test - void testDeleteSource() throws JsonValidationException, ConfigNotFoundException, IOException { - final JsonNode newConfiguration = sourceConnection.getConfiguration(); - ((ObjectNode) newConfiguration).put("apiKey", "987-xyz"); - - final SourceConnection expectedSourceConnection = Jsons.clone(sourceConnection).withTombstone(true); - - final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody().sourceId(sourceConnection.getSourceId()); - final StandardSync standardSync = ConnectionHelpers.generateSyncWithSourceId(sourceConnection.getSourceId()); - final ConnectionRead connectionRead = 
ConnectionHelpers.generateExpectedConnectionRead(standardSync); - final ConnectionReadList connectionReadList = new ConnectionReadList().connections(Collections.singletonList(connectionRead)); - final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(sourceConnection.getWorkspaceId()); - - when(configRepository.getSourceConnection(sourceConnection.getSourceId())) - .thenReturn(sourceConnection) - .thenReturn(expectedSourceConnection); - when(secretsRepositoryReader.getSourceConnectionWithSecrets(sourceConnection.getSourceId())) - .thenReturn(sourceConnection) - .thenReturn(expectedSourceConnection); - when(oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionSpecificationRead.getSourceDefinitionId(), sourceConnection.getWorkspaceId(), - newConfiguration)).thenReturn(newConfiguration); - when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) - .thenReturn(standardSourceDefinition); - when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())).thenReturn(standardSourceDefinition); - when(connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody)).thenReturn(connectionReadList); - when( - secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification())) - .thenReturn(sourceConnection.getConfiguration()); - - sourceHandler.deleteSource(sourceIdRequestBody); - - verify(secretsRepositoryWriter).writeSourceConnection(expectedSourceConnection, connectorSpecification); - verify(connectionsHandler).listConnectionsForWorkspace(workspaceIdRequestBody); - verify(connectionsHandler).deleteConnection(connectionRead.getConnectionId()); - } - - @Test - void testWriteDiscoverCatalogResult() throws JsonValidationException, IOException { - UUID actorId = UUID.randomUUID(); - UUID catalogId = UUID.randomUUID(); - String connectorVersion = "0.0.1"; - String hashValue = 
"0123456789abcd"; - final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromSource(actorId); - - SourceDiscoverSchemaWriteRequestBody request = new SourceDiscoverSchemaWriteRequestBody().catalog( - CatalogConverter.toApi(airbyteCatalog, sourceDefinition)).sourceId(actorId).connectorVersion(connectorVersion).configurationHash(hashValue); - - when(configRepository.writeActorCatalogFetchEvent(airbyteCatalog, actorId, connectorVersion, hashValue)).thenReturn(catalogId); - DiscoverCatalogResult result = sourceHandler.writeDiscoverCatalogResult(request); - - verify(configRepository).writeActorCatalogFetchEvent(airbyteCatalog, actorId, connectorVersion, hashValue); - assert (result.getCatalogId()).equals(catalogId); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/StateHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/StateHandlerTest.java deleted file mode 100644 index 381924a7e84c..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/StateHandlerTest.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.GlobalState; -import io.airbyte.api.model.generated.StreamState; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.StateType; -import io.airbyte.config.StateWrapper; -import io.airbyte.config.persistence.StatePersistence; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.workers.helper.ProtocolConverters; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class StateHandlerTest { - - public static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final JsonNode JSON_BLOB = Jsons.deserialize("{\"users\": 10}"); - public static final StreamDescriptor STREAM_DESCRIPTOR1 = new StreamDescriptor().withName("coffee"); - public static final StreamDescriptor STREAM_DESCRIPTOR2 = new StreamDescriptor().withName("tea"); - - private StateHandler stateHandler; - private StatePersistence statePersistence; - - @BeforeEach - void setup() { - statePersistence = mock(StatePersistence.class); - stateHandler = new StateHandler(statePersistence); - } - - @Test - void testGetCurrentStateEmpty() throws IOException { - 
when(statePersistence.getCurrentState(CONNECTION_ID)).thenReturn(Optional.empty()); - - final ConnectionState expected = new ConnectionState().connectionId(CONNECTION_ID).stateType(ConnectionStateType.NOT_SET).streamState(null); - final ConnectionState actual = stateHandler.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID)); - assertEquals(expected, actual); - } - - @Test - void testGetLegacyState() throws IOException { - when(statePersistence.getCurrentState(CONNECTION_ID)).thenReturn(Optional.of( - new StateWrapper() - .withStateType(StateType.LEGACY) - .withLegacyState(JSON_BLOB))); - - final ConnectionState expected = new ConnectionState() - .connectionId(CONNECTION_ID) - .stateType(ConnectionStateType.LEGACY) - .streamState(null) - .state(JSON_BLOB); - final ConnectionState actual = stateHandler.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID)); - assertEquals(expected, actual); - } - - @Test - void testGetGlobalState() throws IOException { - when(statePersistence.getCurrentState(CONNECTION_ID)).thenReturn(Optional.of( - new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(new AirbyteGlobalState() - .withSharedState(JSON_BLOB) - .withStreamStates(List.of( - new AirbyteStreamState().withStreamDescriptor(STREAM_DESCRIPTOR1).withStreamState(JSON_BLOB), - new AirbyteStreamState().withStreamDescriptor(STREAM_DESCRIPTOR2).withStreamState(JSON_BLOB))))))); - - final ConnectionState expected = new ConnectionState() - .connectionId(CONNECTION_ID) - .stateType(ConnectionStateType.GLOBAL) - .streamState(null) - .globalState(new GlobalState().sharedState(JSON_BLOB).streamStates(List.of( - new StreamState().streamDescriptor(ProtocolConverters.streamDescriptorToApi(STREAM_DESCRIPTOR1)).streamState(JSON_BLOB), - new StreamState().streamDescriptor(ProtocolConverters.streamDescriptorToApi(STREAM_DESCRIPTOR2)).streamState(JSON_BLOB)))); - final 
ConnectionState actual = stateHandler.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID)); - assertEquals(expected, actual); - } - - @Test - void testGetStreamState() throws IOException { - when(statePersistence.getCurrentState(CONNECTION_ID)).thenReturn(Optional.of( - new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(List.of( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM_DESCRIPTOR1).withStreamState(JSON_BLOB)), - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM_DESCRIPTOR2).withStreamState(JSON_BLOB)))))); - - final ConnectionState expected = new ConnectionState() - .connectionId(CONNECTION_ID) - .stateType(ConnectionStateType.STREAM) - .streamState(List.of( - new StreamState().streamDescriptor(ProtocolConverters.streamDescriptorToApi(STREAM_DESCRIPTOR1)).streamState(JSON_BLOB), - new StreamState().streamDescriptor(ProtocolConverters.streamDescriptorToApi(STREAM_DESCRIPTOR2)).streamState(JSON_BLOB))); - final ConnectionState actual = stateHandler.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID)); - assertEquals(expected, actual); - } - - // the api type has an extra type, so the verifying the compatibility of the type conversion is more - // involved - @Test - void testEnumConversion() { - assertEquals(3, AirbyteStateType.class.getEnumConstants().length); - assertEquals(4, ConnectionStateType.class.getEnumConstants().length); - - // to AirbyteStateType => ConnectionStateType - assertEquals(ConnectionStateType.GLOBAL, Enums.convertTo(AirbyteStateType.GLOBAL, ConnectionStateType.class)); - assertEquals(ConnectionStateType.STREAM, Enums.convertTo(AirbyteStateType.STREAM, ConnectionStateType.class)); - assertEquals(ConnectionStateType.LEGACY, Enums.convertTo(AirbyteStateType.LEGACY, ConnectionStateType.class)); - - // to ConnectionStateType => 
AirbyteStateType - assertEquals(AirbyteStateType.GLOBAL, Enums.convertTo(ConnectionStateType.GLOBAL, AirbyteStateType.class)); - assertEquals(AirbyteStateType.STREAM, Enums.convertTo(ConnectionStateType.STREAM, AirbyteStateType.class)); - assertEquals(AirbyteStateType.LEGACY, Enums.convertTo(ConnectionStateType.LEGACY, AirbyteStateType.class)); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandlerTest.java deleted file mode 100644 index ff4db111593c..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandlerTest.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.WebBackendCheckUpdatesRead; -import io.airbyte.commons.server.services.AirbyteGithubStore; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigRepository; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class WebBackendCheckUpdatesHandlerTest { - - ConfigRepository configRepository; - AirbyteGithubStore githubStore; - WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler; - - final static boolean INCLUDE_TOMBSTONE = false; - - @BeforeEach - void beforeEach() { - configRepository = mock(ConfigRepository.class); - githubStore = mock(AirbyteGithubStore.class); - webBackendCheckUpdatesHandler = new 
WebBackendCheckUpdatesHandler(configRepository, githubStore); - } - - @Test - void testCheckWithoutUpdate() throws IOException, InterruptedException { - final UUID source1 = UUID.randomUUID(); - final UUID source2 = UUID.randomUUID(); - final String sourceTag1 = "1.0.0"; - final String sourceTag2 = "2.0.0"; - - final UUID dest1 = UUID.randomUUID(); - final UUID dest2 = UUID.randomUUID(); - final String destTag1 = "0.1.0"; - final String destTag2 = "0.2.0"; - - setMocks( - List.of(Map.entry(source1, sourceTag1), Map.entry(source2, sourceTag2), Map.entry(source2, sourceTag2)), - List.of(Map.entry(source1, sourceTag1), Map.entry(source2, sourceTag2)), - List.of(Map.entry(dest1, destTag1), Map.entry(dest2, destTag2)), - List.of(Map.entry(dest1, destTag1), Map.entry(dest2, destTag2))); - - final WebBackendCheckUpdatesRead actual = webBackendCheckUpdatesHandler.checkUpdates(); - - assertEquals(new WebBackendCheckUpdatesRead().destinationDefinitions(0).sourceDefinitions(0), actual); - } - - @Test - void testCheckWithUpdate() throws IOException, InterruptedException { - final UUID source1 = UUID.randomUUID(); - final UUID source2 = UUID.randomUUID(); - final String sourceTag1 = "1.1.0"; - final String sourceTag2 = "2.1.0"; - - final UUID dest1 = UUID.randomUUID(); - final UUID dest2 = UUID.randomUUID(); - final String destTag1 = "0.1.0"; - final String destTag2 = "0.2.0"; - - setMocks( - List.of(Map.entry(source1, sourceTag1), Map.entry(source2, sourceTag2), Map.entry(source2, sourceTag2)), - List.of(Map.entry(source1, "1.1.1"), Map.entry(source2, sourceTag2)), - List.of(Map.entry(dest1, destTag1), Map.entry(dest2, destTag2), Map.entry(dest2, destTag2)), - List.of(Map.entry(dest1, destTag1), Map.entry(dest2, "0.3.0"))); - - final WebBackendCheckUpdatesRead actual = webBackendCheckUpdatesHandler.checkUpdates(); - - assertEquals(new WebBackendCheckUpdatesRead().destinationDefinitions(2).sourceDefinitions(1), actual); - } - - @Test - void 
testCheckWithMissingActorDefFromLatest() throws IOException, InterruptedException { - final UUID source1 = UUID.randomUUID(); - final UUID source2 = UUID.randomUUID(); - final String sourceTag1 = "1.0.0"; - final String sourceTag2 = "2.0.0"; - - final UUID dest1 = UUID.randomUUID(); - final UUID dest2 = UUID.randomUUID(); - final String destTag1 = "0.1.0"; - final String destTag2 = "0.2.0"; - - setMocks( - List.of(Map.entry(source1, sourceTag1), Map.entry(source2, sourceTag2), Map.entry(source2, sourceTag2)), - List.of(Map.entry(source2, sourceTag2)), - List.of(Map.entry(dest1, destTag1), Map.entry(dest2, destTag2)), - List.of(Map.entry(dest1, destTag1))); - - final WebBackendCheckUpdatesRead actual = webBackendCheckUpdatesHandler.checkUpdates(); - - assertEquals(new WebBackendCheckUpdatesRead().destinationDefinitions(0).sourceDefinitions(0), actual); - } - - @Test - void testCheckErrorNoCurrentDestinations() throws IOException, InterruptedException { - setMocksForExceptionCases(); - when(configRepository.listStandardDestinationDefinitions(INCLUDE_TOMBSTONE)).thenThrow(new IOException("unable to read current destinations")); - - final WebBackendCheckUpdatesRead actual = webBackendCheckUpdatesHandler.checkUpdates(); - - assertEquals(new WebBackendCheckUpdatesRead().destinationDefinitions(0).sourceDefinitions(1), actual); - } - - @Test - void testCheckErrorNoCurrentSources() throws IOException, InterruptedException { - setMocksForExceptionCases(); - when(configRepository.listStandardSourceDefinitions(INCLUDE_TOMBSTONE)).thenThrow(new IOException("unable to read current sources")); - - final WebBackendCheckUpdatesRead actual = webBackendCheckUpdatesHandler.checkUpdates(); - - assertEquals(new WebBackendCheckUpdatesRead().destinationDefinitions(1).sourceDefinitions(0), actual); - } - - @Test - void testCheckErrorNoLatestDestinations() throws IOException, InterruptedException { - setMocksForExceptionCases(); - when(githubStore.getLatestDestinations()).thenThrow(new 
InterruptedException("unable to read latest destinations")); - - final WebBackendCheckUpdatesRead actual = webBackendCheckUpdatesHandler.checkUpdates(); - - assertEquals(new WebBackendCheckUpdatesRead().destinationDefinitions(0).sourceDefinitions(1), actual); - } - - @Test - void testCheckErrorNoLatestSources() throws IOException, InterruptedException { - setMocksForExceptionCases(); - when(githubStore.getLatestSources()).thenThrow(new InterruptedException("unable to read latest sources")); - - final WebBackendCheckUpdatesRead actual = webBackendCheckUpdatesHandler.checkUpdates(); - - assertEquals(new WebBackendCheckUpdatesRead().destinationDefinitions(1).sourceDefinitions(0), actual); - } - - private void setMocksForExceptionCases() throws IOException, InterruptedException { - final UUID source1 = UUID.randomUUID(); - final String sourceTag1 = source1.toString(); - - final UUID dest1 = UUID.randomUUID(); - final String destTag1 = dest1.toString(); - - setMocks( - List.of(Map.entry(source1, sourceTag1)), - List.of(Map.entry(source1, UUID.randomUUID().toString())), - List.of(Map.entry(dest1, destTag1)), - List.of(Map.entry(dest1, UUID.randomUUID().toString()))); - } - - private void setMocks(final List> currentSources, - final List> latestSources, - final List> currentDestinations, - final List> latestDestinations) - throws IOException, InterruptedException { - when(configRepository.listStandardSourceDefinitions(INCLUDE_TOMBSTONE)) - .thenReturn(currentSources.stream().map(this::createSourceDef).toList()); - when(githubStore.getLatestSources()) - .thenReturn(latestSources.stream().map(this::createSourceDef).toList()); - - when(configRepository.listStandardDestinationDefinitions(INCLUDE_TOMBSTONE)) - .thenReturn(currentDestinations.stream().map(this::createDestinationDef).toList()); - when(githubStore.getLatestDestinations()) - .thenReturn(latestDestinations.stream().map(this::createDestinationDef).toList()); - } - - private StandardDestinationDefinition 
createDestinationDef(final Entry idImageTagEntry) { - return new StandardDestinationDefinition() - .withDestinationDefinitionId(idImageTagEntry.getKey()) - .withDockerImageTag(idImageTagEntry.getValue()); - } - - private StandardSourceDefinition createSourceDef(final Entry idImageTagEntry) { - return new StandardSourceDefinition() - .withSourceDefinitionId(idImageTagEntry.getKey()) - .withDockerImageTag(idImageTagEntry.getValue()); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java deleted file mode 100644 index 83789d5b89fe..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java +++ /dev/null @@ -1,1396 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.AirbyteCatalog; -import io.airbyte.api.model.generated.AirbyteStream; -import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration; -import io.airbyte.api.model.generated.AttemptRead; -import 
io.airbyte.api.model.generated.AttemptStatus; -import io.airbyte.api.model.generated.CatalogDiff; -import io.airbyte.api.model.generated.ConnectionCreate; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionSchedule; -import io.airbyte.api.model.generated.ConnectionSchedule.TimeUnitEnum; -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.ConnectionStatus; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationSyncMode; -import io.airbyte.api.model.generated.FieldAdd; -import io.airbyte.api.model.generated.FieldRemove; -import io.airbyte.api.model.generated.FieldTransform; -import io.airbyte.api.model.generated.Geography; -import io.airbyte.api.model.generated.JobConfigType; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.JobRead; -import io.airbyte.api.model.generated.JobStatus; -import io.airbyte.api.model.generated.JobWithAttemptsRead; -import io.airbyte.api.model.generated.NamespaceDefinitionType; -import io.airbyte.api.model.generated.NonBreakingChangesPreference; -import io.airbyte.api.model.generated.OperationRead; -import io.airbyte.api.model.generated.OperationReadList; -import io.airbyte.api.model.generated.OperationUpdate; -import io.airbyte.api.model.generated.ResourceRequirements; -import io.airbyte.api.model.generated.SchemaChange; -import io.airbyte.api.model.generated.SelectedFieldInfo; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import 
io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.StreamDescriptor; -import io.airbyte.api.model.generated.StreamTransform; -import io.airbyte.api.model.generated.StreamTransform.TransformTypeEnum; -import io.airbyte.api.model.generated.SyncMode; -import io.airbyte.api.model.generated.SynchronousJobRead; -import io.airbyte.api.model.generated.WebBackendConnectionCreate; -import io.airbyte.api.model.generated.WebBackendConnectionListItem; -import io.airbyte.api.model.generated.WebBackendConnectionListRequestBody; -import io.airbyte.api.model.generated.WebBackendConnectionRead; -import io.airbyte.api.model.generated.WebBackendConnectionReadList; -import io.airbyte.api.model.generated.WebBackendConnectionRequestBody; -import io.airbyte.api.model.generated.WebBackendConnectionUpdate; -import io.airbyte.api.model.generated.WebBackendOperationCreateOrUpdate; -import io.airbyte.api.model.generated.WebBackendWorkspaceState; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.handlers.helpers.CatalogConverter; -import io.airbyte.commons.server.helpers.ConnectionHelpers; -import io.airbyte.commons.server.helpers.DestinationDefinitionHelpers; -import io.airbyte.commons.server.helpers.DestinationHelpers; -import io.airbyte.commons.server.helpers.SourceDefinitionHelpers; -import io.airbyte.commons.server.helpers.SourceHelpers; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.ActorCatalogFetchEvent; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.Status; -import 
io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.ConfigRepository.DestinationAndDefinition; -import io.airbyte.config.persistence.ConfigRepository.SourceAndDefinition; -import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.lang.reflect.Method; -import java.time.Instant; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.InOrder; - -class WebBackendConnectionsHandlerTest { - - private ConnectionsHandler connectionsHandler; - private OperationsHandler operationsHandler; - private SchedulerHandler schedulerHandler; - private StateHandler stateHandler; - private WebBackendConnectionsHandler wbHandler; - private SourceRead sourceRead; - private ConnectionRead connectionRead; - private ConnectionRead brokenConnectionRead; - private WebBackendConnectionListItem expectedListItem; - private OperationReadList operationReadList; - private OperationReadList brokenOperationReadList; - private WebBackendConnectionRead expected; - private WebBackendConnectionRead expectedWithNewSchema; - private WebBackendConnectionRead expectedWithNewSchemaAndBreakingChange; - private WebBackendConnectionRead expectedWithNewSchemaBroken; - private WebBackendConnectionRead expectedNoDiscoveryWithNewSchema; - private EventRunner eventRunner; - private ConfigRepository configRepository; - - private static 
final String STREAM1 = "stream1"; - private static final String STREAM2 = "stream2"; - private static final String FIELD1 = "field1"; - private static final String FIELD2 = "field2"; - private static final String FIELD3 = "field3"; - private static final String FIELD5 = "field5"; - - // needs to match name of file in src/test/resources/icons - private static final String SOURCE_ICON = "test-source.svg"; - private static final String DESTINATION_ICON = "test-destination.svg"; - private static final String SVG = ""; - - @BeforeEach - void setup() throws IOException, JsonValidationException, ConfigNotFoundException { - connectionsHandler = mock(ConnectionsHandler.class); - stateHandler = mock(StateHandler.class); - operationsHandler = mock(OperationsHandler.class); - final SourceHandler sourceHandler = mock(SourceHandler.class); - final DestinationHandler destinationHandler = mock(DestinationHandler.class); - final JobHistoryHandler jobHistoryHandler = mock(JobHistoryHandler.class); - configRepository = mock(ConfigRepository.class); - schedulerHandler = mock(SchedulerHandler.class); - eventRunner = mock(EventRunner.class); - wbHandler = new WebBackendConnectionsHandler( - connectionsHandler, - stateHandler, - sourceHandler, - destinationHandler, - jobHistoryHandler, - schedulerHandler, - operationsHandler, - eventRunner, - configRepository); - - final StandardSourceDefinition sourceDefinition = SourceDefinitionHelpers.generateSourceDefinition(); - sourceDefinition.setIcon(SOURCE_ICON); - final SourceConnection source = SourceHelpers.generateSource(sourceDefinition.getSourceDefinitionId()); - sourceRead = SourceHelpers.getSourceRead(source, sourceDefinition); - - final StandardDestinationDefinition destinationDefinition = DestinationDefinitionHelpers.generateDestination(); - destinationDefinition.setIcon(DESTINATION_ICON); - final DestinationConnection destination = DestinationHelpers.generateDestination(destinationDefinition.getDestinationDefinitionId()); - final 
DestinationRead destinationRead = DestinationHelpers.getDestinationRead(destination, destinationDefinition); - - final StandardSync standardSync = - ConnectionHelpers.generateSyncWithSourceAndDestinationId(source.getSourceId(), destination.getDestinationId(), false, Status.ACTIVE); - final StandardSync brokenStandardSync = - ConnectionHelpers.generateSyncWithSourceAndDestinationId(source.getSourceId(), destination.getDestinationId(), true, Status.INACTIVE); - - when(configRepository.listWorkspaceStandardSyncs(new StandardSyncQuery(sourceRead.getWorkspaceId(), null, null, false))) - .thenReturn(Collections.singletonList(standardSync)); - when(configRepository.getSourceAndDefinitionsFromSourceIds(Collections.singletonList(source.getSourceId()))) - .thenReturn(Collections.singletonList(new SourceAndDefinition(source, sourceDefinition))); - when(configRepository.getDestinationAndDefinitionsFromDestinationIds(Collections.singletonList(destination.getDestinationId()))) - .thenReturn(Collections.singletonList(new DestinationAndDefinition(destination, destinationDefinition))); - - connectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); - brokenConnectionRead = ConnectionHelpers.generateExpectedConnectionRead(brokenStandardSync); - operationReadList = new OperationReadList() - .operations(List.of(new OperationRead() - .operationId(connectionRead.getOperationIds().get(0)) - .name("Test Operation"))); - brokenOperationReadList = new OperationReadList() - .operations(List.of(new OperationRead() - .operationId(brokenConnectionRead.getOperationIds().get(0)) - .name("Test Operation"))); - - final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody(); - sourceIdRequestBody.setSourceId(connectionRead.getSourceId()); - when(sourceHandler.getSource(sourceIdRequestBody)).thenReturn(sourceRead); - - final DestinationIdRequestBody destinationIdRequestBody = new DestinationIdRequestBody(); - 
destinationIdRequestBody.setDestinationId(connectionRead.getDestinationId()); - when(destinationHandler.getDestination(destinationIdRequestBody)).thenReturn(destinationRead); - - final Instant now = Instant.now(); - final JobWithAttemptsRead jobRead = new JobWithAttemptsRead() - .job(new JobRead() - .configId(connectionRead.getConnectionId().toString()) - .configType(JobConfigType.SYNC) - .id(10L) - .status(JobStatus.SUCCEEDED) - .createdAt(now.getEpochSecond()) - .updatedAt(now.getEpochSecond())) - .attempts(Lists.newArrayList(new AttemptRead() - .id(12L) - .status(AttemptStatus.SUCCEEDED) - .bytesSynced(100L) - .recordsSynced(15L) - .createdAt(now.getEpochSecond()) - .updatedAt(now.getEpochSecond()) - .endedAt(now.getEpochSecond()))); - - when(jobHistoryHandler.getLatestSyncJob(connectionRead.getConnectionId())).thenReturn(Optional.of(jobRead.getJob())); - - when(jobHistoryHandler.getLatestSyncJobsForConnections(Collections.singletonList(connectionRead.getConnectionId()))) - .thenReturn(Collections.singletonList(jobRead.getJob())); - - final JobWithAttemptsRead brokenJobRead = new JobWithAttemptsRead() - .job(new JobRead() - .configId(brokenConnectionRead.getConnectionId().toString()) - .configType(JobConfigType.SYNC) - .id(10L) - .status(JobStatus.SUCCEEDED) - .createdAt(now.getEpochSecond()) - .updatedAt(now.getEpochSecond())) - .attempts(Lists.newArrayList(new AttemptRead() - .id(12L) - .status(AttemptStatus.SUCCEEDED) - .bytesSynced(100L) - .recordsSynced(15L) - .createdAt(now.getEpochSecond()) - .updatedAt(now.getEpochSecond()) - .endedAt(now.getEpochSecond()))); - - when(jobHistoryHandler.getLatestSyncJob(brokenConnectionRead.getConnectionId())).thenReturn(Optional.of(brokenJobRead.getJob())); - - when(jobHistoryHandler.getLatestSyncJobsForConnections(Collections.singletonList(brokenConnectionRead.getConnectionId()))) - .thenReturn(Collections.singletonList(brokenJobRead.getJob())); - - expectedListItem = 
ConnectionHelpers.generateExpectedWebBackendConnectionListItem( - standardSync, - sourceRead, - destinationRead, - false, - jobRead.getJob().getCreatedAt(), - jobRead.getJob().getStatus(), - SchemaChange.NO_CHANGE); - - expected = expectedWebBackendConnectionReadObject(connectionRead, sourceRead, destinationRead, operationReadList, SchemaChange.NO_CHANGE, now, - connectionRead.getSyncCatalog(), connectionRead.getSourceCatalogId()); - expectedNoDiscoveryWithNewSchema = expectedWebBackendConnectionReadObject(connectionRead, sourceRead, destinationRead, operationReadList, - SchemaChange.NON_BREAKING, now, connectionRead.getSyncCatalog(), connectionRead.getSourceCatalogId()); - - final AirbyteCatalog modifiedCatalog = ConnectionHelpers.generateMultipleStreamsApiCatalog(2); - final SourceDiscoverSchemaRequestBody sourceDiscoverSchema = new SourceDiscoverSchemaRequestBody(); - sourceDiscoverSchema.setSourceId(connectionRead.getSourceId()); - sourceDiscoverSchema.setDisableCache(true); - when(schedulerHandler.discoverSchemaForSourceFromSourceId(sourceDiscoverSchema)).thenReturn( - new SourceDiscoverSchemaRead() - .jobInfo(mock(SynchronousJobRead.class)) - .catalog(modifiedCatalog)); - - expectedWithNewSchema = expectedWebBackendConnectionReadObject(connectionRead, sourceRead, destinationRead, - new OperationReadList().operations(expected.getOperations()), SchemaChange.NON_BREAKING, now, modifiedCatalog, null) - .catalogDiff(new CatalogDiff().transforms(List.of( - new StreamTransform().transformType(TransformTypeEnum.ADD_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name("users-data1")) - .updateStream(null)))); - - expectedWithNewSchemaAndBreakingChange = expectedWebBackendConnectionReadObject(brokenConnectionRead, sourceRead, destinationRead, - new OperationReadList().operations(expected.getOperations()), SchemaChange.BREAKING, now, modifiedCatalog, null) - .catalogDiff(new CatalogDiff().transforms(List.of( - new 
StreamTransform().transformType(TransformTypeEnum.ADD_STREAM) - .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name("users-data1")) - .updateStream(null)))); - - expectedWithNewSchemaBroken = expectedWebBackendConnectionReadObject(brokenConnectionRead, sourceRead, destinationRead, brokenOperationReadList, - SchemaChange.BREAKING, now, connectionRead.getSyncCatalog(), brokenConnectionRead.getSourceCatalogId()); - when(schedulerHandler.resetConnection(any(ConnectionIdRequestBody.class))) - .thenReturn(new JobInfoRead().job(new JobRead().status(JobStatus.SUCCEEDED))); - } - - WebBackendConnectionRead expectedWebBackendConnectionReadObject( - final ConnectionRead connectionRead, - final SourceRead sourceRead, - final DestinationRead destinationRead, - final OperationReadList operationReadList, - final SchemaChange schemaChange, - final Instant now, - final AirbyteCatalog syncCatalog, - final UUID catalogId) { - return new WebBackendConnectionRead() - .connectionId(connectionRead.getConnectionId()) - .sourceId(connectionRead.getSourceId()) - .destinationId(connectionRead.getDestinationId()) - .operationIds(connectionRead.getOperationIds()) - .name(connectionRead.getName()) - .namespaceDefinition(connectionRead.getNamespaceDefinition()) - .namespaceFormat(connectionRead.getNamespaceFormat()) - .prefix(connectionRead.getPrefix()) - .syncCatalog(syncCatalog) - .catalogId(catalogId) - .status(connectionRead.getStatus()) - .schedule(connectionRead.getSchedule()) - .scheduleType(connectionRead.getScheduleType()) - .scheduleData(connectionRead.getScheduleData()) - .source(sourceRead) - .destination(destinationRead) - .operations(operationReadList.getOperations()) - .latestSyncJobCreatedAt(now.getEpochSecond()) - .latestSyncJobStatus(JobStatus.SUCCEEDED) - .isSyncing(false) - .schemaChange(schemaChange) - .resourceRequirements(new ResourceRequirements() - .cpuRequest(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getCpuRequest()) - 
.cpuLimit(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getCpuLimit()) - .memoryRequest(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getMemoryRequest()) - .memoryLimit(ConnectionHelpers.TESTING_RESOURCE_REQUIREMENTS.getMemoryLimit())); - } - - @Test - void testGetWorkspaceState() throws IOException { - final UUID uuid = UUID.randomUUID(); - final WebBackendWorkspaceState request = new WebBackendWorkspaceState().workspaceId(uuid); - when(configRepository.countSourcesForWorkspace(uuid)).thenReturn(5); - when(configRepository.countDestinationsForWorkspace(uuid)).thenReturn(2); - when(configRepository.countConnectionsForWorkspace(uuid)).thenReturn(8); - final var actual = wbHandler.getWorkspaceState(request); - assertTrue(actual.getHasConnections()); - assertTrue(actual.getHasDestinations()); - assertTrue((actual.getHasSources())); - } - - @Test - void testGetWorkspaceStateEmpty() throws IOException { - final UUID uuid = UUID.randomUUID(); - final WebBackendWorkspaceState request = new WebBackendWorkspaceState().workspaceId(uuid); - when(configRepository.countSourcesForWorkspace(uuid)).thenReturn(0); - when(configRepository.countDestinationsForWorkspace(uuid)).thenReturn(0); - when(configRepository.countConnectionsForWorkspace(uuid)).thenReturn(0); - final var actual = wbHandler.getWorkspaceState(request); - assertFalse(actual.getHasConnections()); - assertFalse(actual.getHasDestinations()); - assertFalse(actual.getHasSources()); - } - - @Test - void testWebBackendListConnectionsForWorkspace() throws IOException, JsonValidationException, ConfigNotFoundException { - final WebBackendConnectionListRequestBody webBackendConnectionListRequestBody = new WebBackendConnectionListRequestBody(); - webBackendConnectionListRequestBody.setWorkspaceId(sourceRead.getWorkspaceId()); - - final WebBackendConnectionReadList WebBackendConnectionReadList = - wbHandler.webBackendListConnectionsForWorkspace(webBackendConnectionListRequestBody); - - assertEquals(1, 
WebBackendConnectionReadList.getConnections().size()); - assertEquals(expectedListItem, WebBackendConnectionReadList.getConnections().get(0)); - - // make sure the icons were loaded into actual svg content - assertTrue(expectedListItem.getSource().getIcon().startsWith(SVG)); - assertTrue(expectedListItem.getDestination().getIcon().startsWith(SVG)); - } - - @Test - void testWebBackendGetConnection() throws ConfigNotFoundException, IOException, JsonValidationException { - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody(); - connectionIdRequestBody.setConnectionId(connectionRead.getConnectionId()); - - final WebBackendConnectionRequestBody webBackendConnectionRequestBody = new WebBackendConnectionRequestBody(); - webBackendConnectionRequestBody.setConnectionId(connectionRead.getConnectionId()); - - when(connectionsHandler.getConnection(connectionRead.getConnectionId())).thenReturn(connectionRead); - when(operationsHandler.listOperationsForConnection(connectionIdRequestBody)).thenReturn(operationReadList); - - final WebBackendConnectionRead webBackendConnectionRead = wbHandler.webBackendGetConnection(webBackendConnectionRequestBody); - - assertEquals(expected, webBackendConnectionRead); - - // make sure the icons were loaded into actual svg content - assertTrue(expected.getSource().getIcon().startsWith(SVG)); - assertTrue(expected.getDestination().getIcon().startsWith(SVG)); - } - - WebBackendConnectionRead testWebBackendGetConnection(final boolean withCatalogRefresh, - final ConnectionRead connectionRead, - final OperationReadList operationReadList) - throws JsonValidationException, ConfigNotFoundException, IOException { - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody(); - connectionIdRequestBody.setConnectionId(connectionRead.getConnectionId()); - - final WebBackendConnectionRequestBody webBackendConnectionIdRequestBody = new WebBackendConnectionRequestBody(); - 
webBackendConnectionIdRequestBody.setConnectionId(connectionRead.getConnectionId()); - if (withCatalogRefresh) { - webBackendConnectionIdRequestBody.setWithRefreshedCatalog(true); - } - - when(connectionsHandler.getConnection(connectionRead.getConnectionId())).thenReturn(connectionRead); - when(operationsHandler.listOperationsForConnection(connectionIdRequestBody)).thenReturn(operationReadList); - - return wbHandler.webBackendGetConnection(webBackendConnectionIdRequestBody); - } - - @Test - void testWebBackendGetConnectionWithDiscoveryAndNewSchema() throws ConfigNotFoundException, - IOException, JsonValidationException { - final UUID newCatalogId = UUID.randomUUID(); - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) - .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); - final SourceDiscoverSchemaRead schemaRead = - new SourceDiscoverSchemaRead().catalogDiff(expectedWithNewSchema.getCatalogDiff()).catalog(expectedWithNewSchema.getSyncCatalog()) - .breakingChange(false).connectionStatus(ConnectionStatus.ACTIVE); - when(schedulerHandler.discoverSchemaForSourceFromSourceId(any())).thenReturn(schemaRead); - when(connectionsHandler.getConnectionAirbyteCatalog(connectionRead.getConnectionId())).thenReturn(Optional.of(connectionRead.getSyncCatalog())); - - final WebBackendConnectionRead result = testWebBackendGetConnection(true, connectionRead, - operationReadList); - assertEquals(expectedWithNewSchema, result); - } - - @Test - void testWebBackendGetConnectionWithDiscoveryAndNewSchemaBreakingChange() throws ConfigNotFoundException, - IOException, JsonValidationException { - final UUID newCatalogId = UUID.randomUUID(); - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) - .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); - 
when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); - final SourceDiscoverSchemaRead schemaRead = - new SourceDiscoverSchemaRead().catalogDiff(expectedWithNewSchema.getCatalogDiff()).catalog(expectedWithNewSchema.getSyncCatalog()) - .breakingChange(true).connectionStatus(ConnectionStatus.INACTIVE); - when(schedulerHandler.discoverSchemaForSourceFromSourceId(any())).thenReturn(schemaRead); - when(connectionsHandler.getConnectionAirbyteCatalog(brokenConnectionRead.getConnectionId())) - .thenReturn(Optional.of(connectionRead.getSyncCatalog())); - - final WebBackendConnectionRead result = testWebBackendGetConnection(true, brokenConnectionRead, - operationReadList); - assertEquals(expectedWithNewSchemaAndBreakingChange, result); - } - - @Test - void testWebBackendGetConnectionWithDiscoveryMissingCatalogUsedToMakeConfiguredCatalog() - throws IOException, ConfigNotFoundException, JsonValidationException { - final UUID newCatalogId = UUID.randomUUID(); - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) - .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); - final SourceDiscoverSchemaRead schemaRead = - new SourceDiscoverSchemaRead().catalogDiff(expectedWithNewSchema.getCatalogDiff()).catalog(expectedWithNewSchema.getSyncCatalog()) - .breakingChange(false).connectionStatus(ConnectionStatus.ACTIVE); - when(schedulerHandler.discoverSchemaForSourceFromSourceId(any())).thenReturn(schemaRead); - when(connectionsHandler.getConnectionAirbyteCatalog(connectionRead.getConnectionId())).thenReturn(Optional.empty()); - - final WebBackendConnectionRead result = testWebBackendGetConnection(true, connectionRead, - operationReadList); - assertEquals(expectedWithNewSchema, result); - } - - @Test - void 
testWebBackendGetConnectionWithDiscoveryAndFieldSelectionAddField() throws ConfigNotFoundException, - IOException, JsonValidationException { - // Mock this because the API uses it to determine whether there was a schema change. - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) - .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); - - // Original configured catalog has two fields, and only one of them is selected. - final AirbyteCatalog originalConfiguredCatalog = ConnectionHelpers.generateApiCatalogWithTwoFields(); - originalConfiguredCatalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true) - .selectedFields(List.of(new SelectedFieldInfo().addFieldPathItem( - ConnectionHelpers.FIELD_NAME))); - connectionRead.syncCatalog(originalConfiguredCatalog); - - // Original discovered catalog has the same two fields but no selection info because it's a - // discovered catalog. - when(connectionsHandler.getConnectionAirbyteCatalog(connectionRead.getConnectionId())).thenReturn( - Optional.of(ConnectionHelpers.generateApiCatalogWithTwoFields())); - - // Newly-discovered catalog has an extra field. There is no field selection info because it's a - // discovered catalog. 
- final AirbyteCatalog newCatalogToDiscover = ConnectionHelpers.generateApiCatalogWithTwoFields(); - final JsonNode newFieldSchema = Jsons.deserialize("{\"type\": \"string\"}"); - ((ObjectNode) newCatalogToDiscover.getStreams().get(0).getStream().getJsonSchema().findPath("properties")) - .putObject("a-new-field") - .put("type", "string"); - final SourceDiscoverSchemaRead schemaRead = - new SourceDiscoverSchemaRead() - .catalogDiff( - new CatalogDiff().addTransformsItem(new StreamTransform().addUpdateStreamItem(new FieldTransform().transformType( - FieldTransform.TransformTypeEnum.ADD_FIELD).addFieldNameItem("a-new-field").breaking(false) - .addField(new FieldAdd().schema(newFieldSchema))))) - .catalog(newCatalogToDiscover) - .breakingChange(false) - .connectionStatus(ConnectionStatus.ACTIVE); - when(schedulerHandler.discoverSchemaForSourceFromSourceId(any())).thenReturn(schemaRead); - - final WebBackendConnectionRead result = testWebBackendGetConnection(true, connectionRead, - operationReadList); - - // We expect the discovered catalog with two fields selected: the one that was originally selected, - // plus the newly-discovered field. - final AirbyteCatalog expectedNewCatalog = Jsons.clone(newCatalogToDiscover); - expectedNewCatalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).selectedFields( - List.of(new SelectedFieldInfo().addFieldPathItem(ConnectionHelpers.FIELD_NAME), new SelectedFieldInfo().addFieldPathItem("a-new-field"))); - expectedWithNewSchema.catalogDiff(schemaRead.getCatalogDiff()).syncCatalog(expectedNewCatalog); - assertEquals(expectedWithNewSchema, result); - } - - @Test - void testWebBackendGetConnectionWithDiscoveryAndFieldSelectionRemoveField() throws ConfigNotFoundException, - IOException, JsonValidationException { - // Mock this because the API uses it to determine whether there was a schema change. 
- when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) - .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); - - // Original configured catalog has two fields, and both of them are selected. - final AirbyteCatalog originalConfiguredCatalog = ConnectionHelpers.generateApiCatalogWithTwoFields(); - originalConfiguredCatalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true) - .selectedFields(List.of(new SelectedFieldInfo().addFieldPathItem( - ConnectionHelpers.FIELD_NAME), new SelectedFieldInfo().addFieldPathItem(ConnectionHelpers.FIELD_NAME + "2"))); - connectionRead.syncCatalog(originalConfiguredCatalog); - - // Original discovered catalog has the same two fields but no selection info because it's a - // discovered catalog. - when(connectionsHandler.getConnectionAirbyteCatalog(connectionRead.getConnectionId())).thenReturn( - Optional.of(ConnectionHelpers.generateApiCatalogWithTwoFields())); - - // Newly-discovered catalog has one of the fields removed. There is no field selection info because - // it's a - // discovered catalog. 
- final AirbyteCatalog newCatalogToDiscover = ConnectionHelpers.generateBasicApiCatalog(); - final JsonNode removedFieldSchema = Jsons.deserialize("{\"type\": \"string\"}"); - final SourceDiscoverSchemaRead schemaRead = - new SourceDiscoverSchemaRead() - .catalogDiff(new CatalogDiff().addTransformsItem(new StreamTransform().addUpdateStreamItem( - new FieldTransform().transformType(FieldTransform.TransformTypeEnum.REMOVE_FIELD).addFieldNameItem(ConnectionHelpers.FIELD_NAME + "2") - .breaking(false).removeField(new FieldRemove().schema(removedFieldSchema))))) - .catalog(newCatalogToDiscover) - .breakingChange(false) - .connectionStatus(ConnectionStatus.ACTIVE); - when(schedulerHandler.discoverSchemaForSourceFromSourceId(any())).thenReturn(schemaRead); - - final WebBackendConnectionRead result = testWebBackendGetConnection(true, connectionRead, - operationReadList); - - // We expect the discovered catalog with two fields selected: the one that was originally selected, - // plus the newly-discovered field. 
- final AirbyteCatalog expectedNewCatalog = Jsons.clone(newCatalogToDiscover); - expectedNewCatalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).selectedFields( - List.of(new SelectedFieldInfo().addFieldPathItem(ConnectionHelpers.FIELD_NAME))); - expectedWithNewSchema.catalogDiff(schemaRead.getCatalogDiff()).syncCatalog(expectedNewCatalog); - assertEquals(expectedWithNewSchema, result); - } - - @Test - void testWebBackendGetConnectionNoRefreshCatalog() - throws JsonValidationException, ConfigNotFoundException, IOException { - final WebBackendConnectionRead result = testWebBackendGetConnection(false, connectionRead, operationReadList); - verify(schedulerHandler, never()).discoverSchemaForSourceFromSourceId(any()); - assertEquals(expected, result); - } - - @Test - void testWebBackendGetConnectionNoDiscoveryWithNewSchema() throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) - .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); - final WebBackendConnectionRead result = testWebBackendGetConnection(false, connectionRead, operationReadList); - assertEquals(expectedNoDiscoveryWithNewSchema, result); - } - - @Test - void testWebBackendGetConnectionNoDiscoveryWithNewSchemaBreaking() throws JsonValidationException, ConfigNotFoundException, IOException { - when(connectionsHandler.getConnection(brokenConnectionRead.getConnectionId())).thenReturn(brokenConnectionRead); - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) - .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); - final WebBackendConnectionRead result = 
testWebBackendGetConnection(false, brokenConnectionRead, brokenOperationReadList); - assertEquals(expectedWithNewSchemaBroken, result); - } - - @Test - void testToConnectionCreate() throws IOException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final StandardSync standardSync = ConnectionHelpers.generateSyncWithSourceId(source.getSourceId()); - - final AirbyteCatalog catalog = ConnectionHelpers.generateBasicApiCatalog(); - catalog.getStreams().get(0).getStream().setName("azkaban_users"); - - final ConnectionSchedule schedule = new ConnectionSchedule().units(1L).timeUnit(TimeUnitEnum.MINUTES); - - final UUID newSourceId = UUID.randomUUID(); - final UUID newDestinationId = UUID.randomUUID(); - final UUID newOperationId = UUID.randomUUID(); - final UUID sourceCatalogId = UUID.randomUUID(); - final WebBackendConnectionCreate input = new WebBackendConnectionCreate() - .name("testConnectionCreate") - .namespaceDefinition(Enums.convertTo(standardSync.getNamespaceDefinition(), NamespaceDefinitionType.class)) - .namespaceFormat(standardSync.getNamespaceFormat()) - .prefix(standardSync.getPrefix()) - .sourceId(newSourceId) - .destinationId(newDestinationId) - .operationIds(List.of(newOperationId)) - .status(ConnectionStatus.INACTIVE) - .schedule(schedule) - .syncCatalog(catalog) - .sourceCatalogId(sourceCatalogId) - .geography(Geography.US) - .nonBreakingChangesPreference(NonBreakingChangesPreference.DISABLE); - - final List operationIds = List.of(newOperationId); - - final ConnectionCreate expected = new ConnectionCreate() - .name("testConnectionCreate") - .namespaceDefinition(Enums.convertTo(standardSync.getNamespaceDefinition(), NamespaceDefinitionType.class)) - .namespaceFormat(standardSync.getNamespaceFormat()) - .prefix(standardSync.getPrefix()) - .sourceId(newSourceId) - .destinationId(newDestinationId) - .operationIds(operationIds) - .status(ConnectionStatus.INACTIVE) - .schedule(schedule) - .syncCatalog(catalog) - 
.sourceCatalogId(sourceCatalogId) - .geography(Geography.US) - .nonBreakingChangesPreference(NonBreakingChangesPreference.DISABLE); - - final ConnectionCreate actual = WebBackendConnectionsHandler.toConnectionCreate(input, operationIds); - - assertEquals(expected, actual); - } - - @Test - void testToConnectionPatch() throws IOException { - final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); - final StandardSync standardSync = ConnectionHelpers.generateSyncWithSourceId(source.getSourceId()); - - final AirbyteCatalog catalog = ConnectionHelpers.generateBasicApiCatalog(); - catalog.getStreams().get(0).getStream().setName("azkaban_users"); - - final ConnectionSchedule schedule = new ConnectionSchedule().units(1L).timeUnit(TimeUnitEnum.MINUTES); - - final UUID newOperationId = UUID.randomUUID(); - final WebBackendConnectionUpdate input = new WebBackendConnectionUpdate() - .namespaceDefinition(Enums.convertTo(standardSync.getNamespaceDefinition(), NamespaceDefinitionType.class)) - .namespaceFormat(standardSync.getNamespaceFormat()) - .prefix(standardSync.getPrefix()) - .connectionId(standardSync.getConnectionId()) - .operations(List.of(new WebBackendOperationCreateOrUpdate().operationId(newOperationId))) - .status(ConnectionStatus.INACTIVE) - .schedule(schedule) - .name(standardSync.getName()) - .syncCatalog(catalog) - .geography(Geography.US) - .nonBreakingChangesPreference(NonBreakingChangesPreference.DISABLE) - .notifySchemaChanges(false); - - final List operationIds = List.of(newOperationId); - - final ConnectionUpdate expected = new ConnectionUpdate() - .namespaceDefinition(Enums.convertTo(standardSync.getNamespaceDefinition(), NamespaceDefinitionType.class)) - .namespaceFormat(standardSync.getNamespaceFormat()) - .prefix(standardSync.getPrefix()) - .connectionId(standardSync.getConnectionId()) - .operationIds(operationIds) - .status(ConnectionStatus.INACTIVE) - .schedule(schedule) - .name(standardSync.getName()) - .syncCatalog(catalog) 
- .geography(Geography.US) - .nonBreakingChangesPreference(NonBreakingChangesPreference.DISABLE) - .notifySchemaChanges(false) - .breakingChange(false); - - final ConnectionUpdate actual = WebBackendConnectionsHandler.toConnectionPatch(input, operationIds, false); - - assertEquals(expected, actual); - } - - @Test - void testForConnectionCreateCompleteness() { - final Set handledMethods = - Set.of("name", "namespaceDefinition", "namespaceFormat", "prefix", "sourceId", "destinationId", "operationIds", - "addOperationIdsItem", "removeOperationIdsItem", "syncCatalog", "schedule", "scheduleType", "scheduleData", - "status", "resourceRequirements", "sourceCatalogId", "geography", "nonBreakingChangesPreference", "notifySchemaChanges"); - - final Set methods = Arrays.stream(ConnectionCreate.class.getMethods()) - .filter(method -> method.getReturnType() == ConnectionCreate.class) - .map(Method::getName) - .collect(Collectors.toSet()); - - final String message = - """ - If this test is failing, it means you added a field to ConnectionCreate! - Congratulations, but you're not done yet.. - \tYou should update WebBackendConnectionsHandler::toConnectionCreate - \tand ensure that the field is tested in WebBackendConnectionsHandlerTest::testToConnectionCreate - Then you can add the field name here to make this test pass. 
Cheers!"""; - assertEquals(handledMethods, methods, message); - } - - @Test - void testForConnectionPatchCompleteness() { - final Set handledMethods = - Set.of("schedule", "connectionId", "syncCatalog", "namespaceDefinition", "namespaceFormat", "prefix", "status", - "operationIds", "addOperationIdsItem", "removeOperationIdsItem", "resourceRequirements", "name", - "sourceCatalogId", "scheduleType", "scheduleData", "geography", "breakingChange", "notifySchemaChanges", "nonBreakingChangesPreference"); - - final Set methods = Arrays.stream(ConnectionUpdate.class.getMethods()) - .filter(method -> method.getReturnType() == ConnectionUpdate.class) - .map(Method::getName) - .collect(Collectors.toSet()); - - final String message = - """ - If this test is failing, it means you added a field to ConnectionUpdate! - Congratulations, but you're not done yet.. - \tYou should update WebBackendConnectionsHandler::toConnectionPatch - \tand ensure that the field is tested in WebBackendConnectionsHandlerTest::testToConnectionPatch - Then you can add the field name here to make this test pass. 
Cheers!"""; - assertEquals(handledMethods, methods, message); - } - - @Test - void testUpdateConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .connectionId(expected.getConnectionId()) - .schedule(expected.getSchedule()) - .status(expected.getStatus()) - .syncCatalog(expected.getSyncCatalog()) - .sourceCatalogId(expected.getCatalogId()); - - when(configRepository.getConfiguredCatalogForConnection(expected.getConnectionId())) - .thenReturn(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()); - - final CatalogDiff catalogDiff = new CatalogDiff().transforms(List.of()); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(expected.getConnectionId()); - when(stateHandler.getState(connectionIdRequestBody)).thenReturn(new ConnectionState().stateType(ConnectionStateType.LEGACY)); - - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn( - new ConnectionRead().connectionId(expected.getConnectionId())); - when(connectionsHandler.updateConnection(any())).thenReturn( - new ConnectionRead() - .connectionId(expected.getConnectionId()) - .sourceId(expected.getSourceId()) - .destinationId(expected.getDestinationId()) - .name(expected.getName()) - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .syncCatalog(expected.getSyncCatalog()) - .status(expected.getStatus()) - .schedule(expected.getSchedule()).breakingChange(false)); - when(operationsHandler.listOperationsForConnection(any())).thenReturn(operationReadList); - final ConnectionIdRequestBody connectionId = new 
ConnectionIdRequestBody().connectionId(connectionRead.getConnectionId()); - - final AirbyteCatalog fullAirbyteCatalog = ConnectionHelpers.generateMultipleStreamsApiCatalog(2); - when(connectionsHandler.getConnectionAirbyteCatalog(connectionRead.getConnectionId())).thenReturn(Optional.ofNullable(fullAirbyteCatalog)); - - final AirbyteCatalog expectedCatalogReturned = - WebBackendConnectionsHandler.updateSchemaWithRefreshedDiscoveredCatalog(expected.getSyncCatalog(), expected.getSyncCatalog(), - fullAirbyteCatalog); - final WebBackendConnectionRead connectionRead = wbHandler.webBackendUpdateConnection(updateBody); - - assertEquals(expectedCatalogReturned, connectionRead.getSyncCatalog()); - - verify(schedulerHandler, times(0)).resetConnection(connectionId); - verify(schedulerHandler, times(0)).syncConnection(connectionId); - } - - @Test - void testUpdateConnectionWithOperations() throws JsonValidationException, ConfigNotFoundException, IOException { - final WebBackendOperationCreateOrUpdate operationCreateOrUpdate = new WebBackendOperationCreateOrUpdate() - .name("Test Operation") - .operationId(connectionRead.getOperationIds().get(0)); - final OperationUpdate operationUpdate = WebBackendConnectionsHandler.toOperationUpdate(operationCreateOrUpdate); - final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .connectionId(expected.getConnectionId()) - .schedule(expected.getSchedule()) - .status(expected.getStatus()) - .syncCatalog(expected.getSyncCatalog()) - .operations(List.of(operationCreateOrUpdate)); - - when(configRepository.getConfiguredCatalogForConnection(expected.getConnectionId())) - .thenReturn(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()); - - final CatalogDiff catalogDiff = new CatalogDiff().transforms(List.of()); - when(connectionsHandler.getDiff(any(), any(), 
any())).thenReturn(catalogDiff); - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(expected.getConnectionId()); - when(stateHandler.getState(connectionIdRequestBody)).thenReturn(new ConnectionState().stateType(ConnectionStateType.LEGACY)); - - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn( - new ConnectionRead() - .connectionId(expected.getConnectionId()) - .operationIds(connectionRead.getOperationIds()) - .breakingChange(false)); - when(connectionsHandler.updateConnection(any())).thenReturn( - new ConnectionRead() - .connectionId(expected.getConnectionId()) - .sourceId(expected.getSourceId()) - .destinationId(expected.getDestinationId()) - .operationIds(connectionRead.getOperationIds()) - .name(expected.getName()) - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .syncCatalog(expected.getSyncCatalog()) - .status(expected.getStatus()) - .schedule(expected.getSchedule()).breakingChange(false)); - when(operationsHandler.updateOperation(operationUpdate)).thenReturn(new OperationRead().operationId(operationUpdate.getOperationId())); - when(operationsHandler.listOperationsForConnection(any())).thenReturn(operationReadList); - - final WebBackendConnectionRead actualConnectionRead = wbHandler.webBackendUpdateConnection(updateBody); - - assertEquals(connectionRead.getOperationIds(), actualConnectionRead.getOperationIds()); - verify(operationsHandler, times(1)).updateOperation(operationUpdate); - } - - @Test - void testUpdateConnectionWithUpdatedSchemaLegacy() throws JsonValidationException, ConfigNotFoundException, IOException { - final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .connectionId(expected.getConnectionId()) - 
.schedule(expected.getSchedule()) - .status(expected.getStatus()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()); - - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(expected.getConnectionId()); - - when(stateHandler.getState(connectionIdRequestBody)).thenReturn(new ConnectionState().stateType(ConnectionStateType.LEGACY)); - when(configRepository.getConfiguredCatalogForConnection(expected.getConnectionId())) - .thenReturn(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()); - - final StreamDescriptor streamDescriptorAdd = new StreamDescriptor().name("addStream"); - final StreamTransform streamTransformAdd = - new StreamTransform().streamDescriptor(streamDescriptorAdd).transformType(TransformTypeEnum.ADD_STREAM); - - final CatalogDiff catalogDiff = new CatalogDiff().transforms(List.of(streamTransformAdd)); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - - when(operationsHandler.listOperationsForConnection(any())).thenReturn(operationReadList); - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn( - new ConnectionRead().connectionId(expected.getConnectionId())); - final ConnectionRead connectionRead = new ConnectionRead() - .connectionId(expected.getConnectionId()) - .sourceId(expected.getSourceId()) - .destinationId(expected.getDestinationId()) - .name(expected.getName()) - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()) - .status(expected.getStatus()) - .schedule(expected.getSchedule()).breakingChange(false); - when(connectionsHandler.updateConnection(any())).thenReturn(connectionRead); - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn(connectionRead); - - final List connectionStreams = List.of(ConnectionHelpers.STREAM_DESCRIPTOR); - 
when(configRepository.getAllStreamsForConnection(expected.getConnectionId())).thenReturn(connectionStreams); - - final ManualOperationResult successfulResult = ManualOperationResult.builder().jobId(Optional.empty()).failingReason(Optional.empty()).build(); - when(eventRunner.resetConnection(any(), any(), anyBoolean())).thenReturn(successfulResult); - when(eventRunner.startNewManualSync(any())).thenReturn(successfulResult); - - final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); - - assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); - - final ConnectionIdRequestBody connectionId = new ConnectionIdRequestBody().connectionId(result.getConnectionId()); - verify(schedulerHandler, times(0)).resetConnection(connectionId); - verify(schedulerHandler, times(0)).syncConnection(connectionId); - verify(connectionsHandler, times(1)).updateConnection(any()); - final InOrder orderVerifier = inOrder(eventRunner); - orderVerifier.verify(eventRunner, times(1)).resetConnection(connectionId.getConnectionId(), connectionStreams, true); - } - - @Test - void testUpdateConnectionWithUpdatedSchemaPerStream() throws JsonValidationException, ConfigNotFoundException, IOException { - final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .connectionId(expected.getConnectionId()) - .schedule(expected.getSchedule()) - .status(expected.getStatus()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()); - - // state is per-stream - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(expected.getConnectionId()); - when(stateHandler.getState(connectionIdRequestBody)).thenReturn(new ConnectionState().stateType(ConnectionStateType.STREAM)); - when(configRepository.getConfiguredCatalogForConnection(expected.getConnectionId())) - 
.thenReturn(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()); - - final StreamDescriptor streamDescriptorAdd = new StreamDescriptor().name("addStream"); - final StreamDescriptor streamDescriptorRemove = new StreamDescriptor().name("removeStream"); - final StreamDescriptor streamDescriptorUpdate = new StreamDescriptor().name("updateStream"); - - final StreamTransform streamTransformAdd = - new StreamTransform().streamDescriptor(streamDescriptorAdd).transformType(TransformTypeEnum.ADD_STREAM); - final StreamTransform streamTransformRemove = - new StreamTransform().streamDescriptor(streamDescriptorRemove).transformType(TransformTypeEnum.REMOVE_STREAM); - final StreamTransform streamTransformUpdate = - new StreamTransform().streamDescriptor(streamDescriptorUpdate).transformType(TransformTypeEnum.UPDATE_STREAM); - - final CatalogDiff catalogDiff = new CatalogDiff().transforms(List.of(streamTransformAdd, streamTransformRemove, streamTransformUpdate)); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - when(connectionsHandler.getConfigurationDiff(any(), any())).thenReturn(Set.of(new StreamDescriptor().name("configUpdateStream"))); - - when(operationsHandler.listOperationsForConnection(any())).thenReturn(operationReadList); - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn( - new ConnectionRead().connectionId(expected.getConnectionId()).breakingChange(false)); - final ConnectionRead connectionRead = new ConnectionRead() - .connectionId(expected.getConnectionId()) - .sourceId(expected.getSourceId()) - .destinationId(expected.getDestinationId()) - .name(expected.getName()) - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()) - .status(expected.getStatus()) - .schedule(expected.getSchedule()) - .breakingChange(false); - 
when(connectionsHandler.updateConnection(any())).thenReturn(connectionRead); - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn(connectionRead); - - final ManualOperationResult successfulResult = ManualOperationResult.builder().jobId(Optional.empty()).failingReason(Optional.empty()).build(); - when(eventRunner.resetConnection(any(), any(), anyBoolean())).thenReturn(successfulResult); - when(eventRunner.startNewManualSync(any())).thenReturn(successfulResult); - - final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); - - assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); - - final ConnectionIdRequestBody connectionId = new ConnectionIdRequestBody().connectionId(result.getConnectionId()); - verify(schedulerHandler, times(0)).resetConnection(connectionId); - verify(schedulerHandler, times(0)).syncConnection(connectionId); - verify(connectionsHandler, times(1)).updateConnection(any()); - final InOrder orderVerifier = inOrder(eventRunner); - orderVerifier.verify(eventRunner, times(1)).resetConnection(connectionId.getConnectionId(), - List.of(new io.airbyte.protocol.models.StreamDescriptor().withName("addStream"), - new io.airbyte.protocol.models.StreamDescriptor().withName("updateStream"), - new io.airbyte.protocol.models.StreamDescriptor().withName("configUpdateStream"), - new io.airbyte.protocol.models.StreamDescriptor().withName("removeStream")), - true); - } - - @Test - void testUpdateConnectionNoStreamsToReset() throws JsonValidationException, ConfigNotFoundException, IOException { - final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .connectionId(expected.getConnectionId()) - .schedule(expected.getSchedule()) - .status(expected.getStatus()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()); - - // 
state is per-stream - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(expected.getConnectionId()); - final ConfiguredAirbyteCatalog configuredAirbyteCatalog = ConnectionHelpers.generateBasicConfiguredAirbyteCatalog(); - when(stateHandler.getState(connectionIdRequestBody)).thenReturn(new ConnectionState().stateType(ConnectionStateType.STREAM)); - when(configRepository.getConfiguredCatalogForConnection(expected.getConnectionId())) - .thenReturn(configuredAirbyteCatalog); - - final CatalogDiff catalogDiff = new CatalogDiff().transforms(List.of()); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); - - when(operationsHandler.listOperationsForConnection(any())).thenReturn(operationReadList); - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn( - new ConnectionRead().connectionId(expected.getConnectionId())); - final ConnectionRead connectionRead = new ConnectionRead() - .connectionId(expected.getConnectionId()) - .sourceId(expected.getSourceId()) - .destinationId(expected.getDestinationId()) - .name(expected.getName()) - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()) - .status(expected.getStatus()) - .schedule(expected.getSchedule()).breakingChange(false); - when(connectionsHandler.updateConnection(any())).thenReturn(connectionRead); - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn(connectionRead); - - final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); - - assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); - - final ConnectionIdRequestBody connectionId = new ConnectionIdRequestBody().connectionId(result.getConnectionId()); - - verify(connectionsHandler).getDiff(expected.getSyncCatalog(), 
expectedWithNewSchema.getSyncCatalog(), - CatalogConverter.toConfiguredProtocol(result.getSyncCatalog())); - verify(connectionsHandler).getConfigurationDiff(expected.getSyncCatalog(), expectedWithNewSchema.getSyncCatalog()); - verify(schedulerHandler, times(0)).resetConnection(connectionId); - verify(schedulerHandler, times(0)).syncConnection(connectionId); - verify(connectionsHandler, times(1)).updateConnection(any()); - final InOrder orderVerifier = inOrder(eventRunner); - orderVerifier.verify(eventRunner, times(0)).resetConnection(eq(connectionId.getConnectionId()), any(), anyBoolean()); - orderVerifier.verify(eventRunner, times(0)).startNewManualSync(connectionId.getConnectionId()); - } - - @Test - void testUpdateConnectionWithSkipReset() throws JsonValidationException, ConfigNotFoundException, IOException { - final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .connectionId(expected.getConnectionId()) - .schedule(expected.getSchedule()) - .status(expected.getStatus()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()) - .skipReset(true); - - when(configRepository.getConfiguredCatalogForConnection(expected.getConnectionId())) - .thenReturn(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()); - when(operationsHandler.listOperationsForConnection(any())).thenReturn(operationReadList); - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn( - new ConnectionRead().connectionId(expected.getConnectionId())); - final ConnectionRead connectionRead = new ConnectionRead() - .connectionId(expected.getConnectionId()) - .sourceId(expected.getSourceId()) - .destinationId(expected.getDestinationId()) - .name(expected.getName()) - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - 
.syncCatalog(expectedWithNewSchema.getSyncCatalog()) - .status(expected.getStatus()) - .schedule(expected.getSchedule()) - .breakingChange(false); - when(connectionsHandler.updateConnection(any())).thenReturn(connectionRead); - - final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); - - assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); - - final ConnectionIdRequestBody connectionId = new ConnectionIdRequestBody().connectionId(result.getConnectionId()); - verify(schedulerHandler, times(0)).resetConnection(connectionId); - verify(schedulerHandler, times(0)).syncConnection(connectionId); - verify(connectionsHandler, times(0)).getDiff(any(), any(), any()); - verify(connectionsHandler, times(1)).updateConnection(any()); - verify(eventRunner, times(0)).resetConnection(any(), any(), eq(true)); - } - - @Test - void testUpdateConnectionFixingBreakingSchemaChange() throws JsonValidationException, ConfigNotFoundException, IOException { - final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .connectionId(expected.getConnectionId()) - .schedule(expected.getSchedule()) - .status(expected.getStatus()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()) - .skipReset(false) - .connectionId(expected.getConnectionId()); - - final UUID sourceId = UUID.randomUUID(); - - // existing connection has a breaking change - when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn( - new ConnectionRead().connectionId(expected.getConnectionId()).breakingChange(true).sourceId(sourceId)); - - final CatalogDiff catalogDiff = new CatalogDiff().transforms(List.of()); - - when(configRepository.getMostRecentActorCatalogForSource(sourceId)).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.deserialize( - "{\"streams\": [{\"name\": 
\"cat_names\", \"namespace\": \"public\", \"json_schema\": {\"type\": \"object\", \"properties\": {\"id\": {\"type\": \"number\", \"airbyte_type\": \"integer\"}}}}]}")))); - when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff, catalogDiff); - - when(configRepository.getConfiguredCatalogForConnection(expected.getConnectionId())) - .thenReturn(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()); - when(operationsHandler.listOperationsForConnection(any())).thenReturn(operationReadList); - - final ConnectionRead connectionRead = new ConnectionRead() - .connectionId(expected.getConnectionId()) - .sourceId(expected.getSourceId()) - .destinationId(expected.getDestinationId()) - .name(expected.getName()) - .namespaceDefinition(expected.getNamespaceDefinition()) - .namespaceFormat(expected.getNamespaceFormat()) - .prefix(expected.getPrefix()) - .syncCatalog(expectedWithNewSchema.getSyncCatalog()) - .status(expected.getStatus()) - .schedule(expected.getSchedule()) - .breakingChange(false); - - when(connectionsHandler.updateConnection(any())).thenReturn(connectionRead); - - final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); - - assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); - - final ConnectionIdRequestBody connectionId = new ConnectionIdRequestBody().connectionId(result.getConnectionId()); - ArgumentCaptor expectedArgumentCaptor = ArgumentCaptor.forClass(ConnectionUpdate.class); - verify(connectionsHandler, times(1)).updateConnection(expectedArgumentCaptor.capture()); - List connectionUpdateValues = expectedArgumentCaptor.getAllValues(); - // Expect the ConnectionUpdate object to have breakingChange: false - assertEquals(false, connectionUpdateValues.get(0).getBreakingChange()); - - verify(schedulerHandler, times(0)).resetConnection(connectionId); - verify(schedulerHandler, times(0)).syncConnection(connectionId); - verify(connectionsHandler, times(2)).getDiff(any(), any(), 
any()); - verify(connectionsHandler, times(1)).updateConnection(any()); - } - - @Test - void testUpdateSchemaWithDiscoveryFromEmpty() { - final AirbyteCatalog original = new AirbyteCatalog().streams(List.of()); - final AirbyteCatalog discovered = ConnectionHelpers.generateBasicApiCatalog(); - discovered.getStreams().get(0).getStream() - .name(STREAM1) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD1, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH)); - discovered.getStreams().get(0).getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM1); - - final AirbyteCatalog expected = ConnectionHelpers.generateBasicApiCatalog(); - expected.getStreams().get(0).getStream() - .name(STREAM1) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD1, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH)); - expected.getStreams().get(0).getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM1) - .setSelected(false); - - final AirbyteCatalog actual = WebBackendConnectionsHandler.updateSchemaWithRefreshedDiscoveredCatalog(original, original, discovered); - - assertEquals(expected, actual); - } - - @Test - void testUpdateSchemaWithDiscoveryResetStream() { - final AirbyteCatalog original = ConnectionHelpers.generateBasicApiCatalog(); - original.getStreams().get(0).getStream() - .name("random-stream") - .defaultCursorField(List.of(FIELD1)) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema( - Field.of(FIELD1, JsonSchemaType.NUMBER), - Field.of(FIELD2, JsonSchemaType.NUMBER), - Field.of(FIELD5, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - 
original.getStreams().get(0).getConfig() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(FIELD1)) - .destinationSyncMode(DestinationSyncMode.APPEND) - .primaryKey(Collections.emptyList()) - .aliasName("random_stream"); - - final AirbyteCatalog discovered = ConnectionHelpers.generateBasicApiCatalog(); - discovered.getStreams().get(0).getStream() - .name(STREAM1) - .defaultCursorField(List.of(FIELD3)) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD2, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - discovered.getStreams().get(0).getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM1); - - final AirbyteCatalog expected = ConnectionHelpers.generateBasicApiCatalog(); - expected.getStreams().get(0).getStream() - .name(STREAM1) - .defaultCursorField(List.of(FIELD3)) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD2, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - expected.getStreams().get(0).getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM1) - .setSelected(false); - - final AirbyteCatalog actual = WebBackendConnectionsHandler.updateSchemaWithRefreshedDiscoveredCatalog(original, original, discovered); - - assertEquals(expected, actual); - } - - @Test - void testUpdateSchemaWithDiscoveryMergeNewStream() { - final AirbyteCatalog original = ConnectionHelpers.generateBasicApiCatalog(); - original.getStreams().get(0).getStream() - .name(STREAM1) - .defaultCursorField(List.of(FIELD1)) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema( - Field.of(FIELD1, JsonSchemaType.NUMBER), - Field.of(FIELD2, JsonSchemaType.NUMBER), - Field.of(FIELD5, 
JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - original.getStreams().get(0).getConfig() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(FIELD1)) - .destinationSyncMode(DestinationSyncMode.APPEND) - .primaryKey(Collections.emptyList()) - .aliasName("renamed_stream"); - - final AirbyteCatalog discovered = ConnectionHelpers.generateBasicApiCatalog(); - discovered.getStreams().get(0).getStream() - .name(STREAM1) - .defaultCursorField(List.of(FIELD3)) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD2, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - discovered.getStreams().get(0).getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM1); - final AirbyteStreamAndConfiguration newStream = ConnectionHelpers.generateBasicApiCatalog().getStreams().get(0); - newStream.getStream() - .name(STREAM2) - .defaultCursorField(List.of(FIELD5)) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD5, JsonSchemaType.BOOLEAN))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH)); - newStream.getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM2); - discovered.getStreams().add(newStream); - - final AirbyteCatalog expected = ConnectionHelpers.generateBasicApiCatalog(); - expected.getStreams().get(0).getStream() - .name(STREAM1) - .defaultCursorField(List.of(FIELD3)) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD2, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - expected.getStreams().get(0).getConfig() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(FIELD1)) - 
.destinationSyncMode(DestinationSyncMode.APPEND) - .primaryKey(Collections.emptyList()) - .aliasName("renamed_stream") - .setSelected(true); - final AirbyteStreamAndConfiguration expectedNewStream = ConnectionHelpers.generateBasicApiCatalog().getStreams().get(0); - expectedNewStream.getStream() - .name(STREAM2) - .defaultCursorField(List.of(FIELD5)) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD5, JsonSchemaType.BOOLEAN))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH)); - expectedNewStream.getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM2) - .setSelected(false); - expected.getStreams().add(expectedNewStream); - - final AirbyteCatalog actual = WebBackendConnectionsHandler.updateSchemaWithRefreshedDiscoveredCatalog(original, original, discovered); - - assertEquals(expected, actual); - } - - @Test - void testUpdateSchemaWithNamespacedStreams() { - final AirbyteCatalog original = ConnectionHelpers.generateBasicApiCatalog(); - final AirbyteStreamAndConfiguration stream1Config = original.getStreams().get(0); - final AirbyteStream stream1 = stream1Config.getStream(); - final AirbyteStream stream2 = new AirbyteStream() - .name(stream1.getName()) - .namespace("second_namespace") - .jsonSchema(stream1.getJsonSchema()) - .defaultCursorField(stream1.getDefaultCursorField()) - .supportedSyncModes(stream1.getSupportedSyncModes()) - .sourceDefinedCursor(stream1.getSourceDefinedCursor()) - .sourceDefinedPrimaryKey(stream1.getSourceDefinedPrimaryKey()); - final AirbyteStreamAndConfiguration stream2Config = new AirbyteStreamAndConfiguration() - .config(stream1Config.getConfig()) - .stream(stream2); - original.getStreams().add(stream2Config); - - final AirbyteCatalog discovered = ConnectionHelpers.generateBasicApiCatalog(); - discovered.getStreams().get(0).getStream() - .name(STREAM1) - 
.jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD1, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH)); - discovered.getStreams().get(0).getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM1); - - final AirbyteCatalog expected = ConnectionHelpers.generateBasicApiCatalog(); - expected.getStreams().get(0).getStream() - .name(STREAM1) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD1, JsonSchemaType.STRING))) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH)); - expected.getStreams().get(0).getConfig() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.OVERWRITE) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM1) - .setSelected(false); - - final AirbyteCatalog actual = WebBackendConnectionsHandler.updateSchemaWithRefreshedDiscoveredCatalog(original, original, discovered); - - assertEquals(expected, actual); - } - - @Test - void testGetStreamsToReset() { - final StreamTransform streamTransformAdd = - new StreamTransform().transformType(TransformTypeEnum.ADD_STREAM).streamDescriptor(new StreamDescriptor().name("added_stream")); - final StreamTransform streamTransformRemove = - new StreamTransform().transformType(TransformTypeEnum.REMOVE_STREAM).streamDescriptor(new StreamDescriptor().name("removed_stream")); - final StreamTransform streamTransformUpdate = - new StreamTransform().transformType(TransformTypeEnum.UPDATE_STREAM).streamDescriptor(new StreamDescriptor().name("updated_stream")); - final CatalogDiff catalogDiff = new CatalogDiff().transforms(List.of(streamTransformAdd, streamTransformRemove, streamTransformUpdate)); - final List resultList = WebBackendConnectionsHandler.getStreamsToReset(catalogDiff); - assertTrue( - resultList.stream().anyMatch( - streamDescriptor -> 
"added_stream".equalsIgnoreCase(streamDescriptor.getName()))); - assertTrue( - resultList.stream().anyMatch( - streamDescriptor -> "removed_stream".equalsIgnoreCase(streamDescriptor.getName()))); - assertTrue( - resultList.stream().anyMatch( - streamDescriptor -> "updated_stream".equalsIgnoreCase(streamDescriptor.getName()))); - } - - @Test - void testGetSchemaChangeNoChange() { - final ConnectionRead connectionReadNotBreaking = new ConnectionRead().breakingChange(false); - - assertEquals(SchemaChange.NO_CHANGE, wbHandler.getSchemaChange(null, Optional.of(UUID.randomUUID()), Optional.of(new ActorCatalogFetchEvent()))); - assertEquals(SchemaChange.NO_CHANGE, - wbHandler.getSchemaChange(connectionReadNotBreaking, Optional.empty(), Optional.of(new ActorCatalogFetchEvent()))); - - final UUID catalogId = UUID.randomUUID(); - - assertEquals(SchemaChange.NO_CHANGE, wbHandler.getSchemaChange(connectionReadNotBreaking, Optional.of(catalogId), - Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(catalogId)))); - } - - @Test - void testGetSchemaChangeBreaking() { - final UUID sourceId = UUID.randomUUID(); - final ConnectionRead connectionReadWithSourceId = new ConnectionRead().sourceCatalogId(UUID.randomUUID()).sourceId(sourceId).breakingChange(true); - - assertEquals(SchemaChange.BREAKING, wbHandler.getSchemaChange(connectionReadWithSourceId, - Optional.of(UUID.randomUUID()), Optional.empty())); - } - - @Test - void testGetSchemaChangeNotBreaking() { - final UUID catalogId = UUID.randomUUID(); - final UUID differentCatalogId = UUID.randomUUID(); - final ConnectionRead connectionReadWithSourceId = - new ConnectionRead().breakingChange(false); - - assertEquals(SchemaChange.NON_BREAKING, wbHandler.getSchemaChange(connectionReadWithSourceId, - Optional.of(catalogId), Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(differentCatalogId)))); - } - -} diff --git 
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendGeographiesHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendGeographiesHandlerTest.java deleted file mode 100644 index 2db572a7eabb..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendGeographiesHandlerTest.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import io.airbyte.api.model.generated.Geography; -import io.airbyte.api.model.generated.WebBackendGeographiesListResult; -import java.util.List; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class WebBackendGeographiesHandlerTest { - - private WebBackendGeographiesHandler webBackendGeographiesHandler; - - @BeforeEach - void setUp() { - webBackendGeographiesHandler = new WebBackendGeographiesHandler(); - } - - @Test - void testListGeographiesOSS() { - final WebBackendGeographiesListResult expected = new WebBackendGeographiesListResult().geographies( - List.of(Geography.AUTO)); - - final WebBackendGeographiesListResult actual = webBackendGeographiesHandler.listGeographiesOSS(); - - Assertions.assertEquals(expected, actual); - } - - @Test - void testListGeographiesCloud() { - final WebBackendGeographiesListResult expected = new WebBackendGeographiesListResult().geographies( - List.of(Geography.AUTO, Geography.US, Geography.EU)); - - final WebBackendGeographiesListResult actual = webBackendGeographiesHandler.listGeographiesCloud(); - - Assertions.assertEquals(expected, actual); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java deleted file mode 100644 index ab05041e4e74..000000000000 --- 
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java +++ /dev/null @@ -1,590 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionReadList; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationReadList; -import io.airbyte.api.model.generated.SlugRequestBody; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceReadList; -import io.airbyte.api.model.generated.WebhookConfigRead; -import io.airbyte.api.model.generated.WebhookConfigWrite; -import io.airbyte.api.model.generated.WorkspaceCreate; -import io.airbyte.api.model.generated.WorkspaceGiveFeedback; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.api.model.generated.WorkspaceRead; -import io.airbyte.api.model.generated.WorkspaceReadList; -import io.airbyte.api.model.generated.WorkspaceUpdate; -import io.airbyte.api.model.generated.WorkspaceUpdateName; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.converters.NotificationConverter; -import io.airbyte.config.Geography; -import io.airbyte.config.Notification; -import 
io.airbyte.config.Notification.NotificationType; -import io.airbyte.config.SlackNotificationConfiguration; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -class WorkspacesHandlerTest { - - private static final String FAILURE_NOTIFICATION_WEBHOOK = "http://airbyte.notifications/failure"; - private static final String NEW_WORKSPACE = "new workspace"; - private static final String TEST_NAME = "test-name"; - - private static final String TEST_AUTH_TOKEN = "test-auth-token"; - private static final UUID WEBHOOK_CONFIG_ID = UUID.randomUUID(); - private static final JsonNode PERSISTED_WEBHOOK_CONFIGS = Jsons.deserialize( - String.format("{\"webhookConfigs\": [{\"id\": \"%s\", \"name\": \"%s\", \"authToken\": {\"_secret\": \"a-secret_v1\"}}]}", - WEBHOOK_CONFIG_ID, TEST_NAME)); - public static final String UPDATED = "updated"; - private ConfigRepository configRepository; - private SecretsRepositoryWriter secretsRepositoryWriter; - private ConnectionsHandler connectionsHandler; - private DestinationHandler destinationHandler; - private SourceHandler sourceHandler; - private Supplier uuidSupplier; - private StandardWorkspace workspace; - private WorkspacesHandler workspacesHandler; - - private static final String TEST_EMAIL = "test@airbyte.io"; - private static final String TEST_WORKSPACE_NAME = "test workspace"; - private static final String 
TEST_WORKSPACE_SLUG = "test-workspace"; - - private static final io.airbyte.api.model.generated.Geography GEOGRAPHY_AUTO = - io.airbyte.api.model.generated.Geography.AUTO; - private static final io.airbyte.api.model.generated.Geography GEOGRAPHY_US = - io.airbyte.api.model.generated.Geography.US; - private SecretPersistence secretPersistence; - - @SuppressWarnings("unchecked") - @BeforeEach - void setUp() { - configRepository = mock(ConfigRepository.class); - secretPersistence = mock(SecretPersistence.class); - secretsRepositoryWriter = new SecretsRepositoryWriter(configRepository, Optional.of(secretPersistence), Optional.empty()); - connectionsHandler = mock(ConnectionsHandler.class); - destinationHandler = mock(DestinationHandler.class); - sourceHandler = mock(SourceHandler.class); - uuidSupplier = mock(Supplier.class); - - workspace = generateWorkspace(); - workspacesHandler = new WorkspacesHandler(configRepository, secretsRepositoryWriter, connectionsHandler, - destinationHandler, sourceHandler, uuidSupplier); - } - - private StandardWorkspace generateWorkspace() { - return new StandardWorkspace() - .withWorkspaceId(UUID.randomUUID()) - .withCustomerId(UUID.randomUUID()) - .withEmail(TEST_EMAIL) - .withName(TEST_WORKSPACE_NAME) - .withSlug(TEST_WORKSPACE_SLUG) - .withInitialSetupComplete(false) - .withDisplaySetupWizard(true) - .withNews(false) - .withAnonymousDataCollection(false) - .withSecurityUpdates(false) - .withTombstone(false) - .withNotifications(List.of(generateNotification())) - .withDefaultGeography(Geography.AUTO); - } - - private Notification generateNotification() { - return new Notification() - .withNotificationType(NotificationType.SLACK) - .withSlackConfiguration(new SlackNotificationConfiguration() - .withWebhook(FAILURE_NOTIFICATION_WEBHOOK)); - } - - private io.airbyte.api.model.generated.Notification generateApiNotification() { - return new io.airbyte.api.model.generated.Notification() - 
.notificationType(io.airbyte.api.model.generated.NotificationType.SLACK) - .slackConfiguration(new io.airbyte.api.model.generated.SlackNotificationConfiguration() - .webhook(FAILURE_NOTIFICATION_WEBHOOK)); - } - - @Test - void testCreateWorkspace() throws JsonValidationException, IOException, ConfigNotFoundException { - workspace.withWebhookOperationConfigs(PERSISTED_WEBHOOK_CONFIGS); - when(configRepository.getStandardWorkspaceNoSecrets(any(), eq(false))).thenReturn(workspace); - - final UUID uuid = UUID.randomUUID(); - when(uuidSupplier.get()).thenReturn(uuid); - - configRepository.writeStandardWorkspaceNoSecrets(workspace); - - final WorkspaceCreate workspaceCreate = new WorkspaceCreate() - .name(NEW_WORKSPACE) - .email(TEST_EMAIL) - .news(false) - .anonymousDataCollection(false) - .securityUpdates(false) - .notifications(List.of(generateApiNotification())) - .defaultGeography(GEOGRAPHY_US) - .webhookConfigs(List.of(new WebhookConfigWrite().name(TEST_NAME).authToken(TEST_AUTH_TOKEN))); - - final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); - final WorkspaceRead expectedRead = new WorkspaceRead() - .workspaceId(uuid) - .customerId(uuid) - .email(TEST_EMAIL) - .name(NEW_WORKSPACE) - .slug("new-workspace") - .initialSetupComplete(false) - .displaySetupWizard(false) - .news(false) - .anonymousDataCollection(false) - .securityUpdates(false) - .notifications(List.of(generateApiNotification())) - .defaultGeography(GEOGRAPHY_US) - .webhookConfigs(List.of(new WebhookConfigRead().id(uuid).name(TEST_NAME))); - - assertEquals(expectedRead, actualRead); - } - - @Test - void testCreateWorkspaceDuplicateSlug() throws JsonValidationException, IOException, ConfigNotFoundException { - when(configRepository.getWorkspaceBySlugOptional(any(String.class), eq(true))) - .thenReturn(Optional.of(workspace)) - .thenReturn(Optional.of(workspace)) - .thenReturn(Optional.empty()); - when(configRepository.getStandardWorkspaceNoSecrets(any(), 
eq(false))).thenReturn(workspace); - - final UUID uuid = UUID.randomUUID(); - when(uuidSupplier.get()).thenReturn(uuid); - - configRepository.writeStandardWorkspaceNoSecrets(workspace); - - final WorkspaceCreate workspaceCreate = new WorkspaceCreate() - .name(workspace.getName()) - .email(TEST_EMAIL) - .news(false) - .anonymousDataCollection(false) - .securityUpdates(false) - .notifications(Collections.emptyList()); - - final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); - final WorkspaceRead expectedRead = new WorkspaceRead() - .workspaceId(uuid) - .customerId(uuid) - .email(TEST_EMAIL) - .name(workspace.getName()) - .slug(workspace.getSlug()) - .initialSetupComplete(false) - .displaySetupWizard(false) - .news(false) - .anonymousDataCollection(false) - .securityUpdates(false) - .notifications(Collections.emptyList()) - .defaultGeography(GEOGRAPHY_AUTO) - .webhookConfigs(Collections.emptyList()); - - assertTrue(actualRead.getSlug().startsWith(workspace.getSlug())); - assertNotEquals(workspace.getSlug(), actualRead.getSlug()); - assertEquals(Jsons.clone(expectedRead).slug(null), Jsons.clone(actualRead).slug(null)); - final ArgumentCaptor slugCaptor = ArgumentCaptor.forClass(String.class); - verify(configRepository, times(3)).getWorkspaceBySlugOptional(slugCaptor.capture(), eq(true)); - assertEquals(3, slugCaptor.getAllValues().size()); - assertEquals(workspace.getSlug(), slugCaptor.getAllValues().get(0)); - assertTrue(slugCaptor.getAllValues().get(1).startsWith(workspace.getSlug())); - assertTrue(slugCaptor.getAllValues().get(2).startsWith(workspace.getSlug())); - - } - - @Test - void testDeleteWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(workspace.getWorkspaceId()); - - final ConnectionRead connection = new ConnectionRead(); - final DestinationRead destination = new DestinationRead(); - final 
SourceRead source = new SourceRead(); - - when(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false)).thenReturn(workspace); - - when(configRepository.listStandardWorkspaces(false)).thenReturn(Collections.singletonList(workspace)); - - when(connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody)) - .thenReturn(new ConnectionReadList().connections(Collections.singletonList(connection))); - - when(destinationHandler.listDestinationsForWorkspace(workspaceIdRequestBody)) - .thenReturn(new DestinationReadList().destinations(Collections.singletonList(destination))); - - when(sourceHandler.listSourcesForWorkspace(workspaceIdRequestBody)) - .thenReturn(new SourceReadList().sources(Collections.singletonList(source))); - - workspacesHandler.deleteWorkspace(workspaceIdRequestBody); - - verify(connectionsHandler).deleteConnection(connection.getConnectionId()); - verify(destinationHandler).deleteDestination(destination); - verify(sourceHandler).deleteSource(source); - } - - @Test - void testListWorkspaces() throws JsonValidationException, IOException { - final StandardWorkspace workspace2 = generateWorkspace(); - - when(configRepository.listStandardWorkspaces(false)).thenReturn(Lists.newArrayList(workspace, workspace2)); - - final WorkspaceRead expectedWorkspaceRead1 = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(workspace.getEmail()) - .name(workspace.getName()) - .slug(workspace.getSlug()) - .initialSetupComplete(workspace.getInitialSetupComplete()) - .displaySetupWizard(workspace.getDisplaySetupWizard()) - .news(workspace.getNews()) - .anonymousDataCollection(workspace.getAnonymousDataCollection()) - .securityUpdates(workspace.getSecurityUpdates()) - .notifications(List.of(generateApiNotification())) - .defaultGeography(GEOGRAPHY_AUTO); - - final WorkspaceRead expectedWorkspaceRead2 = new WorkspaceRead() - .workspaceId(workspace2.getWorkspaceId()) - 
.customerId(workspace2.getCustomerId()) - .email(workspace2.getEmail()) - .name(workspace2.getName()) - .slug(workspace2.getSlug()) - .initialSetupComplete(workspace2.getInitialSetupComplete()) - .displaySetupWizard(workspace2.getDisplaySetupWizard()) - .news(workspace2.getNews()) - .anonymousDataCollection(workspace2.getAnonymousDataCollection()) - .securityUpdates(workspace2.getSecurityUpdates()) - .notifications(List.of(generateApiNotification())) - .defaultGeography(GEOGRAPHY_AUTO); - - final WorkspaceReadList actualWorkspaceReadList = workspacesHandler.listWorkspaces(); - - assertEquals(Lists.newArrayList(expectedWorkspaceRead1, expectedWorkspaceRead2), - actualWorkspaceReadList.getWorkspaces()); - } - - @Test - void testGetWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - workspace.withWebhookOperationConfigs(PERSISTED_WEBHOOK_CONFIGS); - when(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false)).thenReturn(workspace); - - final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(workspace.getWorkspaceId()); - - final WorkspaceRead workspaceRead = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(TEST_EMAIL) - .name(TEST_WORKSPACE_NAME) - .slug(TEST_WORKSPACE_SLUG) - .initialSetupComplete(false) - .displaySetupWizard(true) - .news(false) - .anonymousDataCollection(false) - .securityUpdates(false) - .notifications(List.of(generateApiNotification())) - .defaultGeography(GEOGRAPHY_AUTO) - .webhookConfigs(List.of(new WebhookConfigRead().id(WEBHOOK_CONFIG_ID).name(TEST_NAME))); - - assertEquals(workspaceRead, workspacesHandler.getWorkspace(workspaceIdRequestBody)); - } - - @Test - void testGetWorkspaceBySlug() throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.getWorkspaceBySlug("default", false)).thenReturn(workspace); - - final SlugRequestBody 
slugRequestBody = new SlugRequestBody().slug("default"); - final WorkspaceRead workspaceRead = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(TEST_EMAIL) - .name(workspace.getName()) - .slug(workspace.getSlug()) - .initialSetupComplete(workspace.getInitialSetupComplete()) - .displaySetupWizard(workspace.getDisplaySetupWizard()) - .news(workspace.getNews()) - .anonymousDataCollection(workspace.getAnonymousDataCollection()) - .securityUpdates(workspace.getSecurityUpdates()) - .notifications(NotificationConverter.toApiList(workspace.getNotifications())) - .defaultGeography(GEOGRAPHY_AUTO); - - assertEquals(workspaceRead, workspacesHandler.getWorkspaceBySlug(slugRequestBody)); - } - - @Test - void testGetWorkspaceByConnectionId() { - final UUID connectionId = UUID.randomUUID(); - when(configRepository.getStandardWorkspaceFromConnection(connectionId, false)).thenReturn(workspace); - final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(connectionId); - final WorkspaceRead workspaceRead = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(TEST_EMAIL) - .name(workspace.getName()) - .slug(workspace.getSlug()) - .initialSetupComplete(workspace.getInitialSetupComplete()) - .displaySetupWizard(workspace.getDisplaySetupWizard()) - .news(workspace.getNews()) - .anonymousDataCollection(workspace.getAnonymousDataCollection()) - .securityUpdates(workspace.getSecurityUpdates()) - .notifications(NotificationConverter.toApiList(workspace.getNotifications())) - .defaultGeography(GEOGRAPHY_AUTO); - - assertEquals(workspaceRead, workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody)); - } - - @Test - void testUpdateWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - final io.airbyte.api.model.generated.Notification apiNotification = generateApiNotification(); - 
apiNotification.getSlackConfiguration().webhook(UPDATED); - final WorkspaceUpdate workspaceUpdate = new WorkspaceUpdate() - .workspaceId(workspace.getWorkspaceId()) - .anonymousDataCollection(true) - .securityUpdates(false) - .news(false) - .initialSetupComplete(true) - .displaySetupWizard(false) - .notifications(List.of(apiNotification)) - .defaultGeography(GEOGRAPHY_US) - .webhookConfigs(List.of(new WebhookConfigWrite().name(TEST_NAME).authToken("test-auth-token"))); - - final Notification expectedNotification = generateNotification(); - expectedNotification.getSlackConfiguration().withWebhook(UPDATED); - final StandardWorkspace expectedWorkspace = new StandardWorkspace() - .withWorkspaceId(workspace.getWorkspaceId()) - .withCustomerId(workspace.getCustomerId()) - .withEmail(TEST_EMAIL) - .withName(TEST_WORKSPACE_NAME) - .withSlug(TEST_WORKSPACE_SLUG) - .withAnonymousDataCollection(true) - .withSecurityUpdates(false) - .withNews(false) - .withInitialSetupComplete(true) - .withDisplaySetupWizard(false) - .withTombstone(false) - .withNotifications(List.of(expectedNotification)) - .withDefaultGeography(Geography.US) - .withWebhookOperationConfigs(PERSISTED_WEBHOOK_CONFIGS); - - when(uuidSupplier.get()).thenReturn(WEBHOOK_CONFIG_ID); - - when(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false)) - .thenReturn(workspace) - .thenReturn(expectedWorkspace); - - final WorkspaceRead actualWorkspaceRead = workspacesHandler.updateWorkspace(workspaceUpdate); - - final io.airbyte.api.model.generated.Notification expectedNotificationRead = generateApiNotification(); - expectedNotificationRead.getSlackConfiguration().webhook(UPDATED); - final WorkspaceRead expectedWorkspaceRead = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(TEST_EMAIL) - .name(TEST_WORKSPACE_NAME) - .slug(TEST_WORKSPACE_SLUG) - .initialSetupComplete(true) - .displaySetupWizard(false) - .news(false) - 
.anonymousDataCollection(true) - .securityUpdates(false) - .notifications(List.of(expectedNotificationRead)) - .defaultGeography(GEOGRAPHY_US) - .webhookConfigs(List.of(new WebhookConfigRead().name(TEST_NAME).id(WEBHOOK_CONFIG_ID))); - - verify(configRepository).writeStandardWorkspaceNoSecrets(expectedWorkspace); - - assertEquals(expectedWorkspaceRead, actualWorkspaceRead); - } - - @Test - void testUpdateWorkspaceWithoutWebhookConfigs() throws JsonValidationException, ConfigNotFoundException, IOException { - final io.airbyte.api.model.generated.Notification apiNotification = generateApiNotification(); - apiNotification.getSlackConfiguration().webhook(UPDATED); - final WorkspaceUpdate workspaceUpdate = new WorkspaceUpdate() - .workspaceId(workspace.getWorkspaceId()) - .anonymousDataCollection(false); - - final Notification expectedNotification = generateNotification(); - expectedNotification.getSlackConfiguration().withWebhook(UPDATED); - final StandardWorkspace expectedWorkspace = new StandardWorkspace() - .withWorkspaceId(workspace.getWorkspaceId()) - .withCustomerId(workspace.getCustomerId()) - .withEmail(TEST_EMAIL) - .withName(TEST_WORKSPACE_NAME) - .withSlug(TEST_WORKSPACE_SLUG) - .withAnonymousDataCollection(true) - .withSecurityUpdates(false) - .withNews(false) - .withInitialSetupComplete(true) - .withDisplaySetupWizard(false) - .withTombstone(false) - .withNotifications(List.of(expectedNotification)) - .withDefaultGeography(Geography.US) - .withWebhookOperationConfigs(PERSISTED_WEBHOOK_CONFIGS); - - when(uuidSupplier.get()).thenReturn(WEBHOOK_CONFIG_ID); - - when(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false)) - .thenReturn(expectedWorkspace) - .thenReturn(expectedWorkspace.withAnonymousDataCollection(false)); - - workspacesHandler.updateWorkspace(workspaceUpdate); - - verify(configRepository).writeStandardWorkspaceNoSecrets(expectedWorkspace); - } - - @Test - @DisplayName("Updating workspace name should update name and 
slug") - void testUpdateWorkspaceNoNameUpdate() throws JsonValidationException, ConfigNotFoundException, IOException { - final WorkspaceUpdateName workspaceUpdate = new WorkspaceUpdateName() - .workspaceId(workspace.getWorkspaceId()) - .name("New Workspace Name"); - - final StandardWorkspace expectedWorkspace = new StandardWorkspace() - .withWorkspaceId(workspace.getWorkspaceId()) - .withCustomerId(workspace.getCustomerId()) - .withEmail(TEST_EMAIL) - .withName("New Workspace Name") - .withSlug("new-workspace-name") - .withAnonymousDataCollection(workspace.getAnonymousDataCollection()) - .withSecurityUpdates(workspace.getSecurityUpdates()) - .withNews(workspace.getNews()) - .withInitialSetupComplete(workspace.getInitialSetupComplete()) - .withDisplaySetupWizard(workspace.getDisplaySetupWizard()) - .withTombstone(false) - .withNotifications(workspace.getNotifications()) - .withDefaultGeography(Geography.AUTO); - - when(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false)) - .thenReturn(workspace) - .thenReturn(expectedWorkspace); - - final WorkspaceRead actualWorkspaceRead = workspacesHandler.updateWorkspaceName(workspaceUpdate); - - final WorkspaceRead expectedWorkspaceRead = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(TEST_EMAIL) - .name("New Workspace Name") - .slug("new-workspace-name") - .initialSetupComplete(workspace.getInitialSetupComplete()) - .displaySetupWizard(workspace.getDisplaySetupWizard()) - .news(workspace.getNews()) - .anonymousDataCollection(workspace.getAnonymousDataCollection()) - .securityUpdates(workspace.getSecurityUpdates()) - .notifications(List.of(generateApiNotification())) - .defaultGeography(GEOGRAPHY_AUTO); - - verify(configRepository).writeStandardWorkspaceNoSecrets(expectedWorkspace); - - assertEquals(expectedWorkspaceRead, actualWorkspaceRead); - } - - @Test - @DisplayName("Partial patch update should preserve unchanged fields") - void 
testWorkspacePatchUpdate() throws JsonValidationException, ConfigNotFoundException, IOException { - final String EXPECTED_NEW_EMAIL = "expected-new-email@example.com"; - final WorkspaceUpdate workspaceUpdate = new WorkspaceUpdate() - .workspaceId(workspace.getWorkspaceId()) - .anonymousDataCollection(true) - .email(EXPECTED_NEW_EMAIL); - - final StandardWorkspace expectedWorkspace = Jsons.clone(workspace).withEmail(EXPECTED_NEW_EMAIL).withAnonymousDataCollection(true); - when(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false)) - .thenReturn(workspace) - .thenReturn(expectedWorkspace); - // The same as the original workspace, with only the email and data collection flags changed. - final WorkspaceRead expectedWorkspaceRead = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(EXPECTED_NEW_EMAIL) - .name(workspace.getName()) - .slug(workspace.getSlug()) - .initialSetupComplete(workspace.getInitialSetupComplete()) - .displaySetupWizard(workspace.getDisplaySetupWizard()) - .news(workspace.getNews()) - .anonymousDataCollection(true) - .securityUpdates(workspace.getSecurityUpdates()) - .notifications(NotificationConverter.toApiList(workspace.getNotifications())) - .defaultGeography(GEOGRAPHY_AUTO); - - final WorkspaceRead actualWorkspaceRead = workspacesHandler.updateWorkspace(workspaceUpdate); - verify(configRepository).writeStandardWorkspaceNoSecrets(expectedWorkspace); - assertEquals(expectedWorkspaceRead, actualWorkspaceRead); - } - - @Test - void testSetFeedbackDone() throws JsonValidationException, ConfigNotFoundException, IOException { - final WorkspaceGiveFeedback workspaceGiveFeedback = new WorkspaceGiveFeedback() - .workspaceId(UUID.randomUUID()); - - workspacesHandler.setFeedbackDone(workspaceGiveFeedback); - - verify(configRepository).setFeedback(workspaceGiveFeedback.getWorkspaceId()); - } - - @Test - void testWorkspaceIsWrittenThroughSecretsWriter() throws 
JsonValidationException, IOException { - secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); - workspacesHandler = new WorkspacesHandler(configRepository, secretsRepositoryWriter, connectionsHandler, - destinationHandler, sourceHandler, uuidSupplier); - - final UUID uuid = UUID.randomUUID(); - when(uuidSupplier.get()).thenReturn(uuid); - - final WorkspaceCreate workspaceCreate = new WorkspaceCreate() - .name(NEW_WORKSPACE) - .email(TEST_EMAIL) - .news(false) - .anonymousDataCollection(false) - .securityUpdates(false) - .notifications(List.of(generateApiNotification())) - .defaultGeography(GEOGRAPHY_US); - - final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); - final WorkspaceRead expectedRead = new WorkspaceRead() - .workspaceId(uuid) - .customerId(uuid) - .email(TEST_EMAIL) - .name(NEW_WORKSPACE) - .slug("new-workspace") - .initialSetupComplete(false) - .displaySetupWizard(false) - .news(false) - .anonymousDataCollection(false) - .securityUpdates(false) - .notifications(List.of(generateApiNotification())) - .defaultGeography(GEOGRAPHY_US) - .webhookConfigs(Collections.emptyList()); - - assertEquals(expectedRead, actualRead); - verify(secretsRepositoryWriter, times(1)).writeWorkspace(any()); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helper/OAuthPathExtractorTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helper/OAuthPathExtractorTest.java deleted file mode 100644 index 4caeb44a65b7..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helper/OAuthPathExtractorTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.handlers.helper; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.handlers.helpers.OAuthPathExtractor; -import java.util.List; -import java.util.Map; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; - -class OAuthPathExtractorTest { - - @Test - void testExtract() { - final JsonNode input = Jsons.deserialize(""" - { - "type": "object", - "additionalProperties": false, - "properties": { - "tenant_id": { - "type": "string", - "path_in_connector_config": ["tenant_id"] - }, - "another_property": { - "type": "string", - "path_in_connector_config": ["another", "property"] - } - } - } - """); - - final Map> expected = Map.ofEntries( - Map.entry("tenant_id", List.of("tenant_id")), - Map.entry("another_property", List.of("another", "property"))); - - Assertions.assertThat(OAuthPathExtractor.extractOauthConfigurationPaths(input)) - .containsExactlyInAnyOrderEntriesOf(expected); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/ConnectionHelpers.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/ConnectionHelpers.java deleted file mode 100644 index b509e569daff..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/ConnectionHelpers.java +++ /dev/null @@ -1,401 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.helpers; - -import static io.airbyte.commons.server.handlers.helpers.CatalogConverter.toApi; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.api.model.generated.AirbyteCatalog; -import io.airbyte.api.model.generated.AirbyteStream; -import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration; -import io.airbyte.api.model.generated.AirbyteStreamConfiguration; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionSchedule; -import io.airbyte.api.model.generated.ConnectionSchedule.TimeUnitEnum; -import io.airbyte.api.model.generated.ConnectionScheduleData; -import io.airbyte.api.model.generated.ConnectionScheduleDataBasicSchedule; -import io.airbyte.api.model.generated.ConnectionScheduleType; -import io.airbyte.api.model.generated.ConnectionStatus; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationSnippetRead; -import io.airbyte.api.model.generated.Geography; -import io.airbyte.api.model.generated.JobStatus; -import io.airbyte.api.model.generated.ResourceRequirements; -import io.airbyte.api.model.generated.SchemaChange; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceSnippetRead; -import io.airbyte.api.model.generated.SyncMode; -import io.airbyte.api.model.generated.WebBackendConnectionListItem; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.converters.ApiPojoConverters; -import io.airbyte.commons.text.Names; -import io.airbyte.config.BasicSchedule; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.Schedule; -import io.airbyte.config.Schedule.TimeUnit; -import io.airbyte.config.ScheduleData; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.Status; -import io.airbyte.protocol.models.CatalogHelpers; -import 
io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.DestinationSyncMode; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.StreamDescriptor; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.UUID; - -public class ConnectionHelpers { - - private static final String STREAM_NAME_BASE = "users-data"; - private static final String STREAM_NAME = STREAM_NAME_BASE + "0"; - public static final String FIELD_NAME = "id"; - - public static final String SECOND_FIELD_NAME = "id2"; - private static final String BASIC_SCHEDULE_TIME_UNIT = "days"; - private static final long BASIC_SCHEDULE_UNITS = 1L; - private static final String BASIC_SCHEDULE_DATA_TIME_UNITS = "days"; - private static final long BASIC_SCHEDULE_DATA_UNITS = 1L; - private static final String ONE_HUNDRED_G = "100g"; - private static final String STANDARD_SYNC_NAME = "presto to hudi"; - private static final String STANDARD_SYNC_PREFIX = "presto_to_hudi"; - - public static final StreamDescriptor STREAM_DESCRIPTOR = new StreamDescriptor().withName(STREAM_NAME); - - // only intended for unit tests, so intentionally set very high to ensure they aren't being used - // elsewhere - public static final io.airbyte.config.ResourceRequirements TESTING_RESOURCE_REQUIREMENTS = new io.airbyte.config.ResourceRequirements() - .withCpuLimit(ONE_HUNDRED_G) - .withCpuRequest(ONE_HUNDRED_G) - .withMemoryLimit(ONE_HUNDRED_G) - .withMemoryRequest(ONE_HUNDRED_G); - - public static StandardSync generateSyncWithSourceId(final UUID sourceId) { - final UUID connectionId = UUID.randomUUID(); - - return new StandardSync() - .withConnectionId(connectionId) - .withName(STANDARD_SYNC_NAME) - .withNamespaceDefinition(NamespaceDefinitionType.SOURCE) - .withNamespaceFormat(null) - .withPrefix(STANDARD_SYNC_PREFIX) - 
.withStatus(StandardSync.Status.ACTIVE) - .withCatalog(generateBasicConfiguredAirbyteCatalog()) - .withSourceId(sourceId) - .withDestinationId(UUID.randomUUID()) - .withOperationIds(List.of(UUID.randomUUID())) - .withManual(false) - .withSchedule(generateBasicSchedule()) - .withResourceRequirements(TESTING_RESOURCE_REQUIREMENTS) - .withBreakingChange(false); - } - - public static StandardSync generateSyncWithDestinationId(final UUID destinationId) { - final UUID connectionId = UUID.randomUUID(); - - return new StandardSync() - .withConnectionId(connectionId) - .withName(STANDARD_SYNC_NAME) - .withNamespaceDefinition(NamespaceDefinitionType.SOURCE) - .withNamespaceFormat(null) - .withPrefix(STANDARD_SYNC_PREFIX) - .withStatus(StandardSync.Status.ACTIVE) - .withCatalog(generateBasicConfiguredAirbyteCatalog()) - .withSourceId(UUID.randomUUID()) - .withDestinationId(destinationId) - .withOperationIds(List.of(UUID.randomUUID())) - .withManual(true); - } - - public static StandardSync generateSyncWithSourceAndDestinationId(final UUID sourceId, - final UUID destinationId, - final boolean isBroken, - final Status status) { - final UUID connectionId = UUID.randomUUID(); - - return new StandardSync() - .withConnectionId(connectionId) - .withName(STANDARD_SYNC_NAME) - .withNamespaceDefinition(NamespaceDefinitionType.SOURCE) - .withNamespaceFormat(null) - .withPrefix(STANDARD_SYNC_PREFIX) - .withStatus(status) - .withCatalog(generateBasicConfiguredAirbyteCatalog()) - .withSourceCatalogId(UUID.randomUUID()) - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withOperationIds(List.of(UUID.randomUUID())) - .withManual(true) - .withBreakingChange(isBroken); - } - - public static ConnectionSchedule generateBasicConnectionSchedule() { - return new ConnectionSchedule() - .timeUnit(ConnectionSchedule.TimeUnitEnum.fromValue(BASIC_SCHEDULE_TIME_UNIT)) - .units(BASIC_SCHEDULE_UNITS); - } - - public static Schedule generateBasicSchedule() { - return new Schedule() - 
.withTimeUnit(TimeUnit.fromValue(BASIC_SCHEDULE_TIME_UNIT)) - .withUnits(BASIC_SCHEDULE_UNITS); - } - - public static ConnectionScheduleData generateBasicConnectionScheduleData() { - return new ConnectionScheduleData().basicSchedule( - new ConnectionScheduleDataBasicSchedule().timeUnit(ConnectionScheduleDataBasicSchedule.TimeUnitEnum.DAYS).units(BASIC_SCHEDULE_UNITS)); - } - - public static ScheduleData generateBasicScheduleData() { - return new ScheduleData().withBasicSchedule(new BasicSchedule() - .withTimeUnit(BasicSchedule.TimeUnit.fromValue((BASIC_SCHEDULE_DATA_TIME_UNITS))) - .withUnits(BASIC_SCHEDULE_DATA_UNITS)); - } - - public static ConnectionRead generateExpectedConnectionRead(final UUID connectionId, - final UUID sourceId, - final UUID destinationId, - final List operationIds, - final UUID sourceCatalogId, - final Geography geography, - final boolean breaking) { - - return new ConnectionRead() - .connectionId(connectionId) - .sourceId(sourceId) - .destinationId(destinationId) - .operationIds(operationIds) - .name("presto to hudi") - .namespaceDefinition(io.airbyte.api.model.generated.NamespaceDefinitionType.SOURCE) - .namespaceFormat(null) - .prefix("presto_to_hudi") - .schedule(generateBasicConnectionSchedule()) - .scheduleType(ConnectionScheduleType.BASIC) - .scheduleData(generateBasicConnectionScheduleData()) - .syncCatalog(ConnectionHelpers.generateBasicApiCatalog()) - .resourceRequirements(new ResourceRequirements() - .cpuRequest(TESTING_RESOURCE_REQUIREMENTS.getCpuRequest()) - .cpuLimit(TESTING_RESOURCE_REQUIREMENTS.getCpuLimit()) - .memoryRequest(TESTING_RESOURCE_REQUIREMENTS.getMemoryRequest()) - .memoryLimit(TESTING_RESOURCE_REQUIREMENTS.getMemoryLimit())) - .sourceCatalogId(sourceCatalogId) - .geography(geography) - .breakingChange(breaking); - } - - public static ConnectionRead generateExpectedConnectionRead(final StandardSync standardSync) { - final ConnectionRead connectionRead = generateExpectedConnectionRead( - 
standardSync.getConnectionId(), - standardSync.getSourceId(), - standardSync.getDestinationId(), - standardSync.getOperationIds(), - standardSync.getSourceCatalogId(), - Enums.convertTo(standardSync.getGeography(), Geography.class), - standardSync.getBreakingChange()); - - if (standardSync.getSchedule() == null) { - connectionRead.schedule(null); - } else { - connectionRead.schedule(new ConnectionSchedule() - .timeUnit(TimeUnitEnum.fromValue(standardSync.getSchedule().getTimeUnit().value())) - .units(standardSync.getSchedule().getUnits())); - } - - if (standardSync.getStatus() == Status.INACTIVE) { - connectionRead.setStatus(ConnectionStatus.INACTIVE); - } else if (standardSync.getStatus() == Status.ACTIVE) { - connectionRead.setStatus(ConnectionStatus.ACTIVE); - } else if (standardSync.getStatus() == Status.DEPRECATED) { - connectionRead.setStatus(ConnectionStatus.DEPRECATED); - } - - return connectionRead; - } - - public static ConnectionRead connectionReadFromStandardSync(final StandardSync standardSync) { - final ConnectionRead connectionRead = new ConnectionRead(); - connectionRead - .connectionId(standardSync.getConnectionId()) - .sourceId(standardSync.getSourceId()) - .destinationId(standardSync.getDestinationId()) - .operationIds(standardSync.getOperationIds()) - .name(standardSync.getName()) - .namespaceFormat(standardSync.getNamespaceFormat()) - .prefix(standardSync.getPrefix()) - .sourceCatalogId(standardSync.getSourceCatalogId()) - .geography(ApiPojoConverters.toApiGeography(standardSync.getGeography())) - .breakingChange(standardSync.getBreakingChange()); - - if (standardSync.getNamespaceDefinition() != null) { - connectionRead - .namespaceDefinition(io.airbyte.api.model.generated.NamespaceDefinitionType.fromValue(standardSync.getNamespaceDefinition().value())); - } - if (standardSync.getStatus() != null) { - connectionRead.status(io.airbyte.api.model.generated.ConnectionStatus.fromValue(standardSync.getStatus().value())); - } - 
ApiPojoConverters.populateConnectionReadSchedule(standardSync, connectionRead); - - if (standardSync.getCatalog() != null) { - connectionRead.syncCatalog(toApi(standardSync.getCatalog(), standardSync.getFieldSelectionData())); - } - if (standardSync.getResourceRequirements() != null) { - connectionRead.resourceRequirements(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuLimit(standardSync.getResourceRequirements().getCpuLimit()) - .cpuRequest(standardSync.getResourceRequirements().getCpuRequest()) - .memoryLimit(standardSync.getResourceRequirements().getMemoryLimit()) - .memoryRequest(standardSync.getResourceRequirements().getMemoryRequest())); - } - return connectionRead; - } - - public static WebBackendConnectionListItem generateExpectedWebBackendConnectionListItem( - final StandardSync standardSync, - final SourceRead source, - final DestinationRead destination, - final boolean isSyncing, - final Long latestSyncJobCreatedAt, - final JobStatus latestSynJobStatus, - final SchemaChange schemaChange) { - - final WebBackendConnectionListItem connectionListItem = new WebBackendConnectionListItem() - .connectionId(standardSync.getConnectionId()) - .name(standardSync.getName()) - .source(new SourceSnippetRead() - .icon(source.getIcon()) - .name(source.getName()) - .sourceName(source.getSourceName()) - .sourceDefinitionId(source.getSourceDefinitionId()) - .sourceId(source.getSourceId())) - .destination(new DestinationSnippetRead() - .icon(destination.getIcon()) - .name(destination.getName()) - .destinationName(destination.getDestinationName()) - .destinationDefinitionId(destination.getDestinationDefinitionId()) - .destinationId(destination.getDestinationId())) - .status(ApiPojoConverters.toApiStatus(standardSync.getStatus())) - .isSyncing(isSyncing) - .latestSyncJobCreatedAt(latestSyncJobCreatedAt) - .latestSyncJobStatus(latestSynJobStatus) - .scheduleType(ApiPojoConverters.toApiConnectionScheduleType(standardSync)) - 
.scheduleData(ApiPojoConverters.toApiConnectionScheduleData(standardSync)) - .schemaChange(schemaChange); - - return connectionListItem; - } - - public static JsonNode generateBasicJsonSchema() { - return CatalogHelpers.fieldsToJsonSchema(Field.of(FIELD_NAME, JsonSchemaType.STRING)); - } - - public static JsonNode generateJsonSchemaWithTwoFields() { - return CatalogHelpers.fieldsToJsonSchema( - Field.of(FIELD_NAME, JsonSchemaType.STRING), - Field.of(SECOND_FIELD_NAME, JsonSchemaType.STRING)); - } - - public static ConfiguredAirbyteCatalog generateBasicConfiguredAirbyteCatalog() { - return new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(generateBasicConfiguredStream(null))); - } - - public static ConfiguredAirbyteCatalog generateAirbyteCatalogWithTwoFields() { - return new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(new ConfiguredAirbyteStream() - .withStream( - new io.airbyte.protocol.models.AirbyteStream() - .withName(STREAM_NAME) - .withJsonSchema(generateJsonSchemaWithTwoFields()) - .withDefaultCursorField(Lists.newArrayList(FIELD_NAME)) - .withSourceDefinedCursor(false) - .withSourceDefinedPrimaryKey(Collections.emptyList()) - .withSupportedSyncModes( - List.of(io.airbyte.protocol.models.SyncMode.FULL_REFRESH, io.airbyte.protocol.models.SyncMode.INCREMENTAL))))); - } - - public static ConfiguredAirbyteCatalog generateMultipleStreamsConfiguredAirbyteCatalog(final int streamsCount) { - final List configuredStreams = new ArrayList<>(); - for (int i = 0; i < streamsCount; i++) { - configuredStreams.add(generateBasicConfiguredStream(String.valueOf(i))); - } - return new ConfiguredAirbyteCatalog().withStreams(configuredStreams); - } - - public static ConfiguredAirbyteStream generateBasicConfiguredStream(final String nameSuffix) { - return new ConfiguredAirbyteStream() - .withStream(generateBasicAirbyteStream(nameSuffix)) - .withCursorField(Lists.newArrayList(FIELD_NAME)) - 
.withSyncMode(io.airbyte.protocol.models.SyncMode.INCREMENTAL) - .withDestinationSyncMode(DestinationSyncMode.APPEND); - } - - private static io.airbyte.protocol.models.AirbyteStream generateBasicAirbyteStream(final String nameSuffix) { - return CatalogHelpers.createAirbyteStream( - nameSuffix == null ? STREAM_NAME : STREAM_NAME_BASE + nameSuffix, Field.of(FIELD_NAME, JsonSchemaType.STRING)) - .withDefaultCursorField(Lists.newArrayList(FIELD_NAME)) - .withSourceDefinedCursor(false) - .withSupportedSyncModes(List.of(io.airbyte.protocol.models.SyncMode.FULL_REFRESH, io.airbyte.protocol.models.SyncMode.INCREMENTAL)); - } - - public static AirbyteCatalog generateBasicApiCatalog() { - return new AirbyteCatalog().streams(Lists.newArrayList(new AirbyteStreamAndConfiguration() - .stream(generateBasicApiStream(null)) - .config(generateBasicApiStreamConfig(null)))); - } - - /** - * Generates an API catalog that has two fields, both selected. - * - * @return AirbyteCatalog - */ - public static AirbyteCatalog generateApiCatalogWithTwoFields() { - return new AirbyteCatalog().streams(Lists.newArrayList(new AirbyteStreamAndConfiguration() - .stream(generateApiStreamWithTwoFields()) - .config(generateBasicApiStreamConfig(null)))); - } - - public static AirbyteCatalog generateMultipleStreamsApiCatalog(final int streamsCount) { - final List streamAndConfigurations = new ArrayList<>(); - for (int i = 0; i < streamsCount; i++) { - streamAndConfigurations.add(new AirbyteStreamAndConfiguration() - .stream(generateBasicApiStream(String.valueOf(i))) - .config(generateBasicApiStreamConfig(String.valueOf(i)))); - } - return new AirbyteCatalog().streams(streamAndConfigurations); - } - - private static AirbyteStreamConfiguration generateBasicApiStreamConfig(final String nameSuffix) { - return new AirbyteStreamConfiguration() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(Lists.newArrayList(FIELD_NAME)) - .destinationSyncMode(io.airbyte.api.model.generated.DestinationSyncMode.APPEND) - 
.primaryKey(Collections.emptyList()) - .aliasName(Names.toAlphanumericAndUnderscore(nameSuffix == null ? STREAM_NAME : STREAM_NAME_BASE + nameSuffix)) - .selected(true) - .fieldSelectionEnabled(false); - } - - private static AirbyteStream generateBasicApiStream(final String nameSuffix) { - return new AirbyteStream() - .name(nameSuffix == null ? STREAM_NAME : STREAM_NAME_BASE + nameSuffix) - .jsonSchema(generateBasicJsonSchema()) - .defaultCursorField(Lists.newArrayList(FIELD_NAME)) - .sourceDefinedCursor(false) - .sourceDefinedPrimaryKey(Collections.emptyList()) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - } - - private static AirbyteStream generateApiStreamWithTwoFields() { - return new AirbyteStream() - .name(STREAM_NAME) - .jsonSchema(generateJsonSchemaWithTwoFields()) - .defaultCursorField(Lists.newArrayList(FIELD_NAME)) - .sourceDefinedCursor(false) - .sourceDefinedPrimaryKey(Collections.emptyList()) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/ConnectorSpecificationHelpers.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/ConnectorSpecificationHelpers.java deleted file mode 100644 index 2997705ae42b..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/ConnectorSpecificationHelpers.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.helpers; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; - -public class ConnectorSpecificationHelpers { - - public static ConnectorSpecification generateConnectorSpecification() throws IOException { - - final Path path = Paths.get(ConnectorSpecificationHelpers.class.getClassLoader().getResource("json/TestSpecification.json").getPath()); - - try { - return new ConnectorSpecification() - .withDocumentationUrl(new URI("https://airbyte.io")) - .withConnectionSpecification(Jsons.deserialize(Files.readString(path))) - .withSupportsDBT(false) - .withSupportsNormalization(false); - } catch (final URISyntaxException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/DestinationDefinitionHelpers.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/DestinationDefinitionHelpers.java deleted file mode 100644 index eb23997843a4..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/DestinationDefinitionHelpers.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.helpers; - -import io.airbyte.config.StandardDestinationDefinition; -import java.util.UUID; - -public class DestinationDefinitionHelpers { - - public static StandardDestinationDefinition generateDestination() { - return new StandardDestinationDefinition() - .withDestinationDefinitionId(UUID.randomUUID()) - .withName("db2") - .withDockerRepository("thebestrepo") - .withDockerImageTag("thelatesttag") - .withDocumentationUrl("https://wikipedia.org"); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/DestinationHelpers.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/DestinationHelpers.java deleted file mode 100644 index 5a49af66e279..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/DestinationHelpers.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.handlers.DestinationDefinitionsHandler; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.StandardDestinationDefinition; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.UUID; - -public class DestinationHelpers { - - public static JsonNode getTestDestinationJson() throws IOException { - final Path path = - Paths.get(DestinationHelpers.class.getClassLoader().getResource("json/TestImplementation.json").getPath()); - - return Jsons.deserialize(Files.readString(path)); - } - - public static DestinationConnection generateDestination(final UUID destinationDefinitionId) throws IOException { - return generateDestination(destinationDefinitionId, "my default dest name", false); - } - - public static 
DestinationConnection generateDestination(final UUID destinationDefinitionId, final String name) throws IOException { - return generateDestination(destinationDefinitionId, name, false); - } - - public static DestinationConnection generateDestination(final UUID destinationDefinitionId, final boolean tombstone) throws IOException { - return generateDestination(destinationDefinitionId, "my default dest name", tombstone); - } - - public static DestinationConnection generateDestination(final UUID destinationDefinitionId, final String name, final boolean tombstone) - throws IOException { - final UUID workspaceId = UUID.randomUUID(); - final UUID destinationId = UUID.randomUUID(); - - final JsonNode implementationJson = getTestDestinationJson(); - - return new DestinationConnection() - .withName(name) - .withWorkspaceId(workspaceId) - .withDestinationDefinitionId(destinationDefinitionId) - .withDestinationId(destinationId) - .withConfiguration(implementationJson) - .withTombstone(tombstone); - } - - public static DestinationRead getDestinationRead(final DestinationConnection destination, - final StandardDestinationDefinition standardDestinationDefinition) { - return new DestinationRead() - .destinationDefinitionId(standardDestinationDefinition.getDestinationDefinitionId()) - .workspaceId(destination.getWorkspaceId()) - .destinationDefinitionId(destination.getDestinationDefinitionId()) - .destinationId(destination.getDestinationId()) - .connectionConfiguration(destination.getConfiguration()) - .name(destination.getName()) - .destinationName(standardDestinationDefinition.getName()) - .icon(DestinationDefinitionsHandler.loadIcon(standardDestinationDefinition.getIcon())); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/SourceDefinitionHelpers.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/SourceDefinitionHelpers.java deleted file mode 100644 index d76d0726e822..000000000000 --- 
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/SourceDefinitionHelpers.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.helpers; - -import io.airbyte.config.StandardSourceDefinition; -import java.util.UUID; - -public class SourceDefinitionHelpers { - - public static StandardSourceDefinition generateSourceDefinition() { - return new StandardSourceDefinition() - .withSourceDefinitionId(UUID.randomUUID()) - .withName("marketo") - .withDockerRepository("thebestrepo") - .withDockerImageTag("thelatesttag") - .withDocumentationUrl("https://wikipedia.org"); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/SourceHelpers.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/SourceHelpers.java deleted file mode 100644 index 354e2634ea70..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/helpers/SourceHelpers.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.handlers.SourceDefinitionsHandler; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardSourceDefinition; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.UUID; - -public class SourceHelpers { - - public static SourceConnection generateSource(final UUID sourceDefinitionId) throws IOException { - return generateSource(sourceDefinitionId, "my default source name", false); - } - - public static SourceConnection generateSource(final UUID sourceDefinitionId, final String name) throws IOException { - return generateSource(sourceDefinitionId, name, false); - } - - public static SourceConnection generateSource(final UUID sourceDefinitionId, final boolean tombstone) throws IOException { - return generateSource(sourceDefinitionId, "my default source name", tombstone); - } - - public static SourceConnection generateSource(final UUID sourceDefinitionId, final String name, final boolean tombstone) throws IOException { - final UUID workspaceId = UUID.randomUUID(); - final UUID sourceId = UUID.randomUUID(); - - final JsonNode implementationJson = getTestImplementationJson(); - - return new SourceConnection() - .withName(name) - .withWorkspaceId(workspaceId) - .withSourceDefinitionId(sourceDefinitionId) - .withSourceId(sourceId) - .withConfiguration(implementationJson) - .withTombstone(tombstone); - } - - public static JsonNode getTestImplementationJson() throws IOException { - final Path path = Paths.get(SourceHelpers.class.getClassLoader().getResource("json/TestImplementation.json").getPath()); - return Jsons.deserialize(Files.readString(path)); - } - - public static SourceRead getSourceRead(final SourceConnection source, final 
StandardSourceDefinition standardSourceDefinition) { - - return new SourceRead() - .sourceDefinitionId(standardSourceDefinition.getSourceDefinitionId()) - .workspaceId(source.getWorkspaceId()) - .sourceDefinitionId(source.getSourceDefinitionId()) - .sourceId(source.getSourceId()) - .connectionConfiguration(source.getConfiguration()) - .name(source.getName()) - .sourceName(standardSourceDefinition.getName()) - .icon(SourceDefinitionsHandler.loadIcon(standardSourceDefinition.getIcon())); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/scheduler/DefaultSynchronousSchedulerClientTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/scheduler/DefaultSynchronousSchedulerClientTest.java deleted file mode 100644 index e25aa0dcfd36..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/scheduler/DefaultSynchronousSchedulerClientTest.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.server.scheduler; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.JobMetadata; -import io.airbyte.commons.temporal.TemporalClient; -import io.airbyte.commons.temporal.TemporalJobType; -import io.airbyte.commons.temporal.TemporalResponse; -import io.airbyte.commons.temporal.scheduling.RouterService; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorType; -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobCheckConnectionConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobDiscoverCatalogConfig; -import io.airbyte.config.JobGetSpecConfig; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardCheckConnectionOutput; -import io.airbyte.persistence.job.errorreporter.ConnectorJobReportingContext; -import io.airbyte.persistence.job.errorreporter.JobErrorReporter; -import io.airbyte.persistence.job.factory.OAuthConfigSupplier; -import io.airbyte.persistence.job.tracker.JobTracker; -import io.airbyte.persistence.job.tracker.JobTracker.JobState; -import io.airbyte.protocol.models.ConnectorSpecification; -import java.io.IOException; 
-import java.nio.file.Path; -import java.util.UUID; -import java.util.function.Function; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -// the goal here is to test the "execute" part of this class and all of the various exceptional -// cases. then separately test submission of each job type without having to re-test all of the -// execution exception cases again. -class DefaultSynchronousSchedulerClientTest { - - private static final Path LOG_PATH = Path.of("/tmp"); - private static final String DOCKER_IMAGE = "foo/bar"; - private static final String DOCKER_IMAGE_TAG = "baz/qux"; - private static final Version PROTOCOL_VERSION = new Version("0.2.3"); - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final UUID UUID1 = UUID.randomUUID(); - private static final UUID UUID2 = UUID.randomUUID(); - private static final String UNCHECKED = "unchecked"; - private static final String CHECK_TASK_QUEUE = "check"; - private static final String DISCOVER_TASK_QUEUE = "discover"; - private static final JsonNode CONFIGURATION = Jsons.jsonNode(ImmutableMap.builder() - .put("username", "airbyte") - .put("password", "abc") - .build()); - private static final SourceConnection SOURCE_CONNECTION = new SourceConnection() - .withSourceId(UUID1) - .withSourceDefinitionId(UUID2) - .withConfiguration(CONFIGURATION); - private static final DestinationConnection DESTINATION_CONNECTION = new DestinationConnection() - .withDestinationId(UUID1) - .withDestinationDefinitionId(UUID2) - .withConfiguration(CONFIGURATION); - private static final String SOURCE_DOCKER_IMAGE = "source-airbyte:1.2.3"; - - private TemporalClient temporalClient; - private JobTracker jobTracker; - private JobErrorReporter jobErrorReporter; - private OAuthConfigSupplier oAuthConfigSupplier; - - private RouterService routerService; - private 
DefaultSynchronousSchedulerClient schedulerClient; - - @BeforeEach - void setup() throws IOException { - temporalClient = mock(TemporalClient.class); - jobTracker = mock(JobTracker.class); - jobErrorReporter = mock(JobErrorReporter.class); - oAuthConfigSupplier = mock(OAuthConfigSupplier.class); - routerService = mock(RouterService.class); - schedulerClient = new DefaultSynchronousSchedulerClient(temporalClient, jobTracker, jobErrorReporter, oAuthConfigSupplier, routerService); - - when(oAuthConfigSupplier.injectSourceOAuthParameters(any(), any(), eq(CONFIGURATION))).thenReturn(CONFIGURATION); - when(oAuthConfigSupplier.injectDestinationOAuthParameters(any(), any(), eq(CONFIGURATION))).thenReturn(CONFIGURATION); - - when(routerService.getTaskQueueForWorkspace(any(), eq(TemporalJobType.CHECK_CONNECTION))).thenReturn(CHECK_TASK_QUEUE); - when(routerService.getTaskQueueForWorkspace(any(), eq(TemporalJobType.DISCOVER_SCHEMA))).thenReturn(DISCOVER_TASK_QUEUE); - } - - private static JobMetadata createMetadata(final boolean succeeded) { - return new JobMetadata( - succeeded, - LOG_PATH); - } - - @Nested - @DisplayName("Test execute method.") - class ExecuteSynchronousJob { - - @SuppressWarnings(UNCHECKED) - @Test - void testExecuteJobSuccess() { - final UUID sourceDefinitionId = UUID.randomUUID(); - final UUID discoveredCatalogId = UUID.randomUUID(); - final Supplier> function = mock(Supplier.class); - final Function mapperFunction = ConnectorJobOutput::getDiscoverCatalogId; - final ConnectorJobOutput jobOutput = new ConnectorJobOutput().withDiscoverCatalogId(discoveredCatalogId); - when(function.get()).thenReturn(new TemporalResponse<>(jobOutput, createMetadata(true))); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(UUID.randomUUID(), SOURCE_DOCKER_IMAGE); - final SynchronousResponse response = schedulerClient - .execute(ConfigType.DISCOVER_SCHEMA, jobContext, sourceDefinitionId, function, mapperFunction, WORKSPACE_ID); - - 
assertNotNull(response); - assertEquals(discoveredCatalogId, response.getOutput()); - assertEquals(ConfigType.DISCOVER_SCHEMA, response.getMetadata().getConfigType()); - assertTrue(response.getMetadata().getConfigId().isPresent()); - assertEquals(sourceDefinitionId, response.getMetadata().getConfigId().get()); - assertTrue(response.getMetadata().isSucceeded()); - assertEquals(LOG_PATH, response.getMetadata().getLogPath()); - - verify(jobTracker).trackDiscover(any(UUID.class), eq(sourceDefinitionId), eq(WORKSPACE_ID), eq(JobState.STARTED)); - verify(jobTracker).trackDiscover(any(UUID.class), eq(sourceDefinitionId), eq(WORKSPACE_ID), eq(JobState.SUCCEEDED)); - verifyNoInteractions(jobErrorReporter); - } - - @SuppressWarnings(UNCHECKED) - @Test - void testExecuteJobFailure() { - final UUID sourceDefinitionId = UUID.randomUUID(); - final Supplier> function = mock(Supplier.class); - final Function mapperFunction = ConnectorJobOutput::getDiscoverCatalogId; - when(function.get()).thenReturn(new TemporalResponse<>(null, createMetadata(false))); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(UUID.randomUUID(), SOURCE_DOCKER_IMAGE); - final SynchronousResponse response = schedulerClient - .execute(ConfigType.DISCOVER_SCHEMA, jobContext, sourceDefinitionId, function, mapperFunction, WORKSPACE_ID); - - assertNotNull(response); - assertNull(response.getOutput()); - assertEquals(ConfigType.DISCOVER_SCHEMA, response.getMetadata().getConfigType()); - assertTrue(response.getMetadata().getConfigId().isPresent()); - assertEquals(sourceDefinitionId, response.getMetadata().getConfigId().get()); - assertFalse(response.getMetadata().isSucceeded()); - assertEquals(LOG_PATH, response.getMetadata().getLogPath()); - - verify(jobTracker).trackDiscover(any(UUID.class), eq(sourceDefinitionId), eq(WORKSPACE_ID), eq(JobState.STARTED)); - verify(jobTracker).trackDiscover(any(UUID.class), eq(sourceDefinitionId), eq(WORKSPACE_ID), eq(JobState.FAILED)); - 
verifyNoInteractions(jobErrorReporter); - } - - @SuppressWarnings(UNCHECKED) - @Test - void testExecuteRuntimeException() { - final UUID sourceDefinitionId = UUID.randomUUID(); - final Supplier> function = mock(Supplier.class); - final Function mapperFunction = ConnectorJobOutput::getDiscoverCatalogId; - when(function.get()).thenThrow(new RuntimeException()); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(UUID.randomUUID(), SOURCE_DOCKER_IMAGE); - assertThrows( - RuntimeException.class, - () -> schedulerClient.execute(ConfigType.DISCOVER_SCHEMA, jobContext, sourceDefinitionId, function, - mapperFunction, WORKSPACE_ID)); - - verify(jobTracker).trackDiscover(any(UUID.class), eq(sourceDefinitionId), eq(WORKSPACE_ID), eq(JobState.STARTED)); - verify(jobTracker).trackDiscover(any(UUID.class), eq(sourceDefinitionId), eq(WORKSPACE_ID), eq(JobState.FAILED)); - verifyNoInteractions(jobErrorReporter); - } - - } - - @Nested - @DisplayName("Test job creation for each configuration type.") - class TestJobCreation { - - @Test - void testCreateSourceCheckConnectionJob() throws IOException { - final JobCheckConnectionConfig jobCheckConnectionConfig = new JobCheckConnectionConfig() - .withActorType(ActorType.SOURCE) - .withActorId(SOURCE_CONNECTION.getSourceId()) - .withConnectionConfiguration(SOURCE_CONNECTION.getConfiguration()) - .withDockerImage(DOCKER_IMAGE) - .withProtocolVersion(PROTOCOL_VERSION).withIsCustomConnector(false); - - final StandardCheckConnectionOutput mockOutput = mock(StandardCheckConnectionOutput.class); - final ConnectorJobOutput jobOutput = new ConnectorJobOutput().withCheckConnection(mockOutput); - when(temporalClient.submitCheckConnection(any(UUID.class), eq(0), eq(CHECK_TASK_QUEUE), eq(jobCheckConnectionConfig))) - .thenReturn(new TemporalResponse<>(jobOutput, createMetadata(true))); - final SynchronousResponse response = - schedulerClient.createSourceCheckConnectionJob(SOURCE_CONNECTION, DOCKER_IMAGE, 
PROTOCOL_VERSION, false); - assertEquals(mockOutput, response.getOutput()); - } - - @Test - void testCreateDestinationCheckConnectionJob() throws IOException { - final JobCheckConnectionConfig jobCheckConnectionConfig = new JobCheckConnectionConfig() - .withActorType(ActorType.DESTINATION) - .withActorId(DESTINATION_CONNECTION.getDestinationId()) - .withConnectionConfiguration(DESTINATION_CONNECTION.getConfiguration()) - .withDockerImage(DOCKER_IMAGE) - .withProtocolVersion(PROTOCOL_VERSION) - .withIsCustomConnector(false); - - final StandardCheckConnectionOutput mockOutput = mock(StandardCheckConnectionOutput.class); - final ConnectorJobOutput jobOutput = new ConnectorJobOutput().withCheckConnection(mockOutput); - when(temporalClient.submitCheckConnection(any(UUID.class), eq(0), eq(CHECK_TASK_QUEUE), eq(jobCheckConnectionConfig))) - .thenReturn(new TemporalResponse<>(jobOutput, createMetadata(true))); - final SynchronousResponse response = - schedulerClient.createDestinationCheckConnectionJob(DESTINATION_CONNECTION, DOCKER_IMAGE, PROTOCOL_VERSION, false); - assertEquals(mockOutput, response.getOutput()); - } - - @Test - void testCreateDiscoverSchemaJob() throws IOException { - final UUID expectedCatalogId = UUID.randomUUID(); - final ConnectorJobOutput jobOutput = new ConnectorJobOutput().withDiscoverCatalogId(expectedCatalogId); - when(temporalClient.submitDiscoverSchema(any(UUID.class), eq(0), eq(DISCOVER_TASK_QUEUE), any(JobDiscoverCatalogConfig.class))) - .thenReturn(new TemporalResponse<>(jobOutput, createMetadata(true))); - final SynchronousResponse response = - schedulerClient.createDiscoverSchemaJob(SOURCE_CONNECTION, DOCKER_IMAGE, DOCKER_IMAGE_TAG, PROTOCOL_VERSION, false); - assertEquals(expectedCatalogId, response.getOutput()); - } - - @Test - void testCreateGetSpecJob() throws IOException { - final JobGetSpecConfig jobSpecConfig = new JobGetSpecConfig().withDockerImage(DOCKER_IMAGE).withIsCustomConnector(false); - - final ConnectorSpecification 
mockOutput = mock(ConnectorSpecification.class); - final ConnectorJobOutput jobOutput = new ConnectorJobOutput().withSpec(mockOutput); - when(temporalClient.submitGetSpec(any(UUID.class), eq(0), eq(jobSpecConfig))) - .thenReturn(new TemporalResponse<>(jobOutput, createMetadata(true))); - final SynchronousResponse response = schedulerClient.createGetSpecJob(DOCKER_IMAGE, false); - assertEquals(mockOutput, response.getOutput()); - } - - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/services/AirbyteGithubStoreTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/services/AirbyteGithubStoreTest.java deleted file mode 100644 index 414fdabd3717..000000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/services/AirbyteGithubStoreTest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.services; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import java.io.IOException; -import java.net.http.HttpTimeoutException; -import java.time.Duration; -import java.util.Collections; -import java.util.concurrent.TimeUnit; -import okhttp3.mockwebserver.MockResponse; -import okhttp3.mockwebserver.MockWebServer; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -public class AirbyteGithubStoreTest { - - private static final Duration TIMEOUT = Duration.ofSeconds(1); - private static final String CONTENT_TYPE = "Content-Type"; - private static final String PLAIN_TEXT = "text/plain; charset=utf-8"; - private static final String CACHE_CONTROL = "Cache-Control"; - private static final String NO_CACHE = "no-cache"; - - private MockWebServer webServer; - private AirbyteGithubStore githubStore; - - @BeforeEach - public void setUp() { - 
webServer = new MockWebServer(); - githubStore = AirbyteGithubStore.test(webServer.url("/").toString(), TIMEOUT); - } - - @Nested - @DisplayName("when the additional definitions file is unusable, badly formatted, or cannot be retrieved due to errors") - class FileUnusable { - - @Test - void testGetLatestSourcesWithNonJson() throws InterruptedException { - final var nonjsonBody = "irrelevant text"; - final var nonjsonResponse = new MockResponse().setResponseCode(200) - .addHeader(CONTENT_TYPE, PLAIN_TEXT) - .addHeader(CACHE_CONTROL, NO_CACHE) - .setBody(nonjsonBody); - webServer.enqueue(nonjsonResponse); - assertEquals(Collections.emptyList(), githubStore.getLatestSources()); - } - - @Test - void testGetLatestSourcesWithWrongSchemaJson() throws InterruptedException { - final var jsonBody = "{ json: 'validButWrongFormat' }"; - final var jsonResponse = new MockResponse().setResponseCode(200) - .addHeader(CONTENT_TYPE, "application/json; charset=utf-8") - .addHeader(CACHE_CONTROL, NO_CACHE) - .setBody(jsonBody); - webServer.enqueue(jsonResponse); - assertEquals(Collections.emptyList(), githubStore.getLatestSources()); - } - - @Test - void testGetLatestDestinationsWithNonJson() throws InterruptedException { - final var nonjsonBody = "irrelevant text"; - final var nonjsonResponse = new MockResponse().setResponseCode(200) - .addHeader(CONTENT_TYPE, PLAIN_TEXT) - .addHeader(CACHE_CONTROL, NO_CACHE) - .setBody(nonjsonBody); - webServer.enqueue(nonjsonResponse); - assertEquals(Collections.emptyList(), githubStore.getLatestDestinations()); - } - - @Test - void testGetLatestDestinationsWithWrongSchemaJson() throws InterruptedException { - final var jsonBody = "{ json: 'validButWrongFormat' }"; - final var jsonResponse = new MockResponse().setResponseCode(200) - .addHeader(CONTENT_TYPE, "application/json; charset=utf-8") - .addHeader(CACHE_CONTROL, NO_CACHE) - .setBody(jsonBody); - webServer.enqueue(jsonResponse); - assertEquals(Collections.emptyList(), 
githubStore.getLatestDestinations()); - } - - } - - @Nested - @DisplayName("when there is no internet") - class NoInternet { - - @Test - void testGetLatestDestinations() throws InterruptedException, IOException { - webServer.shutdown(); - assertEquals(Collections.emptyList(), githubStore.getLatestDestinations()); - } - - @Test - void testGetLatestSources() throws InterruptedException, IOException { - webServer.shutdown(); - assertEquals(Collections.emptyList(), githubStore.getLatestSources()); - } - - } - - @Nested - @DisplayName("when a bad file is specified") - class BadFile { - - @Test - void testGetLatestDestinations() throws InterruptedException { - final var timeoutResp = new MockResponse().setResponseCode(404); - webServer.enqueue(timeoutResp); - - assertEquals(Collections.emptyList(), githubStore.getLatestDestinations()); - } - - @Test - void testGetLatestSources() throws InterruptedException { - final var timeoutResp = new MockResponse().setResponseCode(404); - webServer.enqueue(timeoutResp); - - assertEquals(Collections.emptyList(), githubStore.getLatestSources()); - } - - } - - @Nested - @DisplayName("getFile") - class GetFile { - - @Test - void testReturn() throws IOException, InterruptedException { - final var goodBody = "great day!"; - final var goodResp = new MockResponse().setResponseCode(200) - .addHeader(CONTENT_TYPE, PLAIN_TEXT) - .addHeader(CACHE_CONTROL, NO_CACHE) - .setBody(goodBody); - webServer.enqueue(goodResp); - - final var fileStr = githubStore.getFile("test-file"); - assertEquals(goodBody, fileStr); - } - - @Test - void testHttpTimeout() { - final var timeoutResp = new MockResponse().setResponseCode(200) - .addHeader(CONTENT_TYPE, PLAIN_TEXT) - .addHeader(CACHE_CONTROL, NO_CACHE) - .setBody("") - .setHeadersDelay(TIMEOUT.toSeconds() * 2, TimeUnit.SECONDS) - .setBodyDelay(TIMEOUT.toSeconds() * 2, TimeUnit.SECONDS); - webServer.enqueue(timeoutResp); - - assertThrows(HttpTimeoutException.class, () -> githubStore.getFile("test-file")); - } 
- - } - -} diff --git a/airbyte-commons-server/src/test/resources/icons/test-destination.svg b/airbyte-commons-server/src/test/resources/icons/test-destination.svg deleted file mode 100644 index 29fb9f8e862e..000000000000 --- a/airbyte-commons-server/src/test/resources/icons/test-destination.svg +++ /dev/null @@ -1,3 +0,0 @@ - - destination - diff --git a/airbyte-commons-server/src/test/resources/icons/test-source.svg b/airbyte-commons-server/src/test/resources/icons/test-source.svg deleted file mode 100644 index 7d81478ebd00..000000000000 --- a/airbyte-commons-server/src/test/resources/icons/test-source.svg +++ /dev/null @@ -1,3 +0,0 @@ - - source - diff --git a/airbyte-commons-server/src/test/resources/json/TestImplementation.json b/airbyte-commons-server/src/test/resources/json/TestImplementation.json deleted file mode 100644 index 3c88772c28c4..000000000000 --- a/airbyte-commons-server/src/test/resources/json/TestImplementation.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "apiKey": "123-abc", - "hostname": "airbyte.io" -} diff --git a/airbyte-commons-server/src/test/resources/json/TestSpecification.json b/airbyte-commons-server/src/test/resources/json/TestSpecification.json deleted file mode 100644 index 399ab4e98420..000000000000 --- a/airbyte-commons-server/src/test/resources/json/TestSpecification.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "specification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://github.com/airbytehq/airbyte/blob/master/airbyte-server/src/test/resources/json/TestSpecification.json", - "title": "TestSpecification", - "description": "information output by the connection.", - "type": "object", - "required": ["apiKey", "hostname"], - "additionalProperties": false, - "properties": { - "apiKey": { - "type": "string" - }, - "hostname": { - "type": "string" - } - } - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/03a4c904-c91d-447f-ab59-27a43b52c2fd.gz 
b/airbyte-commons-server/src/test/resources/migration/03a4c904-c91d-447f-ab59-27a43b52c2fd.gz deleted file mode 100644 index 0e9f6b2c4069..000000000000 Binary files a/airbyte-commons-server/src/test/resources/migration/03a4c904-c91d-447f-ab59-27a43b52c2fd.gz and /dev/null differ diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/DESTINATION_CONNECTION/4e00862d-5484-4f50-9860-f3bbb4317397.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/DESTINATION_CONNECTION/4e00862d-5484-4f50-9860-f3bbb4317397.json deleted file mode 100644 index b88d0a44331d..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/DESTINATION_CONNECTION/4e00862d-5484-4f50-9860-f3bbb4317397.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "Postgres Docker", - "destinationDefinitionId": "25c5221d-dce2-4163-ade9-739ef790f503", - "workspaceId": "5ae6b09b-fdec-41af-aaf7-7d94cfc33ef6", - "destinationId": "4e00862d-5484-4f50-9860-f3bbb4317397", - "configuration": { - "basic_normalization": true, - "username": "postgres", - "password": "password", - "database": "postgres", - "schema": "public", - "port": 3000, - "host": "localhost" - }, - "tombstone": false -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/DESTINATION_CONNECTION/5434615d-a3b7-4351-bc6b-a9a695555a30.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/DESTINATION_CONNECTION/5434615d-a3b7-4351-bc6b-a9a695555a30.json deleted file mode 100644 index 1820bef139d3..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/DESTINATION_CONNECTION/5434615d-a3b7-4351-bc6b-a9a695555a30.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "name": "CSV", - "destinationDefinitionId": "8be1cf83-fde1-477f-a4ad-318d23c9f3c6", - "workspaceId": "5ae6b09b-fdec-41af-aaf7-7d94cfc33ef6", - "destinationId": "5434615d-a3b7-4351-bc6b-a9a695555a30", - "configuration": { "destination_path": 
"csv_data" }, - "tombstone": false -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/SOURCE_CONNECTION/28ffee2b-372a-4f72-9b95-8ed56a8b99c5.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/SOURCE_CONNECTION/28ffee2b-372a-4f72-9b95-8ed56a8b99c5.json deleted file mode 100644 index fc15d49eb16e..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/SOURCE_CONNECTION/28ffee2b-372a-4f72-9b95-8ed56a8b99c5.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "MySQL localhost", - "sourceDefinitionId": "435bb9a5-7887-4809-aa58-28c27df0d7ad", - "workspaceId": "5ae6b09b-fdec-41af-aaf7-7d94cfc33ef6", - "sourceId": "28ffee2b-372a-4f72-9b95-8ed56a8b99c5", - "configuration": { - "username": "root", - "password": "password", - "database": "localhost_test", - "port": 3306, - "host": "host.docker.internal" - }, - "tombstone": false -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/SOURCE_CONNECTION/e48cae1a-1f5c-42cc-9ec1-a44ff7fb4969.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/SOURCE_CONNECTION/e48cae1a-1f5c-42cc-9ec1-a44ff7fb4969.json deleted file mode 100644 index ee2dc444ef37..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/SOURCE_CONNECTION/e48cae1a-1f5c-42cc-9ec1-a44ff7fb4969.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "Using a source definition deleted", - "sourceDefinitionId": "4eb22946-2a79-4d20-a3e6-effd234613c3", - "workspaceId": "5ae6b09b-fdec-41af-aaf7-7d94cfc33ef6", - "sourceId": "e48cae1a-1f5c-42cc-9ec1-a44ff7fb4969", - "configuration": { - "username": "root", - "password": "password", - "database": "localhost_test", - "port": 3306, - "host": "host.docker.internal" - }, - "tombstone": false -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/22f6c74f-5699-40ff-833c-4a879ea40133.json 
b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/22f6c74f-5699-40ff-833c-4a879ea40133.json deleted file mode 100644 index 0f41fabe8f3a..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/22f6c74f-5699-40ff-833c-4a879ea40133.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "destinationDefinitionId": "22f6c74f-5699-40ff-833c-4a879ea40133", - "name": "BigQuery", - "dockerRepository": "airbyte/destination-bigquery", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/bigquery", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/bigquery", - "connectionSpecification": {}, - "supportsIncremental": true - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/25c5221d-dce2-4163-ade9-739ef790f503.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/25c5221d-dce2-4163-ade9-739ef790f503.json deleted file mode 100644 index 3f346d797154..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/25c5221d-dce2-4163-ade9-739ef790f503.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "destinationDefinitionId": "25c5221d-dce2-4163-ade9-739ef790f503", - "name": "Postgres", - "dockerRepository": "airbyte/destination-postgres", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/postgres", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/postgres", - "connectionSpecification": {}, - "supportsIncremental": true - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json 
b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json deleted file mode 100644 index f4b70b05aaa3..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "destinationDefinitionId": "424892c4-daac-4491-b35d-c6688ba547ba", - "name": "Snowflake", - "dockerRepository": "airbyte/destination-snowflake", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/snowflake", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/snowflake", - "connectionSpecification": {}, - "supportsIncremental": true - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/8be1cf83-fde1-477f-a4ad-318d23c9f3c6.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/8be1cf83-fde1-477f-a4ad-318d23c9f3c6.json deleted file mode 100644 index 9df1daa7600d..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/8be1cf83-fde1-477f-a4ad-318d23c9f3c6.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "destinationDefinitionId": "8be1cf83-fde1-477f-a4ad-318d23c9f3c6", - "name": "Local CSV", - "dockerRepository": "airbyte/destination-csv", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/local-csv", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/local-csv", - "connectionSpecification": {}, - "supportsIncremental": true - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/a625d593-bba5-4a1c-a53d-2d246268a816.json 
b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/a625d593-bba5-4a1c-a53d-2d246268a816.json deleted file mode 100644 index 10188ba37216..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/a625d593-bba5-4a1c-a53d-2d246268a816.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "destinationDefinitionId": "a625d593-bba5-4a1c-a53d-2d246268a816", - "name": "Local JSON", - "dockerRepository": "airbyte/destination-local-json", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/local-json", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/local-json", - "connectionSpecification": {}, - "supportsIncremental": true - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/af7c921e-5892-4ff2-b6c1-4a5ab258fb7e.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/af7c921e-5892-4ff2-b6c1-4a5ab258fb7e.json deleted file mode 100644 index db3cc13e318f..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/af7c921e-5892-4ff2-b6c1-4a5ab258fb7e.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "destinationDefinitionId": "af7c921e-5892-4ff2-b6c1-4a5ab258fb7e", - "name": "MeiliSearch", - "dockerRepository": "airbyte/destination-meilisearch", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/meilisearch", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/meilisearch", - "connectionSpecification": {}, - "supportsIncremental": true - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/f7a7d195-377f-cf5b-70a5-be6b819019dc.json 
b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/f7a7d195-377f-cf5b-70a5-be6b819019dc.json deleted file mode 100644 index 32304def896d..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_DESTINATION_DEFINITION/f7a7d195-377f-cf5b-70a5-be6b819019dc.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "destinationDefinitionId": "f7a7d195-377f-cf5b-70a5-be6b819019dc", - "name": "Redshift", - "dockerRepository": "airbyte/destination-redshift", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/redshift", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/redshift", - "connectionSpecification": {}, - "supportsIncremental": true - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c.json deleted file mode 100644 index 44982b32ccab..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c", - "name": "Looker", - "dockerRepository": "airbyte/source-looker", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-looker", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/looker", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/2470e835-feaf-4db6-96f3-70fd645acc77.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/2470e835-feaf-4db6-96f3-70fd645acc77.json 
deleted file mode 100644 index 4d8144375228..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/2470e835-feaf-4db6-96f3-70fd645acc77.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "2470e835-feaf-4db6-96f3-70fd645acc77", - "name": "Salesforce", - "dockerRepository": "airbyte/source-salesforce-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-salesforce-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/salesforce", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/2af123bf-0aaf-4e0d-9784-cb497f23741a.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/2af123bf-0aaf-4e0d-9784-cb497f23741a.json deleted file mode 100644 index 1b2292279099..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/2af123bf-0aaf-4e0d-9784-cb497f23741a.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "2af123bf-0aaf-4e0d-9784-cb497f23741a", - "name": "Appstore", - "dockerRepository": "airbyte/source-appstore-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-appstore-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/appstore", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/396e4ca3-8a97-4b85-aa4e-c9d8c2d5f992.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/396e4ca3-8a97-4b85-aa4e-c9d8c2d5f992.json deleted file mode 100644 index 48b6d163439e..000000000000 --- 
a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/396e4ca3-8a97-4b85-aa4e-c9d8c2d5f992.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "396e4ca3-8a97-4b85-aa4e-c9d8c2d5f992", - "name": "Braintree", - "dockerRepository": "airbyte/source-braintree-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-braintree-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/braintree", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/41375467-61ae-4204-8e38-e2b8b7365f23.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/41375467-61ae-4204-8e38-e2b8b7365f23.json deleted file mode 100644 index b706fc7d9f1a..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/41375467-61ae-4204-8e38-e2b8b7365f23.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "41375467-61ae-4204-8e38-e2b8b7365f23", - "name": "Slack", - "dockerRepository": "airbyte/source-slack-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/repository/docker/airbyte/source-slack-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/slack", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/435bb9a5-7887-4809-aa58-28c27df0d7ad.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/435bb9a5-7887-4809-aa58-28c27df0d7ad.json deleted file mode 100644 index 1b9e159ba87c..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/435bb9a5-7887-4809-aa58-28c27df0d7ad.json +++ /dev/null @@ -1,11 
+0,0 @@ -{ - "sourceDefinitionId": "435bb9a5-7887-4809-aa58-28c27df0d7ad", - "name": "MySQL", - "dockerRepository": "airbyte/source-mysql", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://docs.airbyte.io/integrations/sources/mysql", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/mysql", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/445831eb-78db-4b1f-8f1f-0d96ad8739e2.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/445831eb-78db-4b1f-8f1f-0d96ad8739e2.json deleted file mode 100644 index a73646d3aa07..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/445831eb-78db-4b1f-8f1f-0d96ad8739e2.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "445831eb-78db-4b1f-8f1f-0d96ad8739e2", - "name": "Drift", - "dockerRepository": "airbyte/source-drift", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-drift", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/drift", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/4eb22946-2a79-4d20-a3e6-effd234613c3.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/4eb22946-2a79-4d20-a3e6-effd234613c3.json deleted file mode 100644 index eb7306ee608f..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/4eb22946-2a79-4d20-a3e6-effd234613c3.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "4eb22946-2a79-4d20-a3e6-effd234613c3", - "name": "Old connector still being used", - "dockerRepository": "airbyte/source-mysql", - "dockerImageTag": "0.2.0", - "documentationUrl": 
"https://docs.airbyte.io/integrations/sources/mysql", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/mysql", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/57eb1576-8f52-463d-beb6-2e107cdf571d.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/57eb1576-8f52-463d-beb6-2e107cdf571d.json deleted file mode 100644 index e681e0eaac14..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/57eb1576-8f52-463d-beb6-2e107cdf571d.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "57eb1576-8f52-463d-beb6-2e107cdf571d", - "name": "Hubspot", - "dockerRepository": "airbyte/source-hubspot-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://https://docs.airbyte.io/integrations/sources/hubspot", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/hubspot", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/59f1e50a-331f-4f09-b3e8-2e8d4d355f44.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/59f1e50a-331f-4f09-b3e8-2e8d4d355f44.json deleted file mode 100644 index fca454d4c181..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/59f1e50a-331f-4f09-b3e8-2e8d4d355f44.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "59f1e50a-331f-4f09-b3e8-2e8d4d355f44", - "name": "Greenhouse", - "dockerRepository": "airbyte/source-greenhouse", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://https://docs.airbyte.io/integrations/sources/greenhouse", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/greenhouse", - 
"connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/68e63de2-bb83-4c7e-93fa-a8a9051e3993.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/68e63de2-bb83-4c7e-93fa-a8a9051e3993.json deleted file mode 100644 index 93812f6b85b2..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/68e63de2-bb83-4c7e-93fa-a8a9051e3993.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "68e63de2-bb83-4c7e-93fa-a8a9051e3993", - "name": "Jira", - "dockerRepository": "airbyte/source-jira", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-jira", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/jira", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/71607ba1-c0ac-4799-8049-7f4b90dd50f7.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/71607ba1-c0ac-4799-8049-7f4b90dd50f7.json deleted file mode 100644 index 11ce44b6c97e..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/71607ba1-c0ac-4799-8049-7f4b90dd50f7.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "71607ba1-c0ac-4799-8049-7f4b90dd50f7", - "name": "Google Sheets", - "dockerRepository": "airbyte/source-google-sheets", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/repository/docker/airbyte/source-google-sheets", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/google-sheets", - "connectionSpecification": {} - } -} diff --git 
a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/778daa7c-feaf-4db6-96f3-70fd645acc77.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/778daa7c-feaf-4db6-96f3-70fd645acc77.json deleted file mode 100644 index d98d85610c7e..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/778daa7c-feaf-4db6-96f3-70fd645acc77.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "778daa7c-feaf-4db6-96f3-70fd645acc77", - "name": "File", - "dockerRepository": "airbyte/source-file", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-file", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/file", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/859e501d-2b67-471f-91bb-1c801414d28f.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/859e501d-2b67-471f-91bb-1c801414d28f.json deleted file mode 100644 index 80888b0d66a3..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/859e501d-2b67-471f-91bb-1c801414d28f.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "859e501d-2b67-471f-91bb-1c801414d28f", - "name": "Mixpanel", - "dockerRepository": "airbyte/source-mixpanel-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-mixpanel-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/mixpanel", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/9e0556f4-69df-4522-a3fb-03264d36b348.json 
b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/9e0556f4-69df-4522-a3fb-03264d36b348.json deleted file mode 100644 index 75dec47c07dc..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/9e0556f4-69df-4522-a3fb-03264d36b348.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "9e0556f4-69df-4522-a3fb-03264d36b348", - "name": "Marketo", - "dockerRepository": "airbyte/source-marketo-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-marketo-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/marketo", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/9fed261d-d107-47fd-8c8b-323023db6e20.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/9fed261d-d107-47fd-8c8b-323023db6e20.json deleted file mode 100644 index 5f76fd492695..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/9fed261d-d107-47fd-8c8b-323023db6e20.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "9fed261d-d107-47fd-8c8b-323023db6e20", - "name": "Exchange Rates Api", - "dockerRepository": "airbyte/source-exchangeratesapi-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/integration-singer-exchangeratesapi_io-source", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/exchangeratesapi", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/aea2fd0d-377d-465e-86c0-4fdc4f688e51.json 
b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/aea2fd0d-377d-465e-86c0-4fdc4f688e51.json deleted file mode 100644 index ab9153772842..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/aea2fd0d-377d-465e-86c0-4fdc4f688e51.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "aea2fd0d-377d-465e-86c0-4fdc4f688e51", - "name": "Zoom", - "dockerRepository": "airbyte/source-zoom-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-zoom-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/zoom", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b03a9f3e-22a5-11eb-adc1-0242ac120002.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b03a9f3e-22a5-11eb-adc1-0242ac120002.json deleted file mode 100644 index b08cfcc58259..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b03a9f3e-22a5-11eb-adc1-0242ac120002.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "b03a9f3e-22a5-11eb-adc1-0242ac120002", - "name": "Mailchimp", - "dockerRepository": "airbyte/source-mailchimp", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-mailchimp", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/mailchimp", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b1892b11-788d-44bd-b9ec-3a436f7b54ce.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b1892b11-788d-44bd-b9ec-3a436f7b54ce.json deleted file mode 100644 index 82140f27bdef..000000000000 
--- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b1892b11-788d-44bd-b9ec-3a436f7b54ce.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "b1892b11-788d-44bd-b9ec-3a436f7b54ce", - "name": "Shopify", - "dockerRepository": "airbyte/source-shopify-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-shopify-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/shopify", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b5ea17b1-f170-46dc-bc31-cc744ca984c1.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b5ea17b1-f170-46dc-bc31-cc744ca984c1.json deleted file mode 100644 index 91c409ba1ed9..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/b5ea17b1-f170-46dc-bc31-cc744ca984c1.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "b5ea17b1-f170-46dc-bc31-cc744ca984c1", - "name": "Microsoft SQL Server (MSSQL)", - "dockerRepository": "airbyte/source-mssql", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-mssql", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/mssql", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/cd42861b-01fc-4658-a8ab-5d11d0510f01.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/cd42861b-01fc-4658-a8ab-5d11d0510f01.json deleted file mode 100644 index 4b6ac4817bf1..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/cd42861b-01fc-4658-a8ab-5d11d0510f01.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - 
"sourceDefinitionId": "cd42861b-01fc-4658-a8ab-5d11d0510f01", - "name": "Recurly", - "dockerRepository": "airbyte/source-recurly", - "dockerImageTag": "0.3.1", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-recurly", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/recurly", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d2147be5-fa36-4936-977e-f031affa5895.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d2147be5-fa36-4936-977e-f031affa5895.json deleted file mode 100644 index cce976b11c97..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d2147be5-fa36-4936-977e-f031affa5895.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "d2147be5-fa36-4936-977e-f031affa5895", - "name": "Old Connector", - "dockerRepository": "airbyte/source-appstore-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-appstore-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/appstore", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d29764f8-80d7-4dd7-acbe-1a42005ee5aa.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d29764f8-80d7-4dd7-acbe-1a42005ee5aa.json deleted file mode 100644 index 426c111fece4..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d29764f8-80d7-4dd7-acbe-1a42005ee5aa.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "d29764f8-80d7-4dd7-acbe-1a42005ee5aa", - "name": "Zendesk Support", - "dockerRepository": "airbyte/source-zendesk-support-singer", - "dockerImageTag": "0.2.0", - 
"documentationUrl": "https://hub.docker.com/r/airbyte/source-zendesk-support-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/zendesk-support", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d8313939-3782-41b0-be29-b3ca20d8dd3a.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d8313939-3782-41b0-be29-b3ca20d8dd3a.json deleted file mode 100644 index 5121a300a770..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/d8313939-3782-41b0-be29-b3ca20d8dd3a.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "d8313939-3782-41b0-be29-b3ca20d8dd3a", - "name": "Intercom", - "dockerRepository": "airbyte/source-intercom-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-intercom-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/intercom", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/decd338e-5647-4c0b-adf4-da0e75f5a750.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/decd338e-5647-4c0b-adf4-da0e75f5a750.json deleted file mode 100644 index 79bf62ad40a6..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/decd338e-5647-4c0b-adf4-da0e75f5a750.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "decd338e-5647-4c0b-adf4-da0e75f5a750", - "name": "Postgres", - "dockerRepository": "airbyte/source-postgres", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-postgres", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/postgres", - 
"connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json deleted file mode 100644 index 4e59550a3577..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "e094cb9a-26de-4645-8761-65c0c425d1de", - "name": "Stripe", - "dockerRepository": "airbyte/source-stripe-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/integration-singer-stripe-source", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/stripe", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json deleted file mode 100644 index f265921c0876..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "e7778cfc-e97c-4458-9ecb-b4f2bba8946c", - "name": "Facebook Marketing", - "dockerRepository": "airbyte/source-facebook-marketing", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-facebook-marketing", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/facebook", - "connectionSpecification": {} - } -} diff --git 
a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e87ffa8e-a3b5-f69c-9076-6011339de1f6.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e87ffa8e-a3b5-f69c-9076-6011339de1f6.json deleted file mode 100644 index ad2e7c653474..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/e87ffa8e-a3b5-f69c-9076-6011339de1f6.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "e87ffa8e-a3b5-f69c-9076-6011339de1f6", - "name": "Redshift", - "dockerRepository": "airbyte/source-redshift", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/repository/docker/airbyte/source-redshift", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/redshift", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/eaf50f04-21dd-4620-913b-2a83f5635227.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/eaf50f04-21dd-4620-913b-2a83f5635227.json deleted file mode 100644 index 9df0eb9bd85c..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/eaf50f04-21dd-4620-913b-2a83f5635227.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "eaf50f04-21dd-4620-913b-2a83f5635227", - "name": "Microsoft teams", - "dockerRepository": "airbyte/source-microsoft-teams", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-microsoft-teams", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/microsoft-teams", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ec4b9503-13cb-48ab-a4ab-6ade4be46567.json 
b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ec4b9503-13cb-48ab-a4ab-6ade4be46567.json deleted file mode 100644 index 5beabc83f6a6..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ec4b9503-13cb-48ab-a4ab-6ade4be46567.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "ec4b9503-13cb-48ab-a4ab-6ade4be46567", - "name": "Freshdesk", - "dockerRepository": "airbyte/source-freshdesk", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-freshdesk", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/freshdesk", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ed799e2b-2158-4c66-8da4-b40fe63bc72a.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ed799e2b-2158-4c66-8da4-b40fe63bc72a.json deleted file mode 100644 index ebe806009ad6..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ed799e2b-2158-4c66-8da4-b40fe63bc72a.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "ed799e2b-2158-4c66-8da4-b40fe63bc72a", - "name": "Plaid", - "dockerRepository": "airbyte/source-plaid", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-plaid", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/plaid", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ef69ef6e-aa7f-4af1-a01d-ef775033524e.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ef69ef6e-aa7f-4af1-a01d-ef775033524e.json deleted file mode 100644 index 2bf8badeebe2..000000000000 --- 
a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/ef69ef6e-aa7f-4af1-a01d-ef775033524e.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "ef69ef6e-aa7f-4af1-a01d-ef775033524e", - "name": "GitHub", - "dockerRepository": "airbyte/source-github-singer", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-github-singer", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/github", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/fbb5fbe2-16ad-4cf4-af7d-ff9d9c316c87.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/fbb5fbe2-16ad-4cf4-af7d-ff9d9c316c87.json deleted file mode 100644 index 84bf72d3da15..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SOURCE_DEFINITION/fbb5fbe2-16ad-4cf4-af7d-ff9d9c316c87.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "sourceDefinitionId": "fbb5fbe2-16ad-4cf4-af7d-ff9d9c316c87", - "name": "Sendgrid", - "dockerRepository": "airbyte/source-sendgrid", - "dockerImageTag": "0.2.0", - "documentationUrl": "https://hub.docker.com/r/airbyte/source-sendgrid", - "spec": { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/sendgrid", - "connectionSpecification": {} - } -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC/49dae3f0-158b-4737-b6e4-0eed77d4b74e.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC/49dae3f0-158b-4737-b6e4-0eed77d4b74e.json deleted file mode 100644 index 281271e9b940..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC/49dae3f0-158b-4737-b6e4-0eed77d4b74e.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "prefix": "", - "sourceId": 
"28ffee2b-372a-4f72-9b95-8ed56a8b99c5", - "destinationId": "5434615d-a3b7-4351-bc6b-a9a695555a30", - "connectionId": "49dae3f0-158b-4737-b6e4-0eed77d4b74e", - "name": "default", - "catalog": { - "streams": [ - { - "stream": { - "name": "localhost_test.new_table", - "json_schema": { - "type": "object", - "properties": { - "id": { "type": "number" }, - "val": { "type": "string" } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "default_cursor_field": [] - }, - "sync_mode": "full_refresh", - "cursor_field": [] - }, - { - "stream": { - "name": "localhost_test.test_table", - "json_schema": { - "type": "object", - "properties": { - "id": { "type": "number" }, - "val": { "type": "string" }, - "updated_at": { "type": "string" } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "default_cursor_field": [] - }, - "sync_mode": "incremental", - "cursor_field": ["updated_at"] - } - ] - }, - "status": "active" -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC/a294256f-1abe-4837-925f-91602c7207b4.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC/a294256f-1abe-4837-925f-91602c7207b4.json deleted file mode 100644 index 9a8b5318a58f..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC/a294256f-1abe-4837-925f-91602c7207b4.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "prefix": "", - "sourceId": "28ffee2b-372a-4f72-9b95-8ed56a8b99c5", - "destinationId": "4e00862d-5484-4f50-9860-f3bbb4317397", - "connectionId": "a294256f-1abe-4837-925f-91602c7207b4", - "name": "default", - "catalog": { - "streams": [ - { - "stream": { - "name": "localhost_test.new_table", - "json_schema": { - "type": "object", - "properties": { - "id": { "type": "number" }, - "val": { "type": "string" } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "default_cursor_field": [] - }, - "sync_mode": "full_refresh", - 
"cursor_field": [] - }, - { - "stream": { - "name": "localhost_test.test_table", - "json_schema": { - "type": "object", - "properties": { - "id": { "type": "number" }, - "val": { "type": "string" }, - "updated_at": { "type": "string" } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "default_cursor_field": [] - }, - "sync_mode": "incremental", - "cursor_field": ["updated_at"] - } - ] - }, - "status": "active" -} diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC_SCHEDULE/49dae3f0-158b-4737-b6e4-0eed77d4b74e.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC_SCHEDULE/49dae3f0-158b-4737-b6e4-0eed77d4b74e.json deleted file mode 100644 index 3c9c3fb1825b..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC_SCHEDULE/49dae3f0-158b-4737-b6e4-0eed77d4b74e.json +++ /dev/null @@ -1 +0,0 @@ -{ "connectionId": "49dae3f0-158b-4737-b6e4-0eed77d4b74e", "manual": true } diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC_SCHEDULE/a294256f-1abe-4837-925f-91602c7207b4.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC_SCHEDULE/a294256f-1abe-4837-925f-91602c7207b4.json deleted file mode 100644 index 0ed8884c4750..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_SYNC_SCHEDULE/a294256f-1abe-4837-925f-91602c7207b4.json +++ /dev/null @@ -1 +0,0 @@ -{ "connectionId": "a294256f-1abe-4837-925f-91602c7207b4", "manual": true } diff --git a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_WORKSPACE/5ae6b09b-fdec-41af-aaf7-7d94cfc33ef6.json b/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_WORKSPACE/5ae6b09b-fdec-41af-aaf7-7d94cfc33ef6.json deleted file mode 100644 index 864257ed8966..000000000000 --- 
a/airbyte-commons-server/src/test/resources/migration/dummy_data/config/STANDARD_WORKSPACE/5ae6b09b-fdec-41af-aaf7-7d94cfc33ef6.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "workspaceId": "5ae6b09b-fdec-41af-aaf7-7d94cfc33ef6", - "customerId": "17f90b72-5ae4-40b7-bc49-d6c2943aea57", - "name": "default", - "slug": "default", - "initialSetupComplete": true, - "anonymousDataCollection": false, - "news": false, - "securityUpdates": false, - "displaySetupWizard": false -} diff --git a/airbyte-commons-server/src/test/resources/migration/schema.sql b/airbyte-commons-server/src/test/resources/migration/schema.sql deleted file mode 100644 index dd2b7c507300..000000000000 --- a/airbyte-commons-server/src/test/resources/migration/schema.sql +++ /dev/null @@ -1,82 +0,0 @@ --- extensions -CREATE - EXTENSION IF NOT EXISTS "uuid-ossp"; - --- types -CREATE - TYPE JOB_STATUS AS ENUM( - 'pending', - 'running', - 'incomplete', - 'failed', - 'succeeded', - 'cancelled' - ); - -CREATE - TYPE ATTEMPT_STATUS AS ENUM( - 'running', - 'failed', - 'succeeded' - ); - -CREATE - TYPE JOB_CONFIG_TYPE AS ENUM( - 'check_connection_source', - 'check_connection_destination', - 'discover_schema', - 'get_spec', - 'sync', - 'reset_connection' - ); - --- tables -CREATE - TABLE - AIRBYTE_METADATA( - KEY VARCHAR(255) PRIMARY KEY, - value VARCHAR(255) - ); - -CREATE - TABLE - JOBS( - id BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - config_type JOB_CONFIG_TYPE, - SCOPE VARCHAR(255), - config JSONB, - status JOB_STATUS, - started_at TIMESTAMPTZ, - created_at TIMESTAMPTZ, - updated_at TIMESTAMPTZ - ); - -CREATE - TABLE - ATTEMPTS( - id BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - job_id BIGINT, - attempt_number INTEGER, - log_path VARCHAR(255), - OUTPUT JSONB, - status ATTEMPT_STATUS, - created_at TIMESTAMPTZ, - updated_at TIMESTAMPTZ, - ended_at TIMESTAMPTZ - ); - -CREATE - UNIQUE INDEX job_attempt_idx ON - ATTEMPTS( - job_id, - attempt_number - ); - --- entries -INSERT - INTO - 
AIRBYTE_METADATA - VALUES( - 'server_uuid', - uuid_generate_v4() - ); diff --git a/airbyte-commons-temporal/build.gradle b/airbyte-commons-temporal/build.gradle deleted file mode 100644 index 2ec6401db3fd..000000000000 --- a/airbyte-commons-temporal/build.gradle +++ /dev/null @@ -1,30 +0,0 @@ -plugins { - id "java-library" -} - -dependencies { - annotationProcessor platform(libs.micronaut.bom) - annotationProcessor libs.bundles.micronaut.annotation.processor - - implementation platform(libs.micronaut.bom) - implementation libs.bundles.micronaut - implementation libs.bundles.temporal - - testAnnotationProcessor platform(libs.micronaut.bom) - testAnnotationProcessor libs.bundles.micronaut.test.annotation.processor - - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') - implementation project(':airbyte-metrics:metrics-lib') - implementation project(':airbyte-persistence:job-persistence') - implementation libs.airbyte.protocol - implementation project(':airbyte-worker-models') - implementation project(':airbyte-api') - implementation project(':airbyte-json-validation') - - testImplementation libs.temporal.testing - // Needed to be able to mock final class - testImplementation 'org.mockito:mockito-inline:4.7.0' -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/CancellationHandler.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/CancellationHandler.java deleted file mode 100644 index f96cb85346e8..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/CancellationHandler.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import io.temporal.activity.ActivityExecutionContext; -import io.temporal.client.ActivityCompletionException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public interface CancellationHandler { - - void checkAndHandleCancellation(Runnable onCancellationCallback); - - class TemporalCancellationHandler implements CancellationHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(TemporalCancellationHandler.class); - - private final ActivityExecutionContext activityContext; - - public TemporalCancellationHandler(final ActivityExecutionContext activityContext) { - this.activityContext = activityContext; - } - - /** - * Check for a cancellation/timeout status and run any callbacks necessary to shut down underlying - * processes. This method should generally be run frequently within an activity so a change in - * cancellation status is respected. This will only be effective if the cancellation type for the - * workflow is set to - * {@link io.temporal.activity.ActivityCancellationType#WAIT_CANCELLATION_COMPLETED}; otherwise, the - * activity will be killed automatically as part of cleanup without removing underlying processes. - * - * @param onCancellationCallback a runnable that will only run when Temporal indicates the activity - * should be killed (cancellation or timeout). - */ - @Override - public void checkAndHandleCancellation(final Runnable onCancellationCallback) { - try { - /** - * Heartbeat is somewhat misleading here. What it does is check the current Temporal activity's - * context and throw an exception if the sync has been cancelled or timed out. The input to this - * heartbeat function is available as a field in thrown ActivityCompletionExceptions, which we - * aren't using for now. - * - * We should use this only as a check for the ActivityCompletionException. 
See - * {@link TemporalUtils#withBackgroundHeartbeat} for where we actually send heartbeats to ensure - * that we don't time out the activity. - */ - activityContext.heartbeat(null); - } catch (final ActivityCompletionException e) { - onCancellationCallback.run(); - LOGGER.warn("Job either timed out or was cancelled."); - } - } - - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java deleted file mode 100644 index 40a085ee9208..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java +++ /dev/null @@ -1,257 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal; - -import io.airbyte.commons.temporal.exception.DeletedWorkflowException; -import io.airbyte.commons.temporal.exception.UnreachableWorkflowException; -import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow; -import io.airbyte.commons.temporal.scheduling.ConnectionUpdaterInput; -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; -import io.temporal.api.common.v1.WorkflowExecution; -import io.temporal.api.enums.v1.WorkflowExecutionStatus; -import io.temporal.api.workflowservice.v1.DescribeWorkflowExecutionRequest; -import io.temporal.api.workflowservice.v1.DescribeWorkflowExecutionResponse; -import io.temporal.client.BatchRequest; -import io.temporal.client.WorkflowClient; -import io.temporal.workflow.Functions.Proc; -import io.temporal.workflow.Functions.Proc1; -import io.temporal.workflow.Functions.TemporalFunctionalInterfaceMarker; -import jakarta.inject.Singleton; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Function; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -@NoArgsConstructor -@Singleton -@Slf4j -public class ConnectionManagerUtils { - - 
/** - * Send a cancellation to the workflow. It will swallow any exception and won't check if the - * workflow is already deleted when being cancel. - */ - public void deleteWorkflowIfItExist(final WorkflowClient client, - final UUID connectionId) { - try { - final ConnectionManagerWorkflow connectionManagerWorkflow = - client.newWorkflowStub(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); - connectionManagerWorkflow.deleteConnection(); - } catch (final Exception e) { - log.warn("The workflow is not reachable when trying to cancel it", e); - } - - } - - /** - * Attempts to send a signal to the existing ConnectionManagerWorkflow for the provided connection. - * - * If the workflow is unreachable, this will restart the workflow and send the signal in a single - * batched request. Batching is used to avoid race conditions between starting the workflow and - * executing the signal. - * - * @param client the WorkflowClient for interacting with temporal - * @param connectionId the connection ID to execute this operation for - * @param signalMethod a function that takes in a connection manager workflow and executes a signal - * method on it, with no arguments - * @return the healthy connection manager workflow that was signaled - * @throws DeletedWorkflowException if the connection manager workflow was deleted - */ - public ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final WorkflowClient client, - final UUID connectionId, - final Function signalMethod) - throws DeletedWorkflowException { - return signalWorkflowAndRepairIfNecessary(client, connectionId, signalMethod, Optional.empty()); - } - - /** - * Attempts to send a signal to the existing ConnectionManagerWorkflow for the provided connection. - * - * If the workflow is unreachable, this will restart the workflow and send the signal in a single - * batched request. Batching is used to avoid race conditions between starting the workflow and - * executing the signal. 
- * - * @param client the WorkflowClient for interacting with temporal - * @param connectionId the connection ID to execute this operation for - * @param signalMethod a function that takes in a connection manager workflow and executes a signal - * method on it, with 1 argument - * @param signalArgument the single argument to be input to the signal - * @return the healthy connection manager workflow that was signaled - * @throws DeletedWorkflowException if the connection manager workflow was deleted - */ - public ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final WorkflowClient client, - final UUID connectionId, - final Function> signalMethod, - final T signalArgument) - throws DeletedWorkflowException { - return signalWorkflowAndRepairIfNecessary(client, connectionId, signalMethod, Optional.of(signalArgument)); - } - - // This method unifies the logic of the above two, by using the optional signalArgument parameter to - // indicate if an argument is being provided to the signal or not. - // Keeping this private and only exposing the above methods outside this class provides a strict - // type enforcement for external calls, and means this method can assume consistent type - // implementations for both cases. - private ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final WorkflowClient client, - final UUID connectionId, - final Function signalMethod, - final Optional signalArgument) - throws DeletedWorkflowException { - try { - final ConnectionManagerWorkflow connectionManagerWorkflow = getConnectionManagerWorkflow(client, connectionId); - log.info("Retrieved existing connection manager workflow for connection {}. 
Executing signal.", connectionId); - // retrieve the signal from the lambda - final TemporalFunctionalInterfaceMarker signal = signalMethod.apply(connectionManagerWorkflow); - // execute the signal - if (signalArgument.isPresent()) { - ((Proc1) signal).apply(signalArgument.get()); - } else { - ((Proc) signal).apply(); - } - return connectionManagerWorkflow; - } catch (final UnreachableWorkflowException e) { - log.error( - String.format( - "Failed to retrieve ConnectionManagerWorkflow for connection %s. Repairing state by creating new workflow and starting with the signal.", - connectionId), - e); - - // in case there is an existing workflow in a bad state, attempt to terminate it first before - // starting a new workflow - safeTerminateWorkflow(client, connectionId, "Terminating workflow in unreachable state before starting a new workflow for this connection"); - - final ConnectionManagerWorkflow connectionManagerWorkflow = newConnectionManagerWorkflowStub(client, connectionId); - final ConnectionUpdaterInput startWorkflowInput = TemporalWorkflowUtils.buildStartWorkflowInput(connectionId); - - final BatchRequest batchRequest = client.newSignalWithStartRequest(); - batchRequest.add(connectionManagerWorkflow::run, startWorkflowInput); - - // retrieve the signal from the lambda - final TemporalFunctionalInterfaceMarker signal = signalMethod.apply(connectionManagerWorkflow); - // add signal to batch request - if (signalArgument.isPresent()) { - batchRequest.add((Proc1) signal, signalArgument.get()); - } else { - batchRequest.add((Proc) signal); - } - - client.signalWithStart(batchRequest); - log.info("Connection manager workflow for connection {} has been started and signaled.", connectionId); - - return connectionManagerWorkflow; - } - } - - void safeTerminateWorkflow(final WorkflowClient client, final String workflowId, final String reason) { - log.info("Attempting to terminate existing workflow for workflowId {}.", workflowId); - try { - 
client.newUntypedWorkflowStub(workflowId).terminate(reason); - } catch (final Exception e) { - log.warn( - "Could not terminate temporal workflow due to the following error; " - + "this may be because there is currently no running workflow for this connection.", - e); - } - } - - public void safeTerminateWorkflow(final WorkflowClient client, final UUID connectionId, final String reason) { - safeTerminateWorkflow(client, getConnectionManagerName(connectionId), reason); - } - - public ConnectionManagerWorkflow startConnectionManagerNoSignal(final WorkflowClient client, final UUID connectionId) { - final ConnectionManagerWorkflow connectionManagerWorkflow = newConnectionManagerWorkflowStub(client, connectionId); - final ConnectionUpdaterInput input = TemporalWorkflowUtils.buildStartWorkflowInput(connectionId); - WorkflowClient.start(connectionManagerWorkflow::run, input); - - return connectionManagerWorkflow; - } - - /** - * Attempts to retrieve the connection manager workflow for the provided connection. 
- * - * @param connectionId the ID of the connection whose workflow should be retrieved - * @return the healthy ConnectionManagerWorkflow - * @throws DeletedWorkflowException if the workflow was deleted, according to the workflow state - * @throws UnreachableWorkflowException if the workflow is in an unreachable state - */ - public ConnectionManagerWorkflow getConnectionManagerWorkflow(final WorkflowClient client, final UUID connectionId) - throws DeletedWorkflowException, UnreachableWorkflowException { - - final ConnectionManagerWorkflow connectionManagerWorkflow; - final WorkflowState workflowState; - final WorkflowExecutionStatus workflowExecutionStatus; - try { - connectionManagerWorkflow = client.newWorkflowStub(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); - workflowState = connectionManagerWorkflow.getState(); - workflowExecutionStatus = getConnectionManagerWorkflowStatus(client, connectionId); - } catch (final Exception e) { - throw new UnreachableWorkflowException( - String.format("Failed to retrieve ConnectionManagerWorkflow for connection %s due to the following error:", connectionId), - e); - } - - if (WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED.equals(workflowExecutionStatus)) { - if (workflowState.isDeleted()) { - throw new DeletedWorkflowException(String.format( - "The connection manager workflow for connection %s is deleted, so no further operations cannot be performed on it.", - connectionId)); - } - - // A non-deleted workflow being in a COMPLETED state is unexpected, and should be corrected - throw new UnreachableWorkflowException( - String.format("ConnectionManagerWorkflow for connection %s is unreachable due to having COMPLETED status.", connectionId)); - } - - return connectionManagerWorkflow; - } - - Optional getWorkflowState(final WorkflowClient client, final UUID connectionId) { - try { - final ConnectionManagerWorkflow connectionManagerWorkflow = 
client.newWorkflowStub(ConnectionManagerWorkflow.class, - getConnectionManagerName(connectionId)); - return Optional.of(connectionManagerWorkflow.getState()); - } catch (final Exception e) { - log.error("Exception thrown while checking workflow state for connection id {}", connectionId, e); - return Optional.empty(); - } - } - - boolean isWorkflowStateRunning(final WorkflowClient client, final UUID connectionId) { - return getWorkflowState(client, connectionId).map(WorkflowState::isRunning).orElse(false); - } - - public WorkflowExecutionStatus getConnectionManagerWorkflowStatus(final WorkflowClient workflowClient, final UUID connectionId) { - final DescribeWorkflowExecutionRequest describeWorkflowExecutionRequest = DescribeWorkflowExecutionRequest.newBuilder() - .setExecution(WorkflowExecution.newBuilder() - .setWorkflowId(getConnectionManagerName(connectionId)) - .build()) - .setNamespace(workflowClient.getOptions().getNamespace()).build(); - - final DescribeWorkflowExecutionResponse describeWorkflowExecutionResponse = workflowClient.getWorkflowServiceStubs().blockingStub() - .describeWorkflowExecution(describeWorkflowExecutionRequest); - - return describeWorkflowExecutionResponse.getWorkflowExecutionInfo().getStatus(); - } - - public long getCurrentJobId(final WorkflowClient client, final UUID connectionId) { - try { - final ConnectionManagerWorkflow connectionManagerWorkflow = getConnectionManagerWorkflow(client, connectionId); - return connectionManagerWorkflow.getJobInformation().getJobId(); - } catch (final Exception e) { - return ConnectionManagerWorkflow.NON_RUNNING_JOB_ID; - } - } - - public ConnectionManagerWorkflow newConnectionManagerWorkflowStub(final WorkflowClient client, final UUID connectionId) { - return client.newWorkflowStub(ConnectionManagerWorkflow.class, - TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.CONNECTION_UPDATER, getConnectionManagerName(connectionId))); - } - - public String getConnectionManagerName(final UUID 
connectionId) { - return "connection_manager_" + connectionId; - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ErrorCode.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ErrorCode.java deleted file mode 100644 index 3baf5cfa5213..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ErrorCode.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal; - -public enum ErrorCode { - UNKNOWN, - WORKFLOW_DELETED, - WORKFLOW_RUNNING -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/JobMetadata.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/JobMetadata.java deleted file mode 100644 index ca73a5daaaba..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/JobMetadata.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import java.nio.file.Path; -import java.util.Objects; - -public class JobMetadata { - - private final boolean succeeded; - private final Path logPath; - - public JobMetadata(final boolean succeeded, final Path logPath) { - this.succeeded = succeeded; - this.logPath = logPath; - } - - public boolean isSucceeded() { - return succeeded; - } - - public Path getLogPath() { - return logPath; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final JobMetadata that = (JobMetadata) o; - return succeeded == that.succeeded && Objects.equals(logPath, that.logPath); - } - - @Override - public int hashCode() { - return Objects.hash(succeeded, logPath); - } - - @Override - public String toString() { - return "JobMetadata{" + - "succeeded=" + succeeded + - ", logPath=" + logPath + - '}'; - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/NotificationUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/NotificationUtils.java deleted file mode 100644 index 3af59a0214a4..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/NotificationUtils.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.commons.temporal.scheduling.ConnectionNotificationWorkflow; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.temporal.client.WorkflowClient; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.UUID; -import lombok.extern.slf4j.Slf4j; - -@Singleton -@Slf4j -public class NotificationUtils { - - public NotificationUtils() {} - - public void sendSchemaChangeNotification(final WorkflowClient client, final UUID connectionId, final String url) { - final ConnectionNotificationWorkflow notificationWorkflow = - client.newWorkflowStub(ConnectionNotificationWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.NOTIFY)); - try { - notificationWorkflow.sendSchemaChangeNotification(connectionId, url); - } catch (IOException | RuntimeException | InterruptedException | ApiException | ConfigNotFoundException | JsonValidationException e) { - log.error("There was an error while sending a Schema Change Notification", e); - } - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/StreamResetRecordsHelper.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/StreamResetRecordsHelper.java deleted file mode 100644 index 2b2f2cc7b1d0..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/StreamResetRecordsHelper.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import io.airbyte.commons.temporal.exception.RetryableException; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.persistence.StreamResetPersistence; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.protocol.models.StreamDescriptor; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.List; -import java.util.UUID; -import lombok.extern.slf4j.Slf4j; - -/** - * Helper class that provides methods for dealing with stream reset records. - */ -@Singleton -@Slf4j -public class StreamResetRecordsHelper { - - private final JobPersistence jobPersistence; - private final StreamResetPersistence streamResetPersistence; - - public StreamResetRecordsHelper(final JobPersistence jobPersistence, final StreamResetPersistence streamResetPersistence) { - this.jobPersistence = jobPersistence; - this.streamResetPersistence = streamResetPersistence; - } - - /** - * Deletes all stream reset records related to the provided job and connection. - * - * @param jobId The job ID. - * @param connectionId the connection ID. - */ - public void deleteStreamResetRecordsForJob(final Long jobId, final UUID connectionId) { - if (jobId == null) { - log.info("deleteStreamResetRecordsForJob was called with a null job id; returning."); - return; - } - - try { - final Job job = jobPersistence.getJob(jobId); - final ConfigType configType = job.getConfig().getConfigType(); - if (!ConfigType.RESET_CONNECTION.equals(configType)) { - log.info("deleteStreamResetRecordsForJob was called for job {} with config type {}. 
Returning, as config type is not {}.", - jobId, - configType, - ConfigType.RESET_CONNECTION); - return; - } - - final List resetStreams = job.getConfig().getResetConnection().getResetSourceConfiguration().getStreamsToReset(); - log.info("Deleting the following streams for reset job {} from the stream_reset table: {}", jobId, resetStreams); - streamResetPersistence.deleteStreamResets(connectionId, resetStreams); - } catch (final IOException e) { - throw new RetryableException(e); - } - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java deleted file mode 100644 index 18409f12dd6d..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java +++ /dev/null @@ -1,560 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal; - -import static io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow.NON_RUNNING_JOB_ID; - -import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; -import io.airbyte.commons.temporal.exception.DeletedWorkflowException; -import io.airbyte.commons.temporal.exception.UnreachableWorkflowException; -import io.airbyte.commons.temporal.scheduling.CheckConnectionWorkflow; -import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow; -import io.airbyte.commons.temporal.scheduling.DiscoverCatalogWorkflow; -import io.airbyte.commons.temporal.scheduling.SpecWorkflow; -import io.airbyte.commons.temporal.scheduling.SyncWorkflow; -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.config.JobCheckConnectionConfig; -import io.airbyte.config.JobDiscoverCatalogConfig; -import io.airbyte.config.JobGetSpecConfig; -import 
io.airbyte.config.JobSyncConfig; -import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.config.StandardDiscoverCatalogInput; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.config.persistence.StreamResetPersistence; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.protocol.models.StreamDescriptor; -import io.temporal.api.common.v1.WorkflowType; -import io.temporal.api.enums.v1.WorkflowExecutionStatus; -import io.temporal.api.workflowservice.v1.ListClosedWorkflowExecutionsRequest; -import io.temporal.api.workflowservice.v1.ListClosedWorkflowExecutionsResponse; -import io.temporal.api.workflowservice.v1.ListOpenWorkflowExecutionsRequest; -import io.temporal.api.workflowservice.v1.ListOpenWorkflowExecutionsResponse; -import io.temporal.client.WorkflowClient; -import io.temporal.serviceclient.WorkflowServiceStubs; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.nio.file.Path; -import java.util.HashSet; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import lombok.Builder; -import lombok.Value; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.time.StopWatch; - -@Slf4j -@Singleton -public class TemporalClient { - - /** - * This is used to sleep between 2 temporal queries. The query is needed to ensure that the cancel - * and start manual sync methods wait before returning. 
Since temporal signals are async, we need to - * use the queries to make sure that we are in a state in which we want to continue with. - */ - private static final int DELAY_BETWEEN_QUERY_MS = 10; - - private final Path workspaceRoot; - private final WorkflowClient client; - private final WorkflowServiceStubs service; - private final StreamResetPersistence streamResetPersistence; - private final ConnectionManagerUtils connectionManagerUtils; - private final NotificationUtils notificationUtils; - private final StreamResetRecordsHelper streamResetRecordsHelper; - - public TemporalClient(@Named("workspaceRootTemporal") final Path workspaceRoot, - final WorkflowClient client, - final WorkflowServiceStubs service, - final StreamResetPersistence streamResetPersistence, - final ConnectionManagerUtils connectionManagerUtils, - final NotificationUtils notificationUtils, - final StreamResetRecordsHelper streamResetRecordsHelper) { - this.workspaceRoot = workspaceRoot; - this.client = client; - this.service = service; - this.streamResetPersistence = streamResetPersistence; - this.connectionManagerUtils = connectionManagerUtils; - this.notificationUtils = notificationUtils; - this.streamResetRecordsHelper = streamResetRecordsHelper; - } - - private final Set workflowNames = new HashSet<>(); - - public void restartClosedWorkflowByStatus(final WorkflowExecutionStatus executionStatus) { - final Set workflowExecutionInfos = fetchClosedWorkflowsByStatus(executionStatus); - - final Set nonRunningWorkflow = filterOutRunningWorkspaceId(workflowExecutionInfos); - - nonRunningWorkflow.forEach(connectionId -> { - connectionManagerUtils.safeTerminateWorkflow(client, connectionId, "Terminating workflow in " - + "unreachable state before starting a new workflow for this connection"); - connectionManagerUtils.startConnectionManagerNoSignal(client, connectionId); - }); - } - - Set fetchClosedWorkflowsByStatus(final WorkflowExecutionStatus executionStatus) { - ByteString token; - 
ListClosedWorkflowExecutionsRequest workflowExecutionsRequest = - ListClosedWorkflowExecutionsRequest.newBuilder() - .setNamespace(client.getOptions().getNamespace()) - .build(); - - final Set workflowExecutionInfos = new HashSet<>(); - do { - final ListClosedWorkflowExecutionsResponse listOpenWorkflowExecutionsRequest = - service.blockingStub().listClosedWorkflowExecutions(workflowExecutionsRequest); - final WorkflowType connectionManagerWorkflowType = WorkflowType.newBuilder().setName(ConnectionManagerWorkflow.class.getSimpleName()).build(); - workflowExecutionInfos.addAll(listOpenWorkflowExecutionsRequest.getExecutionsList().stream() - .filter(workflowExecutionInfo -> workflowExecutionInfo.getType() == connectionManagerWorkflowType || - workflowExecutionInfo.getStatus() == executionStatus) - .flatMap((workflowExecutionInfo -> extractConnectionIdFromWorkflowId(workflowExecutionInfo.getExecution().getWorkflowId()).stream())) - .collect(Collectors.toSet())); - token = listOpenWorkflowExecutionsRequest.getNextPageToken(); - - workflowExecutionsRequest = - ListClosedWorkflowExecutionsRequest.newBuilder() - .setNamespace(client.getOptions().getNamespace()) - .setNextPageToken(token) - .build(); - - } while (token != null && token.size() > 0); - - return workflowExecutionInfos; - } - - @VisibleForTesting - Set filterOutRunningWorkspaceId(final Set workflowIds) { - refreshRunningWorkflow(); - - final Set runningWorkflowByUUID = - workflowNames.stream().flatMap(name -> extractConnectionIdFromWorkflowId(name).stream()).collect(Collectors.toSet()); - - return workflowIds.stream().filter(workflowId -> !runningWorkflowByUUID.contains(workflowId)).collect(Collectors.toSet()); - } - - @VisibleForTesting - void refreshRunningWorkflow() { - workflowNames.clear(); - ByteString token; - ListOpenWorkflowExecutionsRequest openWorkflowExecutionsRequest = - ListOpenWorkflowExecutionsRequest.newBuilder() - .setNamespace(client.getOptions().getNamespace()) - .build(); - do { - final 
ListOpenWorkflowExecutionsResponse listOpenWorkflowExecutionsRequest = - service.blockingStub().listOpenWorkflowExecutions(openWorkflowExecutionsRequest); - final Set workflowExecutionInfos = listOpenWorkflowExecutionsRequest.getExecutionsList().stream() - .map((workflowExecutionInfo -> workflowExecutionInfo.getExecution().getWorkflowId())) - .collect(Collectors.toSet()); - workflowNames.addAll(workflowExecutionInfos); - token = listOpenWorkflowExecutionsRequest.getNextPageToken(); - - openWorkflowExecutionsRequest = - ListOpenWorkflowExecutionsRequest.newBuilder() - .setNamespace(client.getOptions().getNamespace()) - .setNextPageToken(token) - .build(); - - } while (token != null && token.size() > 0); - } - - Optional extractConnectionIdFromWorkflowId(final String workflowId) { - if (!workflowId.startsWith("connection_manager_")) { - return Optional.empty(); - } - return Optional.ofNullable(StringUtils.removeStart(workflowId, "connection_manager_")) - .map( - stringUUID -> UUID.fromString(stringUUID)); - } - - @Value - @Builder - public static class ManualOperationResult { - - final Optional failingReason; - final Optional jobId; - final Optional errorCode; - - } - - public Optional getWorkflowState(final UUID connectionId) { - return connectionManagerUtils.getWorkflowState(client, connectionId); - } - - public ManualOperationResult startNewManualSync(final UUID connectionId) { - log.info("Manual sync request"); - - if (connectionManagerUtils.isWorkflowStateRunning(client, connectionId)) { - // TODO Bmoric: Error is running - return new ManualOperationResult( - Optional.of("A sync is already running for: " + connectionId), - Optional.empty(), Optional.of(ErrorCode.WORKFLOW_RUNNING)); - } - - try { - connectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, workflow -> workflow::submitManualSync); - } catch (final DeletedWorkflowException e) { - log.error("Can't sync a deleted connection.", e); - return new ManualOperationResult( - 
Optional.of(e.getMessage()), - Optional.empty(), Optional.of(ErrorCode.WORKFLOW_DELETED)); - } - - do { - try { - Thread.sleep(DELAY_BETWEEN_QUERY_MS); - } catch (final InterruptedException e) { - return new ManualOperationResult( - Optional.of("Didn't managed to start a sync for: " + connectionId), - Optional.empty(), Optional.of(ErrorCode.UNKNOWN)); - } - } while (!connectionManagerUtils.isWorkflowStateRunning(client, connectionId)); - - log.info("end of manual schedule"); - - final long jobId = connectionManagerUtils.getCurrentJobId(client, connectionId); - - return new ManualOperationResult( - Optional.empty(), - Optional.of(jobId), Optional.empty()); - } - - public ManualOperationResult startNewCancellation(final UUID connectionId) { - log.info("Manual cancellation request"); - - final long jobId = connectionManagerUtils.getCurrentJobId(client, connectionId); - - try { - connectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, workflow -> workflow::cancelJob); - } catch (final DeletedWorkflowException e) { - log.error("Can't cancel a deleted workflow", e); - return new ManualOperationResult( - Optional.of(e.getMessage()), - Optional.empty(), Optional.of(ErrorCode.WORKFLOW_DELETED)); - } - - do { - try { - Thread.sleep(DELAY_BETWEEN_QUERY_MS); - } catch (final InterruptedException e) { - return new ManualOperationResult( - Optional.of("Didn't manage to cancel a sync for: " + connectionId), - Optional.empty(), Optional.of(ErrorCode.UNKNOWN)); - } - } while (connectionManagerUtils.isWorkflowStateRunning(client, connectionId)); - - streamResetRecordsHelper.deleteStreamResetRecordsForJob(jobId, connectionId); - - log.info("end of manual cancellation"); - - return new ManualOperationResult( - Optional.empty(), - Optional.of(jobId), Optional.empty()); - } - - public ManualOperationResult resetConnection(final UUID connectionId, - final List streamsToReset, - final boolean syncImmediatelyAfter) { - log.info("reset sync request"); - - try { - 
streamResetPersistence.createStreamResets(connectionId, streamsToReset); - } catch (final IOException e) { - log.error("Could not persist streams to reset.", e); - return new ManualOperationResult( - Optional.of(e.getMessage()), - Optional.empty(), Optional.of(ErrorCode.UNKNOWN)); - } - - // get the job ID before the reset, defaulting to NON_RUNNING_JOB_ID if workflow is unreachable - final long oldJobId = connectionManagerUtils.getCurrentJobId(client, connectionId); - - try { - if (syncImmediatelyAfter) { - connectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, workflow -> workflow::resetConnectionAndSkipNextScheduling); - } else { - connectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, workflow -> workflow::resetConnection); - } - } catch (final DeletedWorkflowException e) { - log.error("Can't reset a deleted workflow", e); - return new ManualOperationResult( - Optional.of(e.getMessage()), - Optional.empty(), Optional.of(ErrorCode.UNKNOWN)); - } - - Optional newJobId; - - do { - try { - Thread.sleep(DELAY_BETWEEN_QUERY_MS); - } catch (final InterruptedException e) { - return new ManualOperationResult( - Optional.of("Didn't manage to reset a sync for: " + connectionId), - Optional.empty(), Optional.of(ErrorCode.UNKNOWN)); - } - newJobId = getNewJobId(connectionId, oldJobId); - } while (newJobId.isEmpty()); - - log.info("end of reset submission"); - - return new ManualOperationResult( - Optional.empty(), - newJobId, Optional.empty()); - } - - private Optional getNewJobId(final UUID connectionId, final long oldJobId) { - final long currentJobId = connectionManagerUtils.getCurrentJobId(client, connectionId); - if (currentJobId == NON_RUNNING_JOB_ID || currentJobId == oldJobId) { - return Optional.empty(); - } else { - return Optional.of(currentJobId); - } - } - - public TemporalResponse submitGetSpec(final UUID jobId, final int attempt, final JobGetSpecConfig config) { - final JobRunConfig jobRunConfig = 
TemporalWorkflowUtils.createJobRunConfig(jobId, attempt); - - final IntegrationLauncherConfig launcherConfig = new IntegrationLauncherConfig() - .withJobId(jobId.toString()) - .withAttemptId((long) attempt) - .withDockerImage(config.getDockerImage()) - .withIsCustomConnector(config.getIsCustomConnector()); - return execute(jobRunConfig, - () -> getWorkflowStub(SpecWorkflow.class, TemporalJobType.GET_SPEC).run(jobRunConfig, launcherConfig)); - - } - - public TemporalResponse submitCheckConnection(final UUID jobId, - final int attempt, - final String taskQueue, - final JobCheckConnectionConfig config) { - final JobRunConfig jobRunConfig = TemporalWorkflowUtils.createJobRunConfig(jobId, attempt); - final IntegrationLauncherConfig launcherConfig = new IntegrationLauncherConfig() - .withJobId(jobId.toString()) - .withAttemptId((long) attempt) - .withDockerImage(config.getDockerImage()) - .withProtocolVersion(config.getProtocolVersion()) - .withIsCustomConnector(config.getIsCustomConnector()); - final StandardCheckConnectionInput input = new StandardCheckConnectionInput() - .withActorType(config.getActorType()) - .withActorId(config.getActorId()) - .withConnectionConfiguration(config.getConnectionConfiguration()); - - return execute(jobRunConfig, - () -> getWorkflowStubWithTaskQueue(CheckConnectionWorkflow.class, taskQueue).run(jobRunConfig, launcherConfig, input)); - } - - public TemporalResponse submitDiscoverSchema(final UUID jobId, - final int attempt, - final String taskQueue, - final JobDiscoverCatalogConfig config) { - final JobRunConfig jobRunConfig = TemporalWorkflowUtils.createJobRunConfig(jobId, attempt); - final IntegrationLauncherConfig launcherConfig = new IntegrationLauncherConfig() - .withJobId(jobId.toString()) - .withAttemptId((long) attempt) - .withDockerImage(config.getDockerImage()) - .withProtocolVersion(config.getProtocolVersion()) - .withIsCustomConnector(config.getIsCustomConnector()); - final StandardDiscoverCatalogInput input = new 
StandardDiscoverCatalogInput().withConnectionConfiguration(config.getConnectionConfiguration()) - .withSourceId(config.getSourceId()).withConnectorVersion(config.getConnectorVersion()).withConfigHash(config.getConfigHash()); - - return execute(jobRunConfig, - () -> getWorkflowStubWithTaskQueue(DiscoverCatalogWorkflow.class, taskQueue).run(jobRunConfig, launcherConfig, input)); - } - - public TemporalResponse submitSync(final long jobId, - final int attempt, - final JobSyncConfig config, - final AttemptSyncConfig attemptConfig, - final UUID connectionId) { - final JobRunConfig jobRunConfig = TemporalWorkflowUtils.createJobRunConfig(jobId, attempt); - - final IntegrationLauncherConfig sourceLauncherConfig = new IntegrationLauncherConfig() - .withJobId(String.valueOf(jobId)) - .withAttemptId((long) attempt) - .withDockerImage(config.getSourceDockerImage()) - .withProtocolVersion(config.getSourceProtocolVersion()) - .withIsCustomConnector(config.getIsSourceCustomConnector()); - - final IntegrationLauncherConfig destinationLauncherConfig = new IntegrationLauncherConfig() - .withJobId(String.valueOf(jobId)) - .withAttemptId((long) attempt) - .withDockerImage(config.getDestinationDockerImage()) - .withProtocolVersion(config.getDestinationProtocolVersion()) - .withIsCustomConnector(config.getIsDestinationCustomConnector()); - - final StandardSyncInput input = new StandardSyncInput() - .withNamespaceDefinition(config.getNamespaceDefinition()) - .withNamespaceFormat(config.getNamespaceFormat()) - .withPrefix(config.getPrefix()) - .withSourceConfiguration(attemptConfig.getSourceConfiguration()) - .withDestinationConfiguration(attemptConfig.getDestinationConfiguration()) - .withOperationSequence(config.getOperationSequence()) - .withCatalog(config.getConfiguredAirbyteCatalog()) - .withState(attemptConfig.getState()) - .withResourceRequirements(config.getResourceRequirements()) - .withSourceResourceRequirements(config.getSourceResourceRequirements()) - 
.withDestinationResourceRequirements(config.getDestinationResourceRequirements()) - .withConnectionId(connectionId) - .withWorkspaceId(config.getWorkspaceId()); - - return execute(jobRunConfig, - () -> getWorkflowStub(SyncWorkflow.class, TemporalJobType.SYNC).run( - jobRunConfig, - sourceLauncherConfig, - destinationLauncherConfig, - input, - connectionId)); - } - - public void migrateSyncIfNeeded(final Set connectionIds) { - final StopWatch globalMigrationWatch = new StopWatch(); - globalMigrationWatch.start(); - refreshRunningWorkflow(); - - connectionIds.forEach((connectionId) -> { - final StopWatch singleSyncMigrationWatch = new StopWatch(); - singleSyncMigrationWatch.start(); - if (!isInRunningWorkflowCache(connectionManagerUtils.getConnectionManagerName(connectionId))) { - log.info("Migrating: " + connectionId); - try { - submitConnectionUpdaterAsync(connectionId); - } catch (final Exception e) { - log.error("New workflow submission failed, retrying", e); - refreshRunningWorkflow(); - submitConnectionUpdaterAsync(connectionId); - } - } - singleSyncMigrationWatch.stop(); - log.info("Sync migration took: " + singleSyncMigrationWatch.formatTime()); - }); - globalMigrationWatch.stop(); - - log.info("The migration to the new scheduler took: " + globalMigrationWatch.formatTime()); - } - - @VisibleForTesting - TemporalResponse execute(final JobRunConfig jobRunConfig, final Supplier executor) { - final Path jobRoot = TemporalUtils.getJobRoot(workspaceRoot, jobRunConfig); - final Path logPath = TemporalUtils.getLogPath(jobRoot); - - T operationOutput = null; - RuntimeException exception = null; - - try { - operationOutput = executor.get(); - } catch (final RuntimeException e) { - exception = e; - } - - boolean succeeded = exception == null; - if (succeeded && operationOutput instanceof ConnectorJobOutput) { - succeeded = getConnectorJobSucceeded((ConnectorJobOutput) operationOutput); - } - - final JobMetadata metadata = new JobMetadata(succeeded, logPath); - return 
new TemporalResponse<>(operationOutput, metadata); - } - - private T getWorkflowStub(final Class workflowClass, final TemporalJobType jobType) { - return client.newWorkflowStub(workflowClass, TemporalWorkflowUtils.buildWorkflowOptions(jobType)); - } - - private T getWorkflowStubWithTaskQueue(final Class workflowClass, final String taskQueue) { - return client.newWorkflowStub(workflowClass, TemporalWorkflowUtils.buildWorkflowOptionsWithTaskQueue(taskQueue)); - } - - public ConnectionManagerWorkflow submitConnectionUpdaterAsync(final UUID connectionId) { - log.info("Starting the scheduler temporal wf"); - final ConnectionManagerWorkflow connectionManagerWorkflow = - connectionManagerUtils.startConnectionManagerNoSignal(client, connectionId); - try { - CompletableFuture.supplyAsync(() -> { - try { - do { - Thread.sleep(DELAY_BETWEEN_QUERY_MS); - } while (!isWorkflowReachable(connectionId)); - } catch (final InterruptedException e) {} - return null; - }).get(60, TimeUnit.SECONDS); - } catch (final InterruptedException | ExecutionException e) { - log.error("Failed to create a new connection manager workflow", e); - } catch (final TimeoutException e) { - log.error("Can't create a new connection manager workflow due to timeout", e); - } - - return connectionManagerWorkflow; - } - - /** - * This will cancel a workflow even if the connection is deleted already - * - * @param connectionId - connectionId to cancel - */ - public void forceDeleteWorkflow(final UUID connectionId) { - connectionManagerUtils.deleteWorkflowIfItExist(client, connectionId); - } - - public void sendSchemaChangeNotification(final UUID connectionId, final String url) { - notificationUtils.sendSchemaChangeNotification(client, connectionId, url); - } - - public void update(final UUID connectionId) { - final ConnectionManagerWorkflow connectionManagerWorkflow; - try { - connectionManagerWorkflow = connectionManagerUtils.getConnectionManagerWorkflow(client, connectionId); - } catch (final 
DeletedWorkflowException e) { - log.info("Connection {} is deleted, and therefore cannot be updated.", connectionId); - return; - } catch (final UnreachableWorkflowException e) { - log.error( - String.format("Failed to retrieve ConnectionManagerWorkflow for connection %s. Repairing state by creating new workflow.", connectionId), - e); - connectionManagerUtils.safeTerminateWorkflow(client, connectionId, - "Terminating workflow in unreachable state before starting a new workflow for this connection"); - submitConnectionUpdaterAsync(connectionId); - return; - } - - connectionManagerWorkflow.connectionUpdated(); - } - - private boolean getConnectorJobSucceeded(final ConnectorJobOutput output) { - return output.getFailureReason() == null; - } - - /** - * Check if a workflow is reachable for signal calls by attempting to query for current state. If - * the query succeeds, and the workflow is not marked as deleted, the workflow is reachable. - */ - @VisibleForTesting - boolean isWorkflowReachable(final UUID connectionId) { - try { - connectionManagerUtils.getConnectionManagerWorkflow(client, connectionId); - return true; - } catch (final Exception e) { - return false; - } - } - - boolean isInRunningWorkflowCache(final String workflowName) { - return workflowNames.contains(workflowName); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalInitializationUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalInitializationUtils.java deleted file mode 100644 index 6234e7fa2c4e..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalInitializationUtils.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import io.grpc.StatusRuntimeException; -import io.micronaut.context.annotation.Value; -import io.micronaut.core.util.StringUtils; -import io.temporal.api.workflowservice.v1.DescribeNamespaceRequest; -import io.temporal.serviceclient.WorkflowServiceStubs; -import jakarta.inject.Inject; -import jakarta.inject.Singleton; -import java.util.concurrent.TimeUnit; -import lombok.extern.slf4j.Slf4j; - -@Singleton -@Slf4j -public class TemporalInitializationUtils { - - @Inject - private WorkflowServiceStubs temporalService; - @Value("${temporal.cloud.namespace}") - private String temporalCloudNamespace; - - /** - * Blocks until the Temporal {@link TemporalUtils#DEFAULT_NAMESPACE} has been created. This is - * necessary to avoid issues related to - * https://community.temporal.io/t/running-into-an-issue-when-creating-namespace-programmatically/2783/8. - */ - public void waitForTemporalNamespace() { - boolean namespaceExists = false; - final String temporalNamespace = getTemporalNamespace(); - while (!namespaceExists) { - try { - // This is to allow the configured namespace to be available in the Temporal - // cache before continuing on with any additional configuration/bean creation. - temporalService.blockingStub().describeNamespace(DescribeNamespaceRequest.newBuilder().setNamespace(temporalNamespace).build()); - namespaceExists = true; - // This is to allow the configured namespace to be available in the Temporal - // cache before continuing on with any additional configuration/bean creation. - Thread.sleep(TimeUnit.SECONDS.toMillis(5)); - } catch (final InterruptedException | StatusRuntimeException e) { - log.debug("Namespace '{}' does not exist yet. Re-checking...", temporalNamespace); - try { - Thread.sleep(TimeUnit.SECONDS.toMillis(5)); - } catch (final InterruptedException ie) { - log.debug("Sleep interrupted. Exiting loop..."); - } - } - } - } - - /** - * Retrieve the Temporal namespace based on the configuration. 
- * - * @return The Temporal namespace. - */ - private String getTemporalNamespace() { - return StringUtils.isNotEmpty(temporalCloudNamespace) ? temporalCloudNamespace : TemporalUtils.DEFAULT_NAMESPACE; - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalJobType.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalJobType.java deleted file mode 100644 index 2470e669fea0..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalJobType.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal; - -public enum TemporalJobType { - GET_SPEC, - CHECK_CONNECTION, - DISCOVER_SCHEMA, - SYNC, - RESET_CONNECTION, - CONNECTION_UPDATER, - REPLICATE, - NOTIFY -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalResponse.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalResponse.java deleted file mode 100644 index c2788a8c0ea8..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import java.util.Objects; -import java.util.Optional; - -public class TemporalResponse { - - private final T output; - private final JobMetadata metadata; - - public static TemporalResponse error(final JobMetadata metadata) { - return new TemporalResponse<>(null, metadata); - } - - public static TemporalResponse success(final T output, final JobMetadata metadata) { - return new TemporalResponse<>(output, metadata); - } - - public TemporalResponse(final T output, final JobMetadata metadata) { - this.output = output; - this.metadata = metadata; - } - - public boolean isSuccess() { - return metadata.isSucceeded(); - } - - /** - * Returns the output of the Temporal job. - * - * @return The output of the Temporal job. Empty if no output or if the job failed. - */ - public Optional getOutput() { - return Optional.ofNullable(output); - } - - public JobMetadata getMetadata() { - return metadata; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final TemporalResponse that = (TemporalResponse) o; - return Objects.equals(output, that.output) && Objects.equals(metadata, that.metadata); - } - - @Override - public int hashCode() { - return Objects.hash(output, metadata); - } - - @Override - public String toString() { - return "TemporalResponse{" + - "output=" + output + - ", metadata=" + metadata + - '}'; - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalUtils.java deleted file mode 100644 index 8d6b37aeaa08..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalUtils.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import com.uber.m3.tally.RootScopeBuilder; -import com.uber.m3.tally.Scope; -import com.uber.m3.tally.StatsReporter; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.micrometer.core.instrument.MeterRegistry; -import io.micronaut.context.annotation.Property; -import io.micronaut.context.annotation.Value; -import io.temporal.activity.ActivityExecutionContext; -import io.temporal.api.common.v1.WorkflowExecution; -import io.temporal.api.namespace.v1.NamespaceConfig; -import io.temporal.api.namespace.v1.NamespaceInfo; -import io.temporal.api.workflowservice.v1.DescribeNamespaceRequest; -import io.temporal.api.workflowservice.v1.UpdateNamespaceRequest; -import io.temporal.client.ActivityCompletionException; -import io.temporal.client.WorkflowClient; -import io.temporal.client.WorkflowStub; -import io.temporal.common.RetryOptions; -import io.temporal.common.reporter.MicrometerClientStatsReporter; -import io.temporal.serviceclient.SimpleSslContextBuilder; -import io.temporal.serviceclient.WorkflowServiceStubs; -import io.temporal.serviceclient.WorkflowServiceStubsOptions; -import io.temporal.workflow.Functions; -import jakarta.inject.Singleton; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.io.Serializable; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.time.Duration; -import java.util.UUID; -import java.util.concurrent.Callable; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Supplier; -import javax.net.ssl.SSLException; -import lombok.extern.slf4j.Slf4j; -import 
org.apache.commons.lang3.time.DurationFormatUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; - -@Slf4j -@Singleton -public class TemporalUtils { - - private static final Duration WAIT_INTERVAL = Duration.ofSeconds(2); - private static final Duration MAX_TIME_TO_CONNECT = Duration.ofMinutes(2); - private static final Duration WAIT_TIME_AFTER_CONNECT = Duration.ofSeconds(5); - public static final String DEFAULT_NAMESPACE = "default"; - public static final Duration SEND_HEARTBEAT_INTERVAL = Duration.ofSeconds(10); - public static final Duration HEARTBEAT_TIMEOUT = Duration.ofSeconds(30); - public static final RetryOptions NO_RETRY = RetryOptions.newBuilder().setMaximumAttempts(1).build(); - private static final double REPORT_INTERVAL_SECONDS = 120.0; - - private final String temporalCloudClientCert; - private final String temporalCloudClientKey; - private final Boolean temporalCloudEnabled; - private final String temporalCloudHost; - private final String temporalCloudNamespace; - private final String temporalHost; - private final Integer temporalRetentionInDays; - - public TemporalUtils(@Property(name = "temporal.cloud.client.cert") final String temporalCloudClientCert, - @Property(name = "temporal.cloud.client.key") final String temporalCloudClientKey, - @Property(name = "temporal.cloud.enabled", - defaultValue = "false") final Boolean temporalCloudEnabled, - @Value("${temporal.cloud.host}") final String temporalCloudHost, - @Value("${temporal.cloud.namespace}") final String temporalCloudNamespace, - @Value("${temporal.host}") final String temporalHost, - @Property(name = "${temporal.retention}", - defaultValue = "30") final Integer temporalRetentionInDays) { - this.temporalCloudClientCert = temporalCloudClientCert; - this.temporalCloudClientKey = temporalCloudClientKey; - this.temporalCloudEnabled = temporalCloudEnabled; - this.temporalCloudHost = temporalCloudHost; - this.temporalCloudNamespace = temporalCloudNamespace; - this.temporalHost = 
temporalHost; - this.temporalRetentionInDays = temporalRetentionInDays; - } - - public WorkflowServiceStubs createTemporalService(final WorkflowServiceStubsOptions options, final String namespace) { - return getTemporalClientWhenConnected( - WAIT_INTERVAL, - MAX_TIME_TO_CONNECT, - WAIT_TIME_AFTER_CONNECT, - () -> WorkflowServiceStubs.newInstance(options), - namespace); - } - - // TODO consider consolidating this method's logic into createTemporalService() after the Temporal - // Cloud migration is complete. - // The Temporal Migration migrator is the only reason this public method exists. - public WorkflowServiceStubs createTemporalService(final boolean isCloud) { - final WorkflowServiceStubsOptions options = isCloud ? getCloudTemporalOptions() : TemporalWorkflowUtils.getAirbyteTemporalOptions(temporalHost); - final String namespace = isCloud ? temporalCloudNamespace : DEFAULT_NAMESPACE; - - return createTemporalService(options, namespace); - } - - public WorkflowServiceStubs createTemporalService() { - return createTemporalService(temporalCloudEnabled); - } - - private WorkflowServiceStubsOptions getCloudTemporalOptions() { - final InputStream clientCert = new ByteArrayInputStream(temporalCloudClientCert.getBytes(StandardCharsets.UTF_8)); - final InputStream clientKey = new ByteArrayInputStream(temporalCloudClientKey.getBytes(StandardCharsets.UTF_8)); - final WorkflowServiceStubsOptions.Builder optionBuilder; - try { - optionBuilder = WorkflowServiceStubsOptions.newBuilder() - .setSslContext(SimpleSslContextBuilder.forPKCS8(clientCert, clientKey).build()) - .setTarget(temporalCloudHost); - } catch (final SSLException e) { - log.error("SSL Exception occurred attempting to establish Temporal Cloud options."); - throw new RuntimeException(e); - } - - configureTemporalMeterRegistry(optionBuilder); - return optionBuilder.build(); - } - - private void configureTemporalMeterRegistry(final WorkflowServiceStubsOptions.Builder optionalBuilder) { - final MeterRegistry 
registry = MetricClientFactory.getMeterRegistry(); - if (registry != null) { - final StatsReporter reporter = new MicrometerClientStatsReporter(registry); - final Scope scope = new RootScopeBuilder() - .reporter(reporter) - .reportEvery(com.uber.m3.util.Duration.ofSeconds(REPORT_INTERVAL_SECONDS)); - optionalBuilder.setMetricsScope(scope); - } - } - - public String getNamespace() { - return temporalCloudEnabled ? temporalCloudNamespace : DEFAULT_NAMESPACE; - } - - /** - * Modifies the retention period for on-premise deployment of Temporal at the default namespace. - * This should not be called when using Temporal Cloud, because Temporal Cloud does not allow - * programmatic modification of workflow execution retention TTL. - */ - public void configureTemporalNamespace(final WorkflowServiceStubs temporalService) { - if (temporalCloudEnabled) { - log.info("Skipping Temporal Namespace configuration because Temporal Cloud is in use."); - return; - } - - final var client = temporalService.blockingStub(); - final var describeNamespaceRequest = DescribeNamespaceRequest.newBuilder().setNamespace(DEFAULT_NAMESPACE).build(); - final var currentRetentionGrpcDuration = client.describeNamespace(describeNamespaceRequest).getConfig().getWorkflowExecutionRetentionTtl(); - final var currentRetention = Duration.ofSeconds(currentRetentionGrpcDuration.getSeconds()); - final var workflowExecutionTtl = Duration.ofDays(temporalRetentionInDays); - final var humanReadableWorkflowExecutionTtl = DurationFormatUtils.formatDurationWords(workflowExecutionTtl.toMillis(), true, true); - - if (currentRetention.equals(workflowExecutionTtl)) { - log.info("Workflow execution TTL already set for namespace " + DEFAULT_NAMESPACE + ". 
Remains unchanged as: " - + humanReadableWorkflowExecutionTtl); - } else { - final var newGrpcDuration = com.google.protobuf.Duration.newBuilder().setSeconds(workflowExecutionTtl.getSeconds()).build(); - final var humanReadableCurrentRetention = DurationFormatUtils.formatDurationWords(currentRetention.toMillis(), true, true); - final var namespaceConfig = NamespaceConfig.newBuilder().setWorkflowExecutionRetentionTtl(newGrpcDuration).build(); - final var updateNamespaceRequest = UpdateNamespaceRequest.newBuilder().setNamespace(DEFAULT_NAMESPACE).setConfig(namespaceConfig).build(); - log.info("Workflow execution TTL differs for namespace " + DEFAULT_NAMESPACE + ". Changing from (" + humanReadableCurrentRetention + ") to (" - + humanReadableWorkflowExecutionTtl + "). "); - client.updateNamespace(updateNamespaceRequest); - } - } - - @FunctionalInterface - public interface TemporalJobCreator { - - UUID create(WorkflowClient workflowClient, long jobId, int attempt, T config); - - } - - /** - * Allows running a given temporal workflow stub asynchronously. This method only works for - * workflows that take one argument. Because of the iface that Temporal supplies, in order to handle - * other method signatures, if we need to support them, we will need to add another helper with that - * number of args. 
For a reference on how Temporal recommends to do this see their docs: - * https://docs.temporal.io/docs/java/workflows#asynchronous-start - * - * @param workflowStub - workflow stub to be executed - * @param function - function on the workflow stub to be executed - * @param arg1 - argument to be supplied to the workflow function - * @param outputType - class of the output type of the workflow function - * @param - type of the workflow stub - * @param - type of the argument of the workflow stub - * @param - type of the return of the workflow stub - * @return pair of the workflow execution (contains metadata on the asynchronously running job) and - * future that can be used to await the result of the workflow stub's function - */ - public ImmutablePair> asyncExecute(final STUB workflowStub, - final Functions.Func1 function, - final A1 arg1, - final Class outputType) { - final WorkflowStub untyped = WorkflowStub.fromTyped(workflowStub); - final WorkflowExecution workflowExecution = WorkflowClient.start(function, arg1); - final CompletableFuture resultAsync = untyped.getResultAsync(outputType); - return ImmutablePair.of(workflowExecution, resultAsync); - } - - /** - * Loops and waits for the Temporal service to become available and returns a client. - *

- * This function uses a supplier as input since the creation of a WorkflowServiceStubs can result in - * connection exceptions as well. - */ - public WorkflowServiceStubs getTemporalClientWhenConnected( - final Duration waitInterval, - final Duration maxTimeToConnect, - final Duration waitAfterConnection, - final Supplier temporalServiceSupplier, - final String namespace) { - log.info("Waiting for temporal server..."); - - boolean temporalNamespaceInitialized = false; - WorkflowServiceStubs temporalService = null; - long millisWaited = 0; - - while (!temporalNamespaceInitialized) { - if (millisWaited >= maxTimeToConnect.toMillis()) { - throw new RuntimeException("Could not create Temporal client within max timeout!"); - } - - log.warn("Waiting for namespace {} to be initialized in temporal...", namespace); - Exceptions.toRuntime(() -> Thread.sleep(waitInterval.toMillis())); - millisWaited = millisWaited + waitInterval.toMillis(); - - try { - temporalService = temporalServiceSupplier.get(); - final var namespaceInfo = getNamespaceInfo(temporalService, namespace); - temporalNamespaceInitialized = namespaceInfo.isInitialized(); - } catch (final Exception e) { - // Ignore the exception because this likely means that the Temporal service is still initializing. - log.warn("Ignoring exception while trying to request Temporal namespace:", e); - } - } - - // sometimes it takes a few additional seconds for workflow queue listening to be available - Exceptions.toRuntime(() -> Thread.sleep(waitAfterConnection.toMillis())); - - log.info("Temporal namespace {} initialized!", namespace); - - return temporalService; - } - - protected NamespaceInfo getNamespaceInfo(final WorkflowServiceStubs temporalService, final String namespace) { - return temporalService.blockingStub() - .describeNamespace(DescribeNamespaceRequest.newBuilder().setNamespace(namespace).build()) - .getNamespaceInfo(); - } - - /** - * Runs the code within the supplier while heartbeating in the backgroud. 
Also makes sure to shut - * down the heartbeat server after the fact. - */ - public T withBackgroundHeartbeat(final Callable callable, - final Supplier activityContext) { - final ScheduledExecutorService scheduledExecutor = Executors.newSingleThreadScheduledExecutor(); - - try { - scheduledExecutor.scheduleAtFixedRate( - () -> new CancellationHandler.TemporalCancellationHandler(activityContext.get()).checkAndHandleCancellation(() -> {}), - 0, SEND_HEARTBEAT_INTERVAL.toSeconds(), TimeUnit.SECONDS); - - return callable.call(); - } catch (final ActivityCompletionException e) { - log.warn("Job either timed out or was cancelled."); - throw new RuntimeException(e); - } catch (final Exception e) { - throw new RuntimeException(e); - } finally { - log.info("Stopping temporal heartbeating..."); - scheduledExecutor.shutdown(); - } - } - - public T withBackgroundHeartbeat(final AtomicReference afterCancellationCallbackRef, - final Callable callable, - final Supplier activityContext) { - final ScheduledExecutorService scheduledExecutor = Executors.newSingleThreadScheduledExecutor(); - - try { - // Schedule the cancellation handler. - scheduledExecutor.scheduleAtFixedRate(() -> { - final CancellationHandler cancellationHandler = new CancellationHandler.TemporalCancellationHandler(activityContext.get()); - - cancellationHandler.checkAndHandleCancellation(() -> { - // After cancellation cleanup. 
- if (afterCancellationCallbackRef != null) { - final Runnable cancellationCallback = afterCancellationCallbackRef.get(); - if (cancellationCallback != null) { - cancellationCallback.run(); - } - } - }); - }, 0, SEND_HEARTBEAT_INTERVAL.toSeconds(), TimeUnit.SECONDS); - - return callable.call(); - } catch (final ActivityCompletionException e) { - log.warn("Job either timed out or was cancelled."); - throw new RuntimeException(e); - } catch (final Exception e) { - throw new RuntimeException(e); - } finally { - log.info("Stopping temporal heartbeating..."); - scheduledExecutor.shutdown(); - } - } - - // todo (cgardens) - there are 2 sources of truth for job path. we need to reduce this down to one, - // once we are fully on temporal. - public static Path getJobRoot(final Path workspaceRoot, final JobRunConfig jobRunConfig) { - return getJobRoot(workspaceRoot, jobRunConfig.getJobId(), jobRunConfig.getAttemptId()); - } - - public static Path getLogPath(final Path jobRoot) { - return jobRoot.resolve(LogClientSingleton.LOG_FILENAME); - } - - public static Path getJobRoot(final Path workspaceRoot, final String jobId, final long attemptId) { - return getJobRoot(workspaceRoot, jobId, Math.toIntExact(attemptId)); - } - - public static Path getJobRoot(final Path workspaceRoot, final String jobId, final int attemptId) { - return workspaceRoot - .resolve(String.valueOf(jobId)) - .resolve(String.valueOf(attemptId)); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalWorkflowUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalWorkflowUtils.java deleted file mode 100644 index d61a6944b0f1..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalWorkflowUtils.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.temporal.scheduling.ConnectionUpdaterInput; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.temporal.client.WorkflowClient; -import io.temporal.client.WorkflowClientOptions; -import io.temporal.client.WorkflowOptions; -import io.temporal.common.RetryOptions; -import io.temporal.serviceclient.WorkflowServiceStubs; -import io.temporal.serviceclient.WorkflowServiceStubsOptions; -import java.time.Duration; -import java.util.UUID; - -/** - * Collection of Temporal workflow related utility methods. - * - * N.B: These methods should not store any state or depend on any other objects/singletons - * managed by the application framework. - */ -public class TemporalWorkflowUtils { - - public static final RetryOptions NO_RETRY = RetryOptions.newBuilder().setMaximumAttempts(1).build(); - - private TemporalWorkflowUtils() {} - - public static ConnectionUpdaterInput buildStartWorkflowInput(final UUID connectionId) { - return ConnectionUpdaterInput.builder() - .connectionId(connectionId) - .jobId(null) - .attemptId(null) - .fromFailure(false) - .attemptNumber(1) - .workflowState(null) - .resetConnection(false) - .fromJobResetFailure(false) - .build(); - } - - public static WorkflowOptions buildWorkflowOptions(final TemporalJobType jobType, final String workflowId) { - return WorkflowOptions.newBuilder() - .setWorkflowId(workflowId) - .setRetryOptions(NO_RETRY) - .setTaskQueue(jobType.name()) - .build(); - } - - public static WorkflowOptions buildWorkflowOptions(final TemporalJobType jobType) { - return buildWorkflowOptionsWithTaskQueue(jobType.name()); - } - - public static WorkflowOptions buildWorkflowOptionsWithTaskQueue(final String taskQueue) { - return WorkflowOptions.newBuilder() - .setTaskQueue(taskQueue) - .setWorkflowTaskTimeout(Duration.ofSeconds(27)) // TODO parker - temporarily increasing this to a recognizable number to 
see if it changes - // error I'm seeing - // todo (cgardens) we do not leverage Temporal retries. - .setRetryOptions(RetryOptions.newBuilder().setMaximumAttempts(1).build()) - .build(); - } - - public static JobRunConfig createJobRunConfig(final UUID jobId, final int attemptId) { - return createJobRunConfig(String.valueOf(jobId), attemptId); - } - - public static JobRunConfig createJobRunConfig(final long jobId, final int attemptId) { - return createJobRunConfig(String.valueOf(jobId), attemptId); - } - - public static JobRunConfig createJobRunConfig(final String jobId, final int attemptId) { - return new JobRunConfig() - .withJobId(jobId) - .withAttemptId((long) attemptId); - } - - @VisibleForTesting - public static WorkflowServiceStubsOptions getAirbyteTemporalOptions(final String temporalHost) { - return WorkflowServiceStubsOptions.newBuilder() - .setTarget(temporalHost) - .build(); - } - - public static WorkflowClient createWorkflowClient(final WorkflowServiceStubs workflowServiceStubs, final String namespace) { - return WorkflowClient.newInstance( - workflowServiceStubs, - WorkflowClientOptions.newBuilder() - .setNamespace(namespace) - .build()); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/CommonFactory.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/CommonFactory.java deleted file mode 100644 index 007d11c66aa7..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/CommonFactory.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.config; - -import io.airbyte.config.Configs.WorkerEnvironment; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.env.Environment; -import jakarta.inject.Singleton; - -@Factory -public class CommonFactory { - - @Singleton - public WorkerEnvironment workerEnvironment(final Environment environment) { - return environment.getActiveNames().contains(Environment.KUBERNETES) ? WorkerEnvironment.KUBERNETES : WorkerEnvironment.DOCKER; - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/TemporalBeanFactory.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/TemporalBeanFactory.java deleted file mode 100644 index aa2d4abfff4f..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/TemporalBeanFactory.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.config; - -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.commons.temporal.TemporalWorkflowUtils; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Value; -import io.temporal.client.WorkflowClient; -import io.temporal.serviceclient.WorkflowServiceStubs; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.nio.file.Path; - -/** - * Micronaut bean factory for Temporal-related singletons. 
- */ -@Factory -public class TemporalBeanFactory { - - @Singleton - public WorkflowServiceStubs temporalService(final TemporalUtils temporalUtils) { - return temporalUtils.createTemporalService(); - } - - @Singleton - public WorkflowClient workflowClient( - final TemporalUtils temporalUtils, - final WorkflowServiceStubs temporalService) { - return TemporalWorkflowUtils.createWorkflowClient(temporalService, temporalUtils.getNamespace()); - } - - @Singleton - @Named("workspaceRootTemporal") - public Path workspaceRoot(@Value("${airbyte.workspace.root}") final String workspaceRoot) { - return Path.of(workspaceRoot); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/WorkerMode.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/WorkerMode.java deleted file mode 100644 index e87d573bfbad..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/config/WorkerMode.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.config; - -/** - * Defines the different execution modes for the workers application. - */ -public final class WorkerMode { - - private WorkerMode() {} - - /** - * Control plane environment/mode. - */ - public static final String CONTROL_PLANE = "control-plane"; - - /** - * Data plane environment/mode. - */ - public static final String DATA_PLANE = "data-plane"; - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/DeletedWorkflowException.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/DeletedWorkflowException.java deleted file mode 100644 index 9ce16bc81162..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/DeletedWorkflowException.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.exception; - -public class DeletedWorkflowException extends Exception { - - public DeletedWorkflowException(final String message) { - super(message); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/RetryableException.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/RetryableException.java deleted file mode 100644 index 824f29540931..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/RetryableException.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.exception; - -public class RetryableException extends RuntimeException { - - public RetryableException(final Exception e) { - super(e); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/UnreachableWorkflowException.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/UnreachableWorkflowException.java deleted file mode 100644 index ec435642a2a8..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/exception/UnreachableWorkflowException.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.exception; - -public class UnreachableWorkflowException extends Exception { - - public UnreachableWorkflowException(final String message) { - super(message); - } - - public UnreachableWorkflowException(final String message, final Throwable t) { - super(message, t); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/CheckConnectionWorkflow.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/CheckConnectionWorkflow.java deleted file mode 100644 index 73d7fe37e351..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/CheckConnectionWorkflow.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.temporal.workflow.WorkflowInterface; -import io.temporal.workflow.WorkflowMethod; - -@WorkflowInterface -public interface CheckConnectionWorkflow { - - @WorkflowMethod - ConnectorJobOutput run(JobRunConfig jobRunConfig, - IntegrationLauncherConfig launcherConfig, - StandardCheckConnectionInput connectionConfiguration); - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionManagerWorkflow.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionManagerWorkflow.java deleted file mode 100644 index f38a23a4af91..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionManagerWorkflow.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; -import io.temporal.workflow.QueryMethod; -import io.temporal.workflow.SignalMethod; -import io.temporal.workflow.WorkflowInterface; -import io.temporal.workflow.WorkflowMethod; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -@WorkflowInterface -public interface ConnectionManagerWorkflow { - - long NON_RUNNING_JOB_ID = -1; - int NON_RUNNING_ATTEMPT_ID = -1; - - /** - * Workflow method to launch a {@link ConnectionManagerWorkflow}. Launches a workflow responsible - * for scheduling syncs. This workflow will run and then continue running until deleted. - */ - @WorkflowMethod - void run(ConnectionUpdaterInput connectionUpdaterInput); - - /** - * Send a signal that will bypass the waiting time and run a sync. Nothing will happen if a sync is - * already running. - */ - @SignalMethod - void submitManualSync(); - - /** - * Cancel all the current executions of a sync and mark the set the status of the job as canceled. - * Nothing will happen if a sync is not running. - */ - @SignalMethod - void cancelJob(); - - /** - * Cancel a running workflow and then delete the connection and finally make the workflow to stop - * instead of continuing as new. - */ - @SignalMethod - void deleteConnection(); - - /** - * Signal that the connection config has been updated. If nothing was currently running, it will - * continue the workflow as new, which will reload the config. Nothing will happend if a sync is - * running. - */ - @SignalMethod - void connectionUpdated(); - - @SignalMethod - void resetConnection(); - - @SignalMethod - void resetConnectionAndSkipNextScheduling(); - - /** - * Return the current state of the workflow. 
- */ - @QueryMethod - WorkflowState getState(); - - @Data - @NoArgsConstructor - @AllArgsConstructor - class JobInformation { - - private long jobId; - private int attemptId; - - } - - /** - * Return which job and attempt is currently run by the workflow. - */ - @QueryMethod - JobInformation getJobInformation(); - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionNotificationWorkflow.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionNotificationWorkflow.java deleted file mode 100644 index f9035ee86d85..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionNotificationWorkflow.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.temporal.workflow.WorkflowInterface; -import io.temporal.workflow.WorkflowMethod; -import java.io.IOException; -import java.util.UUID; - -@WorkflowInterface -public interface ConnectionNotificationWorkflow { - - @WorkflowMethod - boolean sendSchemaChangeNotification(UUID connectionId, String url) - throws IOException, InterruptedException, ApiException, ConfigNotFoundException, JsonValidationException; - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionUpdaterInput.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionUpdaterInput.java deleted file mode 100644 index 01c498ca1725..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionUpdaterInput.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; -import java.util.UUID; -import javax.annotation.Nullable; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.NonNull; - -@Data -@AllArgsConstructor -@NoArgsConstructor -@Builder -public class ConnectionUpdaterInput { - - @NonNull - private UUID connectionId; - @Nullable - private Long jobId; - /** - * This field is unused, it is kept for compatibility reasons. - */ - @Nullable - private Integer attemptId; - private boolean fromFailure; - private int attemptNumber; - /** - * The state is needed because it has an event listener in it. The event listener only listen to - * state updates which explains why it is a member of the {@link WorkflowState} class. The event - * listener is currently (02/18/22) use for testing only. - */ - @Nullable - private WorkflowState workflowState; - private boolean resetConnection; - @Builder.Default - private final boolean fromJobResetFailure = false; - - @Builder.Default - private boolean skipScheduling = false; - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/DefaultTaskQueueMapper.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/DefaultTaskQueueMapper.java deleted file mode 100644 index 0241957e0d17..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/DefaultTaskQueueMapper.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.temporal.TemporalJobType; -import io.airbyte.config.Geography; -import jakarta.inject.Singleton; - -@Singleton -public class DefaultTaskQueueMapper implements TaskQueueMapper { - - @VisibleForTesting - static final String DEFAULT_SYNC_TASK_QUEUE = TemporalJobType.SYNC.name(); - @VisibleForTesting - static final String DEFAULT_CHECK_TASK_QUEUE = TemporalJobType.CHECK_CONNECTION.name(); - @VisibleForTesting - static final String DEFAULT_DISCOVER_TASK_QUEUE = TemporalJobType.DISCOVER_SCHEMA.name(); - - // By default, map every Geography value to the default task queue. - // To override this behavior, define a new TaskQueueMapper bean with the @Primary annotation. - @Override - public String getTaskQueue(final Geography geography, final TemporalJobType jobType) { - switch (jobType) { - case CHECK_CONNECTION: - return DEFAULT_CHECK_TASK_QUEUE; - case DISCOVER_SCHEMA: - return DEFAULT_DISCOVER_TASK_QUEUE; - case SYNC: - return DEFAULT_SYNC_TASK_QUEUE; - default: - throw new IllegalArgumentException(String.format("Unexpected jobType %s", jobType)); - } - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/DiscoverCatalogWorkflow.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/DiscoverCatalogWorkflow.java deleted file mode 100644 index 7c2954af2068..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/DiscoverCatalogWorkflow.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.config.StandardDiscoverCatalogInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.temporal.workflow.WorkflowInterface; -import io.temporal.workflow.WorkflowMethod; - -@WorkflowInterface -public interface DiscoverCatalogWorkflow { - - @WorkflowMethod - ConnectorJobOutput run(JobRunConfig jobRunConfig, - IntegrationLauncherConfig launcherConfig, - StandardDiscoverCatalogInput config); - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/RouterService.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/RouterService.java deleted file mode 100644 index 54edd956c9bb..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/RouterService.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.commons.temporal.TemporalJobType; -import io.airbyte.config.Geography; -import io.airbyte.config.persistence.ConfigRepository; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.UUID; -import lombok.extern.slf4j.Slf4j; - -/** - * Decides which Task Queue should be used for a given connection's sync operations, based on the - * configured {@link Geography} - */ -@Singleton -@Slf4j -public class RouterService { - - private final ConfigRepository configRepository; - private final TaskQueueMapper taskQueueMapper; - - public RouterService(final ConfigRepository configRepository, final TaskQueueMapper taskQueueMapper) { - this.configRepository = configRepository; - this.taskQueueMapper = taskQueueMapper; - } - - /** - * Given a connectionId, look up the connection's configured {@link Geography} in the config DB and - * use it to determine which Task Queue should be used for this connection's sync. 
- */ - public String getTaskQueue(final UUID connectionId, final TemporalJobType jobType) throws IOException { - final Geography geography = configRepository.getGeographyForConnection(connectionId); - return taskQueueMapper.getTaskQueue(geography, jobType); - } - - public String getTaskQueueForWorkspace(final UUID workspaceId, final TemporalJobType jobType) throws IOException { - final Geography geography = configRepository.getGeographyForWorkspace(workspaceId); - return taskQueueMapper.getTaskQueue(geography, jobType); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/SpecWorkflow.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/SpecWorkflow.java deleted file mode 100644 index 3069fab6dfa4..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/SpecWorkflow.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.temporal.workflow.WorkflowInterface; -import io.temporal.workflow.WorkflowMethod; - -@WorkflowInterface -public interface SpecWorkflow { - - @WorkflowMethod - ConnectorJobOutput run(JobRunConfig jobRunConfig, IntegrationLauncherConfig launcherConfig); - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/SyncWorkflow.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/SyncWorkflow.java deleted file mode 100644 index 16d7f76c0206..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/SyncWorkflow.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.config.StandardSyncInput; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.temporal.workflow.WorkflowInterface; -import io.temporal.workflow.WorkflowMethod; -import java.util.UUID; - -@WorkflowInterface -public interface SyncWorkflow { - - @WorkflowMethod - StandardSyncOutput run(JobRunConfig jobRunConfig, - IntegrationLauncherConfig sourceLauncherConfig, - IntegrationLauncherConfig destinationLauncherConfig, - StandardSyncInput syncInput, - UUID connectionId); - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/TaskQueueMapper.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/TaskQueueMapper.java deleted file mode 100644 index a03601649643..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/TaskQueueMapper.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.scheduling; - -import io.airbyte.commons.temporal.TemporalJobType; -import io.airbyte.config.Geography; - -/** - * Maps a {@link Geography} to a Temporal Task Queue that should be used to run syncs for the given - * Geography. 
- */ -public interface TaskQueueMapper { - - String getTaskQueue(Geography geography, TemporalJobType jobType); - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowInternalState.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowInternalState.java deleted file mode 100644 index 3f52343f10c6..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowInternalState.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.scheduling.state; - -import io.airbyte.config.FailureReason; -import java.util.HashSet; -import java.util.Set; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -@Getter -@Setter -@NoArgsConstructor -public class WorkflowInternalState { - - private Long jobId = null; - private Integer attemptNumber = null; - - // StandardSyncOutput standardSyncOutput = null; - private Set failures = new HashSet<>(); - private Boolean partialSuccess = null; - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowState.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowState.java deleted file mode 100644 index 16da029df10a..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowState.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling.state; - -import io.airbyte.commons.temporal.scheduling.state.listener.WorkflowStateChangedListener; -import io.airbyte.commons.temporal.scheduling.state.listener.WorkflowStateChangedListener.ChangedStateEvent; -import io.airbyte.commons.temporal.scheduling.state.listener.WorkflowStateChangedListener.StateField; -import java.util.UUID; -import lombok.AccessLevel; -import lombok.Getter; -import lombok.NoArgsConstructor; - -@Getter -@NoArgsConstructor -public class WorkflowState { - - public WorkflowState(final UUID id, final WorkflowStateChangedListener stateChangedListener) { - this.id = id; - this.stateChangedListener = stateChangedListener; - } - - private UUID id; - private WorkflowStateChangedListener stateChangedListener; - private boolean running = false; - private boolean deleted = false; - private boolean skipScheduling = false; - private boolean updated = false; - private boolean cancelled = false; - private boolean failed = false; - @Deprecated - @Getter(AccessLevel.NONE) - private final boolean resetConnection = false; - @Deprecated - @Getter(AccessLevel.NONE) - private final boolean continueAsReset = false; - @Deprecated - @Getter(AccessLevel.NONE) - private boolean quarantined = false; - private boolean success = true; - private boolean cancelledForReset = false; - @Deprecated - @Getter(AccessLevel.NONE) - private final boolean resetWithScheduling = false; - private boolean doneWaiting = false; - private boolean skipSchedulingNextWorkflow = false; - - public void setRunning(final boolean running) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.RUNNING, - running); - stateChangedListener.addEvent(id, event); - this.running = running; - } - - public void setDeleted(final boolean deleted) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.DELETED, - deleted); - stateChangedListener.addEvent(id, event); - this.deleted = deleted; - } - - public void 
setSkipScheduling(final boolean skipScheduling) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.SKIPPED_SCHEDULING, - skipScheduling); - stateChangedListener.addEvent(id, event); - this.skipScheduling = skipScheduling; - } - - public void setUpdated(final boolean updated) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.UPDATED, - updated); - stateChangedListener.addEvent(id, event); - this.updated = updated; - } - - public void setCancelled(final boolean cancelled) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.CANCELLED, - cancelled); - stateChangedListener.addEvent(id, event); - this.cancelled = cancelled; - } - - public void setFailed(final boolean failed) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.FAILED, - failed); - stateChangedListener.addEvent(id, event); - this.failed = failed; - } - - public void setSuccess(final boolean success) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.SUCCESS, - success); - stateChangedListener.addEvent(id, event); - this.success = success; - } - - public void setCancelledForReset(final boolean cancelledForReset) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.CANCELLED_FOR_RESET, - cancelledForReset); - stateChangedListener.addEvent(id, event); - this.cancelledForReset = cancelledForReset; - } - - public void setDoneWaiting(final boolean doneWaiting) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.DONE_WAITING, - doneWaiting); - stateChangedListener.addEvent(id, event); - this.doneWaiting = doneWaiting; - } - - public void setSkipSchedulingNextWorkflow(final boolean skipSchedulingNextWorkflow) { - final ChangedStateEvent event = new ChangedStateEvent( - StateField.SKIP_SCHEDULING_NEXT_WORKFLOW, - skipSchedulingNextWorkflow); - stateChangedListener.addEvent(id, event); - this.skipSchedulingNextWorkflow = skipSchedulingNextWorkflow; - } - - // TODO: bmoric -> 
This is noisy when inpecting the list of event, it should be just a single reset - // event. - public void reset() { - this.setRunning(false); - this.setDeleted(false); - this.setSkipScheduling(false); - this.setUpdated(false); - this.setCancelled(false); - this.setFailed(false); - this.setSuccess(false); - this.setDoneWaiting(false); - this.setSkipSchedulingNextWorkflow(false); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/NoopStateListener.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/NoopStateListener.java deleted file mode 100644 index 1ce9642ebe50..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/NoopStateListener.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal.scheduling.state.listener; - -import java.util.LinkedList; -import java.util.Queue; -import java.util.UUID; - -public class NoopStateListener implements WorkflowStateChangedListener { - - @Override - public Queue events(final UUID id) { - return new LinkedList<>(); - } - - @Override - public void addEvent(final UUID id, final ChangedStateEvent event) {} - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/TestStateListener.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/TestStateListener.java deleted file mode 100644 index e738ffb51de8..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/TestStateListener.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling.state.listener; - -import java.util.LinkedList; -import java.util.Optional; -import java.util.Queue; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; - -public class TestStateListener implements WorkflowStateChangedListener { - - private static final ConcurrentHashMap> events = new ConcurrentHashMap<>(); - - public static void reset() { - events.clear(); - } - - @Override - public Queue events(final UUID testId) { - if (!events.containsKey(testId)) { - return new LinkedList<>(); - } - - return events.get(testId); - } - - @Override - public void addEvent(final UUID testId, final ChangedStateEvent event) { - Optional.ofNullable(events.get(testId)) - .or(() -> Optional.of(new LinkedList<>())) - .stream() - .forEach((eventQueue) -> { - eventQueue.add(event); - events.put(testId, eventQueue); - }); - } - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/WorkflowStateChangedListener.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/WorkflowStateChangedListener.java deleted file mode 100644 index d6e371f3fc4b..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/WorkflowStateChangedListener.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.scheduling.state.listener; - -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonSubTypes.Type; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import java.util.Queue; -import java.util.UUID; -import lombok.Value; - -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type") -@JsonSubTypes({ - @Type(value = TestStateListener.class, - name = "test"), - @Type(value = NoopStateListener.class, - name = "noop") -}) -public interface WorkflowStateChangedListener { - - enum StateField { - CANCELLED, - DELETED, - RUNNING, - SKIPPED_SCHEDULING, - UPDATED, - FAILED, - RESET, - CONTINUE_AS_RESET, - SUCCESS, - CANCELLED_FOR_RESET, - RESET_WITH_SCHEDULING, - DONE_WAITING, - SKIP_SCHEDULING_NEXT_WORKFLOW, - } - - @Value - class ChangedStateEvent { - - private final StateField field; - private final boolean value; - - } - - Queue events(UUID testId); - - void addEvent(UUID testId, ChangedStateEvent event); - -} diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/sync/OrchestratorConstants.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/sync/OrchestratorConstants.java deleted file mode 100644 index e35e933acb91..000000000000 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/sync/OrchestratorConstants.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.sync; - -import com.uber.m3.util.ImmutableSet; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.helpers.LogClientSingleton; -import java.util.Set; - -public class OrchestratorConstants { - - // we want to propagate log level, even if it isn't consumed by EnvConfigs - private static final String LOG_LEVEL = "LOG_LEVEL"; - - // necessary for s3/minio logging. used in the log4j2 configuration. - private static final String S3_PATH_STYLE_ACCESS = "S3_PATH_STYLE_ACCESS"; - private static final String FEATURE_FLAG_CLIENT = "FEATURE_FLAG_CLIENT"; - private static final String FEATURE_FLAG_PATH = "FEATURE_FLAG_PATH"; - - // set of env vars necessary for the container orchestrator app to run - public static final Set ENV_VARS_TO_TRANSFER = new ImmutableSet.Builder() - .addAll(EnvConfigs.JOB_SHARED_ENVS.keySet()) - .addAll(Set.of( - EnvConfigs.WORKER_ENVIRONMENT, - EnvConfigs.JOB_KUBE_TOLERATIONS, - EnvConfigs.JOB_KUBE_CURL_IMAGE, - EnvConfigs.JOB_KUBE_BUSYBOX_IMAGE, - EnvConfigs.JOB_KUBE_SOCAT_IMAGE, - EnvConfigs.JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_POLICY, - EnvConfigs.JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET, - EnvConfigs.JOB_KUBE_SIDECAR_CONTAINER_IMAGE_PULL_POLICY, - EnvConfigs.JOB_KUBE_NODE_SELECTORS, - EnvConfigs.JOB_ISOLATED_KUBE_NODE_SELECTORS, - EnvConfigs.USE_CUSTOM_NODE_SELECTOR, - EnvConfigs.DOCKER_NETWORK, - EnvConfigs.LOCAL_DOCKER_MOUNT, - EnvConfigs.WORKSPACE_DOCKER_MOUNT, - EnvConfigs.WORKSPACE_ROOT, - EnvConfigs.JOB_KUBE_NAMESPACE, - EnvConfigs.JOB_MAIN_CONTAINER_CPU_REQUEST, - EnvConfigs.JOB_MAIN_CONTAINER_CPU_LIMIT, - EnvConfigs.JOB_MAIN_CONTAINER_MEMORY_REQUEST, - EnvConfigs.JOB_MAIN_CONTAINER_MEMORY_LIMIT, - EnvConfigs.JOB_DEFAULT_ENV_MAP, - EnvConfigs.LOCAL_ROOT, - EnvConfigs.PUBLISH_METRICS, - EnvConfigs.DD_AGENT_HOST, - EnvConfigs.DD_DOGSTATSD_PORT, - EnvConfigs.METRIC_CLIENT, - LOG_LEVEL, - LogClientSingleton.GCS_LOG_BUCKET, - 
LogClientSingleton.GOOGLE_APPLICATION_CREDENTIALS, - LogClientSingleton.S3_MINIO_ENDPOINT, - S3_PATH_STYLE_ACCESS, - LogClientSingleton.S3_LOG_BUCKET, - LogClientSingleton.AWS_ACCESS_KEY_ID, - LogClientSingleton.AWS_SECRET_ACCESS_KEY, - LogClientSingleton.S3_LOG_BUCKET_REGION, - EnvConfigs.STATE_STORAGE_GCS_BUCKET_NAME, - EnvConfigs.STATE_STORAGE_GCS_APPLICATION_CREDENTIALS, - EnvConfigs.STATE_STORAGE_MINIO_ENDPOINT, - EnvConfigs.STATE_STORAGE_MINIO_BUCKET_NAME, - EnvConfigs.STATE_STORAGE_MINIO_ACCESS_KEY, - EnvConfigs.STATE_STORAGE_MINIO_SECRET_ACCESS_KEY, - EnvConfigs.STATE_STORAGE_S3_BUCKET_NAME, - EnvConfigs.STATE_STORAGE_S3_ACCESS_KEY, - EnvConfigs.STATE_STORAGE_S3_SECRET_ACCESS_KEY, - EnvConfigs.STATE_STORAGE_S3_REGION, - EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, - EnvVariableFeatureFlags.AUTO_DETECT_SCHEMA, - EnvVariableFeatureFlags.APPLY_FIELD_SELECTION, - EnvVariableFeatureFlags.FIELD_SELECTION_WORKSPACES, - EnvVariableFeatureFlags.STRICT_COMPARISON_NORMALIZATION_WORKSPACES, - EnvVariableFeatureFlags.STRICT_COMPARISON_NORMALIZATION_TAG, - FEATURE_FLAG_CLIENT, - FEATURE_FLAG_PATH, - EnvConfigs.LAUNCHDARKLY_KEY, - EnvConfigs.SOCAT_KUBE_CPU_LIMIT, - EnvConfigs.SOCAT_KUBE_CPU_REQUEST)) - .build(); - - public static final String INIT_FILE_ENV_MAP = "envMap.json"; - public static final String INIT_FILE_INPUT = "input.json"; - public static final String INIT_FILE_JOB_RUN_CONFIG = "jobRunConfig.json"; - public static final String INIT_FILE_APPLICATION = "application.txt"; - - // define two ports for stdout/stderr usage on the container orchestrator pod - public static final int PORT1 = 9877; - public static final int PORT2 = 9878; - public static final int PORT3 = 9879; - public static final int PORT4 = 9880; - public static final Set PORTS = Set.of(PORT1, PORT2, PORT3, PORT4); - -} diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/CancellationHandlerTest.java 
b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/CancellationHandlerTest.java deleted file mode 100644 index 270829408828..000000000000 --- a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/CancellationHandlerTest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal; - -import io.airbyte.commons.temporal.stubs.HeartbeatWorkflow; -import io.temporal.activity.Activity; -import io.temporal.activity.ActivityExecutionContext; -import io.temporal.client.WorkflowClient; -import io.temporal.client.WorkflowOptions; -import io.temporal.testing.TestWorkflowEnvironment; -import io.temporal.worker.Worker; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class CancellationHandlerTest { - - @Test - void testCancellationHandler() { - - final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); - - final Worker worker = testEnv.newWorker("task-queue"); - - worker.registerWorkflowImplementationTypes(HeartbeatWorkflow.HeartbeatWorkflowImpl.class); - final WorkflowClient client = testEnv.getWorkflowClient(); - - worker.registerActivitiesImplementations(new HeartbeatWorkflow.HeartbeatActivityImpl(() -> { - final ActivityExecutionContext context = Activity.getExecutionContext(); - new CancellationHandler.TemporalCancellationHandler(context).checkAndHandleCancellation(() -> {}); - })); - - testEnv.start(); - - final HeartbeatWorkflow heartbeatWorkflow = client.newWorkflowStub( - HeartbeatWorkflow.class, - WorkflowOptions.newBuilder() - .setTaskQueue("task-queue") - .build()); - - Assertions.assertDoesNotThrow(heartbeatWorkflow::execute); - - } - -} diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java deleted file mode 100644 index f1401b79bf41..000000000000 --- 
a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java +++ /dev/null @@ -1,800 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.temporal; - -import static io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow.NON_RUNNING_JOB_ID; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import com.google.common.collect.Sets; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; -import io.airbyte.commons.temporal.scheduling.CheckConnectionWorkflow; -import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow; -import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow.JobInformation; -import io.airbyte.commons.temporal.scheduling.DiscoverCatalogWorkflow; -import io.airbyte.commons.temporal.scheduling.SpecWorkflow; -import io.airbyte.commons.temporal.scheduling.SyncWorkflow; -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.config.FailureReason; -import io.airbyte.config.JobCheckConnectionConfig; -import io.airbyte.config.JobDiscoverCatalogConfig; -import 
io.airbyte.config.JobGetSpecConfig; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.config.StandardDiscoverCatalogInput; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.persistence.StreamResetPersistence; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.StreamDescriptor; -import io.temporal.api.enums.v1.WorkflowExecutionStatus; -import io.temporal.api.workflow.v1.WorkflowExecutionInfo; -import io.temporal.api.workflowservice.v1.DescribeWorkflowExecutionResponse; -import io.temporal.api.workflowservice.v1.WorkflowServiceGrpc.WorkflowServiceBlockingStub; -import io.temporal.client.BatchRequest; -import io.temporal.client.WorkflowClient; -import io.temporal.client.WorkflowClientOptions; -import io.temporal.client.WorkflowOptions; -import io.temporal.client.WorkflowStub; -import io.temporal.serviceclient.WorkflowServiceStubs; -import io.temporal.workflow.Functions.Proc; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; - -@SuppressWarnings("PMD.JUnit5TestShouldBePackagePrivate") -public class TemporalClientTest { - - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final UUID JOB_UUID = UUID.randomUUID(); - private static final long JOB_ID = 11L; - private static final int ATTEMPT_ID = 21; - - private static final String CHECK_TASK_QUEUE = "CHECK_CONNECTION"; - private 
static final String DISCOVER_TASK_QUEUE = "DISCOVER_SCHEMA"; - private static final JobRunConfig JOB_RUN_CONFIG = new JobRunConfig() - .withJobId(String.valueOf(JOB_ID)) - .withAttemptId((long) ATTEMPT_ID); - private static final String IMAGE_NAME1 = "hms invincible"; - private static final String IMAGE_NAME2 = "hms defiant"; - private static final IntegrationLauncherConfig UUID_LAUNCHER_CONFIG = new IntegrationLauncherConfig() - .withJobId(String.valueOf(JOB_UUID)) - .withAttemptId((long) ATTEMPT_ID) - .withDockerImage(IMAGE_NAME1); - private static final IntegrationLauncherConfig LAUNCHER_CONFIG = new IntegrationLauncherConfig() - .withJobId(String.valueOf(JOB_ID)) - .withAttemptId((long) ATTEMPT_ID) - .withDockerImage(IMAGE_NAME1); - private static final String NAMESPACE = "namespace"; - private static final StreamDescriptor STREAM_DESCRIPTOR = new StreamDescriptor().withName("name"); - private static final String UNCHECKED = "unchecked"; - private static final String EXCEPTION_MESSAGE = "Force state exception to simulate workflow not running"; - - private WorkflowClient workflowClient; - private TemporalClient temporalClient; - private Path logPath; - private WorkflowServiceStubs workflowServiceStubs; - private WorkflowServiceBlockingStub workflowServiceBlockingStub; - private StreamResetPersistence streamResetPersistence; - private ConnectionManagerUtils connectionManagerUtils; - private NotificationUtils notificationUtils; - private StreamResetRecordsHelper streamResetRecordsHelper; - private Path workspaceRoot; - - @BeforeEach - void setup() throws IOException { - workspaceRoot = Files.createTempDirectory(Path.of("/tmp"), "temporal_client_test"); - logPath = workspaceRoot.resolve(String.valueOf(JOB_ID)).resolve(String.valueOf(ATTEMPT_ID)).resolve(LogClientSingleton.LOG_FILENAME); - workflowClient = mock(WorkflowClient.class); - when(workflowClient.getOptions()).thenReturn(WorkflowClientOptions.newBuilder().setNamespace(NAMESPACE).build()); - 
workflowServiceStubs = mock(WorkflowServiceStubs.class); - when(workflowClient.getWorkflowServiceStubs()).thenReturn(workflowServiceStubs); - workflowServiceBlockingStub = mock(WorkflowServiceBlockingStub.class); - when(workflowServiceStubs.blockingStub()).thenReturn(workflowServiceBlockingStub); - streamResetPersistence = mock(StreamResetPersistence.class); - mockWorkflowStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_RUNNING); - connectionManagerUtils = spy(new ConnectionManagerUtils()); - notificationUtils = spy(new NotificationUtils()); - streamResetRecordsHelper = mock(StreamResetRecordsHelper.class); - temporalClient = - spy(new TemporalClient(workspaceRoot, workflowClient, workflowServiceStubs, streamResetPersistence, connectionManagerUtils, notificationUtils, - streamResetRecordsHelper)); - } - - @Nested - class RestartPerStatus { - - private ConnectionManagerUtils mConnectionManagerUtils; - private NotificationUtils mNotificationUtils; - - @BeforeEach - void init() { - mConnectionManagerUtils = mock(ConnectionManagerUtils.class); - mNotificationUtils = mock(NotificationUtils.class); - - temporalClient = spy( - new TemporalClient(workspaceRoot, workflowClient, workflowServiceStubs, streamResetPersistence, mConnectionManagerUtils, mNotificationUtils, - streamResetRecordsHelper)); - } - - @Test - void testRestartFailed() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - final UUID connectionId = UUID.fromString("ebbfdc4c-295b-48a0-844f-88551dfad3db"); - final Set workflowIds = Set.of(connectionId); - - doReturn(workflowIds) - .when(temporalClient).fetchClosedWorkflowsByStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED); - doReturn(workflowIds) - .when(temporalClient).filterOutRunningWorkspaceId(workflowIds); - 
mockWorkflowStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED); - temporalClient.restartClosedWorkflowByStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED); - verify(mConnectionManagerUtils).safeTerminateWorkflow(eq(workflowClient), eq(connectionId), - anyString()); - verify(mConnectionManagerUtils).startConnectionManagerNoSignal(eq(workflowClient), eq(connectionId)); - } - - } - - @Nested - @DisplayName("Test execute method.") - class ExecuteJob { - - @SuppressWarnings(UNCHECKED) - @Test - void testExecute() { - final Supplier supplier = mock(Supplier.class); - when(supplier.get()).thenReturn("hello"); - - final TemporalResponse response = temporalClient.execute(JOB_RUN_CONFIG, supplier); - - assertNotNull(response); - assertTrue(response.getOutput().isPresent()); - assertEquals("hello", response.getOutput().get()); - assertTrue(response.getMetadata().isSucceeded()); - assertEquals(logPath, response.getMetadata().getLogPath()); - } - - @SuppressWarnings(UNCHECKED) - @Test - void testExecuteWithException() { - final Supplier supplier = mock(Supplier.class); - when(supplier.get()).thenThrow(IllegalStateException.class); - - final TemporalResponse response = temporalClient.execute(JOB_RUN_CONFIG, supplier); - - assertNotNull(response); - assertFalse(response.getOutput().isPresent()); - assertFalse(response.getMetadata().isSucceeded()); - assertEquals(logPath, response.getMetadata().getLogPath()); - } - - @Test - void testExecuteWithConnectorJobFailure() { - final Supplier supplier = mock(Supplier.class); - final FailureReason mockFailureReason = mock(FailureReason.class); - final ConnectorJobOutput connectorJobOutput = new ConnectorJobOutput() - .withFailureReason(mockFailureReason); - when(supplier.get()).thenReturn(connectorJobOutput); - - final TemporalResponse response = temporalClient.execute(JOB_RUN_CONFIG, supplier); - - assertNotNull(response); - assertTrue(response.getOutput().isPresent()); - assertEquals(connectorJobOutput, 
response.getOutput().get()); - assertFalse(response.getMetadata().isSucceeded()); - assertEquals(logPath, response.getMetadata().getLogPath()); - } - - } - - @Nested - @DisplayName("Test job creation for each configuration type.") - class TestJobSubmission { - - @Test - void testSubmitGetSpec() { - final SpecWorkflow specWorkflow = mock(SpecWorkflow.class); - when(workflowClient.newWorkflowStub(SpecWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.GET_SPEC))) - .thenReturn(specWorkflow); - final JobGetSpecConfig getSpecConfig = new JobGetSpecConfig().withDockerImage(IMAGE_NAME1); - - temporalClient.submitGetSpec(JOB_UUID, ATTEMPT_ID, getSpecConfig); - specWorkflow.run(JOB_RUN_CONFIG, UUID_LAUNCHER_CONFIG); - verify(workflowClient).newWorkflowStub(SpecWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.GET_SPEC)); - } - - @Test - void testSubmitCheckConnection() { - final CheckConnectionWorkflow checkConnectionWorkflow = mock(CheckConnectionWorkflow.class); - when( - workflowClient.newWorkflowStub(CheckConnectionWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.CHECK_CONNECTION))) - .thenReturn(checkConnectionWorkflow); - final JobCheckConnectionConfig checkConnectionConfig = new JobCheckConnectionConfig() - .withDockerImage(IMAGE_NAME1) - .withConnectionConfiguration(Jsons.emptyObject()); - final StandardCheckConnectionInput input = new StandardCheckConnectionInput() - .withConnectionConfiguration(checkConnectionConfig.getConnectionConfiguration()); - - temporalClient.submitCheckConnection(JOB_UUID, ATTEMPT_ID, CHECK_TASK_QUEUE, checkConnectionConfig); - checkConnectionWorkflow.run(JOB_RUN_CONFIG, UUID_LAUNCHER_CONFIG, input); - verify(workflowClient).newWorkflowStub(CheckConnectionWorkflow.class, - TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.CHECK_CONNECTION)); - } - - @Test - void testSubmitDiscoverSchema() { - final DiscoverCatalogWorkflow discoverCatalogWorkflow = 
mock(DiscoverCatalogWorkflow.class); - when(workflowClient.newWorkflowStub(DiscoverCatalogWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.DISCOVER_SCHEMA))) - .thenReturn(discoverCatalogWorkflow); - final JobDiscoverCatalogConfig checkConnectionConfig = new JobDiscoverCatalogConfig() - .withDockerImage(IMAGE_NAME1) - .withConnectionConfiguration(Jsons.emptyObject()); - final StandardDiscoverCatalogInput input = new StandardDiscoverCatalogInput() - .withConnectionConfiguration(checkConnectionConfig.getConnectionConfiguration()); - - temporalClient.submitDiscoverSchema(JOB_UUID, ATTEMPT_ID, DISCOVER_TASK_QUEUE, checkConnectionConfig); - discoverCatalogWorkflow.run(JOB_RUN_CONFIG, UUID_LAUNCHER_CONFIG, input); - verify(workflowClient).newWorkflowStub(DiscoverCatalogWorkflow.class, - TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.DISCOVER_SCHEMA)); - } - - @Test - void testSubmitSync() { - final SyncWorkflow discoverCatalogWorkflow = mock(SyncWorkflow.class); - when(workflowClient.newWorkflowStub(SyncWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.SYNC))) - .thenReturn(discoverCatalogWorkflow); - final JobSyncConfig syncConfig = new JobSyncConfig() - .withSourceDockerImage(IMAGE_NAME1) - .withDestinationDockerImage(IMAGE_NAME2) - .withOperationSequence(List.of()) - .withConfiguredAirbyteCatalog(new ConfiguredAirbyteCatalog()); - final AttemptSyncConfig attemptSyncConfig = new AttemptSyncConfig() - .withSourceConfiguration(Jsons.emptyObject()) - .withDestinationConfiguration(Jsons.emptyObject()); - final StandardSyncInput input = new StandardSyncInput() - .withNamespaceDefinition(syncConfig.getNamespaceDefinition()) - .withNamespaceFormat(syncConfig.getNamespaceFormat()) - .withPrefix(syncConfig.getPrefix()) - .withSourceConfiguration(attemptSyncConfig.getSourceConfiguration()) - .withDestinationConfiguration(attemptSyncConfig.getDestinationConfiguration()) - 
.withOperationSequence(syncConfig.getOperationSequence()) - .withCatalog(syncConfig.getConfiguredAirbyteCatalog()) - .withState(attemptSyncConfig.getState()); - - final IntegrationLauncherConfig destinationLauncherConfig = new IntegrationLauncherConfig() - .withJobId(String.valueOf(JOB_ID)) - .withAttemptId((long) ATTEMPT_ID) - .withDockerImage(IMAGE_NAME2); - - temporalClient.submitSync(JOB_ID, ATTEMPT_ID, syncConfig, attemptSyncConfig, CONNECTION_ID); - discoverCatalogWorkflow.run(JOB_RUN_CONFIG, LAUNCHER_CONFIG, destinationLauncherConfig, input, CONNECTION_ID); - verify(workflowClient).newWorkflowStub(SyncWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.SYNC)); - } - - } - - @Nested - @DisplayName("Test related to the migration to the new scheduler") - class TestMigration { - - @DisplayName("Test that the migration is properly done if needed") - @Test - void migrateCalled() { - final UUID nonMigratedId = UUID.randomUUID(); - final UUID migratedId = UUID.randomUUID(); - - when(temporalClient.isInRunningWorkflowCache(connectionManagerUtils.getConnectionManagerName(nonMigratedId))).thenReturn(false); - when(temporalClient.isInRunningWorkflowCache(connectionManagerUtils.getConnectionManagerName(migratedId))).thenReturn(true); - - doNothing() - .when(temporalClient).refreshRunningWorkflow(); - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - doReturn(mConnectionManagerWorkflow) - .when(temporalClient).submitConnectionUpdaterAsync(nonMigratedId); - - temporalClient.migrateSyncIfNeeded(Sets.newHashSet(nonMigratedId, migratedId)); - - verify(temporalClient, times(1)).submitConnectionUpdaterAsync(nonMigratedId); - verify(temporalClient, times(0)).submitConnectionUpdaterAsync(migratedId); - } - - } - - @Nested - @DisplayName("Test delete connection method.") - class ForceCancelConnection { - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test delete connection method when workflow is in 
a running state.") - void testforceCancelConnection() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - - doReturn(true).when(temporalClient).isWorkflowReachable(any(UUID.class)); - when(workflowClient.newWorkflowStub(any(Class.class), anyString())).thenReturn(mConnectionManagerWorkflow); - - final AttemptSyncConfig attemptSyncConfig = new AttemptSyncConfig() - .withSourceConfiguration(Jsons.emptyObject()) - .withDestinationConfiguration(Jsons.emptyObject()); - - final JobSyncConfig syncConfig = new JobSyncConfig() - .withSourceDockerImage(IMAGE_NAME1) - .withDestinationDockerImage(IMAGE_NAME2) - .withOperationSequence(List.of()) - .withConfiguredAirbyteCatalog(new ConfiguredAirbyteCatalog()); - - temporalClient.submitSync(JOB_ID, ATTEMPT_ID, syncConfig, attemptSyncConfig, CONNECTION_ID); - temporalClient.forceDeleteWorkflow(CONNECTION_ID); - - verify(connectionManagerUtils).deleteWorkflowIfItExist(workflowClient, CONNECTION_ID); - } - - } - - @Nested - @DisplayName("Test update connection behavior") - class UpdateConnection { - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test update connection when workflow is running") - void testUpdateConnection() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - - when(mWorkflowState.isRunning()).thenReturn(true); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mConnectionManagerWorkflow); - - temporalClient.update(CONNECTION_ID); - - verify(mConnectionManagerWorkflow, 
Mockito.times(1)).connectionUpdated(); - } - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test update connection method starts a new workflow when workflow is in an unexpected state") - void testUpdateConnectionInUnexpectedState() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - - when(mConnectionManagerWorkflow.getState()).thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); - when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mConnectionManagerWorkflow); - doReturn(mConnectionManagerWorkflow).when(temporalClient).submitConnectionUpdaterAsync(CONNECTION_ID); - - final WorkflowStub untypedWorkflowStub = mock(WorkflowStub.class); - when(workflowClient.newUntypedWorkflowStub(anyString())).thenReturn(untypedWorkflowStub); - - temporalClient.update(CONNECTION_ID); - - // this is only called when updating an existing workflow - verify(mConnectionManagerWorkflow, Mockito.never()).connectionUpdated(); - - verify(untypedWorkflowStub, Mockito.times(1)).terminate(anyString()); - verify(temporalClient, Mockito.times(1)).submitConnectionUpdaterAsync(CONNECTION_ID); - } - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test update connection method does nothing when connection is deleted") - void testUpdateConnectionDeletedWorkflow() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(true); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - mockWorkflowStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED); - - temporalClient.update(CONNECTION_ID); - - // this is only called when updating an existing workflow - verify(mConnectionManagerWorkflow, 
Mockito.never()).connectionUpdated(); - verify(temporalClient).update(CONNECTION_ID); - verifyNoMoreInteractions(temporalClient); - } - - } - - @Nested - @DisplayName("Test manual sync behavior") - class ManualSync { - - @Test - @DisplayName("Test startNewManualSync successful") - void testStartNewManualSyncSuccess() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mWorkflowState.isRunning()).thenReturn(false).thenReturn(true); - when(mConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - - final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); - - assertTrue(result.getJobId().isPresent()); - assertEquals(JOB_ID, result.getJobId().get()); - assertFalse(result.getFailingReason().isPresent()); - verify(mConnectionManagerWorkflow).submitManualSync(); - } - - @Test - @DisplayName("Test startNewManualSync fails if job is already running") - void testStartNewManualSyncAlreadyRunning() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mWorkflowState.isRunning()).thenReturn(true); - when(mConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - - final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); - - 
assertFalse(result.getJobId().isPresent()); - assertTrue(result.getFailingReason().isPresent()); - verify(mConnectionManagerWorkflow, times(0)).submitManualSync(); - } - - @Test - @DisplayName("Test startNewManualSync repairs the workflow if it is in a bad state") - void testStartNewManualSyncRepairsBadWorkflowState() { - final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - when(mTerminatedConnectionManagerWorkflow.getState()) - .thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); - when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); - - final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mNewConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mWorkflowState.isRunning()).thenReturn(false).thenReturn(true); - when(mNewConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); - when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); - final BatchRequest mBatchRequest = mock(BatchRequest.class); - when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); - - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mTerminatedConnectionManagerWorkflow, mTerminatedConnectionManagerWorkflow, - mNewConnectionManagerWorkflow); - - final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); - - assertTrue(result.getJobId().isPresent()); - assertEquals(JOB_ID, result.getJobId().get()); - assertFalse(result.getFailingReason().isPresent()); - verify(workflowClient).signalWithStart(mBatchRequest); - - // Verify that the submitManualSync signal was passed to the batch request by capturing the - 
// argument, - // executing the signal, and verifying that the desired signal was executed - final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); - verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); - final Proc signal = batchRequestAddArgCaptor.getValue(); - signal.apply(); - verify(mNewConnectionManagerWorkflow).submitManualSync(); - } - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test startNewManualSync returns a failure reason when connection is deleted") - void testStartNewManualSyncDeletedWorkflow() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(true); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - mockWorkflowStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED); - - final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); - - // this is only called when updating an existing workflow - assertFalse(result.getJobId().isPresent()); - assertTrue(result.getFailingReason().isPresent()); - verify(mConnectionManagerWorkflow, times(0)).submitManualSync(); - } - - } - - @Nested - @DisplayName("Test cancellation behavior") - class Cancellation { - - @Test - @DisplayName("Test startNewCancellation successful") - void testStartNewCancellationSuccess() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mWorkflowState.isRunning()).thenReturn(true).thenReturn(false); - when(mConnectionManagerWorkflow.getJobInformation()).thenReturn(new 
JobInformation(JOB_ID, ATTEMPT_ID)); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - - final ManualOperationResult result = temporalClient.startNewCancellation(CONNECTION_ID); - - assertTrue(result.getJobId().isPresent()); - assertEquals(JOB_ID, result.getJobId().get()); - assertFalse(result.getFailingReason().isPresent()); - verify(mConnectionManagerWorkflow).cancelJob(); - verify(streamResetRecordsHelper).deleteStreamResetRecordsForJob(JOB_ID, CONNECTION_ID); - } - - @Test - @DisplayName("Test startNewCancellation repairs the workflow if it is in a bad state") - void testStartNewCancellationRepairsBadWorkflowState() { - final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - when(mTerminatedConnectionManagerWorkflow.getState()) - .thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); - when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); - - final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mNewConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mWorkflowState.isRunning()).thenReturn(true).thenReturn(false); - when(mNewConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); - when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); - final BatchRequest mBatchRequest = mock(BatchRequest.class); - when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); - - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mTerminatedConnectionManagerWorkflow, mTerminatedConnectionManagerWorkflow, - mNewConnectionManagerWorkflow); - - final ManualOperationResult 
result = temporalClient.startNewCancellation(CONNECTION_ID); - - assertTrue(result.getJobId().isPresent()); - assertEquals(NON_RUNNING_JOB_ID, result.getJobId().get()); - assertFalse(result.getFailingReason().isPresent()); - verify(workflowClient).signalWithStart(mBatchRequest); - - // Verify that the cancelJob signal was passed to the batch request by capturing the argument, - // executing the signal, and verifying that the desired signal was executed - final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); - verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); - final Proc signal = batchRequestAddArgCaptor.getValue(); - signal.apply(); - verify(mNewConnectionManagerWorkflow).cancelJob(); - } - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test startNewCancellation returns a failure reason when connection is deleted") - void testStartNewCancellationDeletedWorkflow() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(true); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - mockWorkflowStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED); - - final ManualOperationResult result = temporalClient.startNewCancellation(CONNECTION_ID); - - // this is only called when updating an existing workflow - assertFalse(result.getJobId().isPresent()); - assertTrue(result.getFailingReason().isPresent()); - verify(mConnectionManagerWorkflow, times(0)).cancelJob(); - } - - } - - @Nested - @DisplayName("Test reset connection behavior") - class ResetConnection { - - @Test - @DisplayName("Test resetConnection successful") - void testResetConnectionSuccess() throws IOException { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = 
mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mWorkflowState.isRunning()).thenReturn(false); - final long jobId1 = 1; - final long jobId2 = 2; - when(mConnectionManagerWorkflow.getJobInformation()).thenReturn( - new JobInformation(jobId1, 0), - new JobInformation(jobId1, 0), - new JobInformation(NON_RUNNING_JOB_ID, 0), - new JobInformation(NON_RUNNING_JOB_ID, 0), - new JobInformation(jobId2, 0), - new JobInformation(jobId2, 0)); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - - final List streamsToReset = List.of(STREAM_DESCRIPTOR); - final ManualOperationResult result = temporalClient.resetConnection(CONNECTION_ID, streamsToReset, false); - - verify(streamResetPersistence).createStreamResets(CONNECTION_ID, streamsToReset); - - assertTrue(result.getJobId().isPresent()); - assertEquals(jobId2, result.getJobId().get()); - assertFalse(result.getFailingReason().isPresent()); - verify(mConnectionManagerWorkflow).resetConnection(); - } - - @Test - @DisplayName("Test resetConnection successful") - void testResetConnectionSuccessAndContinue() throws IOException { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mWorkflowState.isRunning()).thenReturn(false); - final long jobId1 = 1; - final long jobId2 = 2; - when(mConnectionManagerWorkflow.getJobInformation()).thenReturn( - new JobInformation(jobId1, 0), - new JobInformation(jobId1, 0), - new JobInformation(NON_RUNNING_JOB_ID, 0), - new JobInformation(NON_RUNNING_JOB_ID, 0), - new JobInformation(jobId2, 0), - new 
JobInformation(jobId2, 0)); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - - final List streamsToReset = List.of(STREAM_DESCRIPTOR); - final ManualOperationResult result = temporalClient.resetConnection(CONNECTION_ID, streamsToReset, true); - - verify(streamResetPersistence).createStreamResets(CONNECTION_ID, streamsToReset); - - assertTrue(result.getJobId().isPresent()); - assertEquals(jobId2, result.getJobId().get()); - assertFalse(result.getFailingReason().isPresent()); - verify(mConnectionManagerWorkflow).resetConnectionAndSkipNextScheduling(); - } - - @Test - @DisplayName("Test resetConnection repairs the workflow if it is in a bad state") - void testResetConnectionRepairsBadWorkflowState() throws IOException { - final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - when(mTerminatedConnectionManagerWorkflow.getState()) - .thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); - when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); - - final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mNewConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(mWorkflowState.isRunning()).thenReturn(false); - when(mNewConnectionManagerWorkflow.getJobInformation()).thenReturn( - new JobInformation(NON_RUNNING_JOB_ID, 0), - new JobInformation(NON_RUNNING_JOB_ID, 0), - new JobInformation(JOB_ID, 0), - new JobInformation(JOB_ID, 0)); - when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); - final BatchRequest mBatchRequest = mock(BatchRequest.class); - when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); - - 
when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mTerminatedConnectionManagerWorkflow, mTerminatedConnectionManagerWorkflow, - mNewConnectionManagerWorkflow); - - final List streamsToReset = List.of(STREAM_DESCRIPTOR); - final ManualOperationResult result = temporalClient.resetConnection(CONNECTION_ID, streamsToReset, false); - - verify(streamResetPersistence).createStreamResets(CONNECTION_ID, streamsToReset); - - assertTrue(result.getJobId().isPresent()); - assertEquals(JOB_ID, result.getJobId().get()); - assertFalse(result.getFailingReason().isPresent()); - verify(workflowClient).signalWithStart(mBatchRequest); - - // Verify that the resetConnection signal was passed to the batch request by capturing the argument, - // executing the signal, and verifying that the desired signal was executed - final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); - verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); - final Proc signal = batchRequestAddArgCaptor.getValue(); - signal.apply(); - verify(mNewConnectionManagerWorkflow).resetConnection(); - } - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test resetConnection returns a failure reason when connection is deleted") - void testResetConnectionDeletedWorkflow() throws IOException { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(true); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - mockWorkflowStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED); - - final List streamsToReset = List.of(STREAM_DESCRIPTOR); - final ManualOperationResult result = temporalClient.resetConnection(CONNECTION_ID, streamsToReset, false); - - 
verify(streamResetPersistence).createStreamResets(CONNECTION_ID, streamsToReset); - - // this is only called when updating an existing workflow - assertFalse(result.getJobId().isPresent()); - assertTrue(result.getFailingReason().isPresent()); - verify(mConnectionManagerWorkflow, times(0)).resetConnection(); - } - - } - - @Test - @DisplayName("Test manual operation on completed workflow causes a restart") - void testManualOperationOnCompletedWorkflow() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(false); - when(workflowServiceBlockingStub.describeWorkflowExecution(any())) - .thenReturn(DescribeWorkflowExecutionResponse.newBuilder().setWorkflowExecutionInfo( - WorkflowExecutionInfo.newBuilder().setStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED).buildPartial()).build()) - .thenReturn(DescribeWorkflowExecutionResponse.newBuilder().setWorkflowExecutionInfo( - WorkflowExecutionInfo.newBuilder().setStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_RUNNING).buildPartial()).build()); - - final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mNewWorkflowState = mock(WorkflowState.class); - when(mNewConnectionManagerWorkflow.getState()).thenReturn(mNewWorkflowState); - when(mNewWorkflowState.isRunning()).thenReturn(false).thenReturn(true); - when(mNewConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); - when(workflowClient.newWorkflowStub(any(Class.class), any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); - final BatchRequest mBatchRequest = mock(BatchRequest.class); - when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); - - 
when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow, mConnectionManagerWorkflow, - mNewConnectionManagerWorkflow); - - final WorkflowStub mWorkflowStub = mock(WorkflowStub.class); - when(workflowClient.newUntypedWorkflowStub(anyString())).thenReturn(mWorkflowStub); - - final ManualOperationResult result = temporalClient.startNewManualSync(CONNECTION_ID); - - assertTrue(result.getJobId().isPresent()); - assertEquals(JOB_ID, result.getJobId().get()); - assertFalse(result.getFailingReason().isPresent()); - verify(workflowClient).signalWithStart(mBatchRequest); - verify(mWorkflowStub).terminate(anyString()); - - // Verify that the submitManualSync signal was passed to the batch request by capturing the - // argument, - // executing the signal, and verifying that the desired signal was executed - final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); - verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); - final Proc signal = batchRequestAddArgCaptor.getValue(); - signal.apply(); - verify(mNewConnectionManagerWorkflow).submitManualSync(); - } - - private void mockWorkflowStatus(final WorkflowExecutionStatus status) { - when(workflowServiceBlockingStub.describeWorkflowExecution(any())).thenReturn( - DescribeWorkflowExecutionResponse.newBuilder().setWorkflowExecutionInfo( - WorkflowExecutionInfo.newBuilder().setStatus(status).buildPartial()).build()); - } - -} diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalUtilsTest.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalUtilsTest.java deleted file mode 100644 index 17da1e94b9f6..000000000000 --- a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalUtilsTest.java +++ /dev/null @@ -1,416 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.temporal.stubs.HeartbeatWorkflow; -import io.temporal.activity.Activity; -import io.temporal.activity.ActivityCancellationType; -import io.temporal.activity.ActivityExecutionContext; -import io.temporal.activity.ActivityInterface; -import io.temporal.activity.ActivityMethod; -import io.temporal.activity.ActivityOptions; -import io.temporal.api.common.v1.WorkflowExecution; -import io.temporal.api.namespace.v1.NamespaceInfo; -import io.temporal.api.workflow.v1.WorkflowExecutionInfo; -import io.temporal.api.workflowservice.v1.DescribeNamespaceResponse; -import io.temporal.client.WorkflowClient; -import io.temporal.client.WorkflowFailedException; -import io.temporal.client.WorkflowOptions; -import io.temporal.serviceclient.WorkflowServiceStubs; -import io.temporal.testing.TestWorkflowEnvironment; -import io.temporal.worker.Worker; -import io.temporal.workflow.Workflow; -import io.temporal.workflow.WorkflowInterface; -import io.temporal.workflow.WorkflowMethod; -import java.time.Duration; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Supplier; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - 
-@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class TemporalUtilsTest { - - private static final String TASK_QUEUE = "default"; - private static final String BEFORE = "before: {}"; - - @Test - void testAsyncExecute() throws Exception { - final TemporalUtils temporalUtils = new TemporalUtils(null, null, null, null, null, null, null); - final CountDownLatch countDownLatch = new CountDownLatch(1); - - final VoidCallable callable = mock(VoidCallable.class); - - // force it to wait until we can verify that it is running. - doAnswer((a) -> { - countDownLatch.await(1, TimeUnit.MINUTES); - return null; - }).when(callable).call(); - - final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); - final WorkflowServiceStubs temporalService = testEnv.getWorkflowService(); - final Worker worker = testEnv.newWorker(TASK_QUEUE); - worker.registerWorkflowImplementationTypes(TestWorkflow.WorkflowImpl.class); - final WorkflowClient client = testEnv.getWorkflowClient(); - worker.registerActivitiesImplementations(new TestWorkflow.Activity1Impl(callable)); - testEnv.start(); - - final TestWorkflow workflowStub = client.newWorkflowStub(TestWorkflow.class, WorkflowOptions.newBuilder().setTaskQueue(TASK_QUEUE).build()); - final ImmutablePair> pair = temporalUtils.asyncExecute( - workflowStub, - workflowStub::run, - "whatever", - String.class); - - final WorkflowExecution workflowExecution = pair.getLeft(); - final String workflowId = workflowExecution.getWorkflowId(); - final String runId = workflowExecution.getRunId(); - - final WorkflowExecutionInfo workflowExecutionInfo = temporalService.blockingStub().listOpenWorkflowExecutions(null).getExecutionsList().get(0); - assertEquals(workflowId, workflowExecutionInfo.getExecution().getWorkflowId()); - assertEquals(runId, workflowExecutionInfo.getExecution().getRunId()); - - // allow the workflow to complete. 
- countDownLatch.countDown(); - - final String result = pair.getRight().get(1, TimeUnit.MINUTES); - assertEquals("completed", result); - } - - @Test - void testWaitForTemporalServerAndLogThrowsException() { - final TemporalUtils temporalUtils = new TemporalUtils(null, null, null, null, null, null, null); - final WorkflowServiceStubs workflowServiceStubs = mock(WorkflowServiceStubs.class, Mockito.RETURNS_DEEP_STUBS); - final DescribeNamespaceResponse describeNamespaceResponse = mock(DescribeNamespaceResponse.class); - final NamespaceInfo namespaceInfo = mock(NamespaceInfo.class); - final Supplier serviceSupplier = mock(Supplier.class); - final String namespace = "default"; - - when(namespaceInfo.isInitialized()).thenReturn(true); - when(namespaceInfo.getName()).thenReturn(namespace); - when(describeNamespaceResponse.getNamespaceInfo()).thenReturn(namespaceInfo); - when(serviceSupplier.get()) - .thenThrow(RuntimeException.class) - .thenReturn(workflowServiceStubs); - when(workflowServiceStubs.blockingStub().describeNamespace(any())) - .thenThrow(RuntimeException.class) - .thenReturn(describeNamespaceResponse); - temporalUtils.getTemporalClientWhenConnected(Duration.ofMillis(10), Duration.ofSeconds(1), Duration.ofSeconds(0), serviceSupplier, namespace); - } - - @Test - void testWaitThatTimesOut() { - final TemporalUtils temporalUtils = new TemporalUtils(null, null, null, null, null, null, null); - final WorkflowServiceStubs workflowServiceStubs = mock(WorkflowServiceStubs.class, Mockito.RETURNS_DEEP_STUBS); - final DescribeNamespaceResponse describeNamespaceResponse = mock(DescribeNamespaceResponse.class); - final NamespaceInfo namespaceInfo = mock(NamespaceInfo.class); - final Supplier serviceSupplier = mock(Supplier.class); - final String namespace = "default"; - - when(namespaceInfo.getName()).thenReturn(namespace); - when(describeNamespaceResponse.getNamespaceInfo()).thenReturn(namespaceInfo); - when(serviceSupplier.get()) - .thenThrow(RuntimeException.class) - 
.thenReturn(workflowServiceStubs); - when(workflowServiceStubs.blockingStub().listNamespaces(any()).getNamespacesList()) - .thenThrow(RuntimeException.class) - .thenReturn(List.of(describeNamespaceResponse)); - assertThrows(RuntimeException.class, () -> { - temporalUtils.getTemporalClientWhenConnected(Duration.ofMillis(100), Duration.ofMillis(10), Duration.ofSeconds(0), serviceSupplier, namespace); - }); - } - - @Test - void testRuntimeExceptionOnHeartbeatWrapper() { - final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); - final Worker worker = testEnv.newWorker(TASK_QUEUE); - worker.registerWorkflowImplementationTypes(TestFailingWorkflow.WorkflowImpl.class); - final WorkflowClient client = testEnv.getWorkflowClient(); - final AtomicInteger timesReachedEnd = new AtomicInteger(0); - worker.registerActivitiesImplementations(new TestFailingWorkflow.Activity1Impl(timesReachedEnd)); - testEnv.start(); - - final TestFailingWorkflow workflowStub = - client.newWorkflowStub(TestFailingWorkflow.class, WorkflowOptions.newBuilder().setTaskQueue(TASK_QUEUE).build()); - - // test runtime first - assertThrows(RuntimeException.class, () -> { - workflowStub.run("runtime"); - }); - - // we should never retry enough to reach the end - assertEquals(0, timesReachedEnd.get()); - } - - @Test - void testWorkerExceptionOnHeartbeatWrapper() { - final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); - final Worker worker = testEnv.newWorker(TASK_QUEUE); - worker.registerWorkflowImplementationTypes(TestFailingWorkflow.WorkflowImpl.class); - final WorkflowClient client = testEnv.getWorkflowClient(); - final AtomicInteger timesReachedEnd = new AtomicInteger(0); - worker.registerActivitiesImplementations(new TestFailingWorkflow.Activity1Impl(timesReachedEnd)); - testEnv.start(); - - final TestFailingWorkflow workflowStub = - client.newWorkflowStub(TestFailingWorkflow.class, WorkflowOptions.newBuilder().setTaskQueue(TASK_QUEUE).build()); - - // 
throws workerexception wrapped in a WorkflowFailedException - assertThrows(WorkflowFailedException.class, () -> workflowStub.run("worker")); - - // we should never retry enough to reach the end - assertEquals(0, timesReachedEnd.get()); - } - - @Test - void testHeartbeatWithContext() throws InterruptedException { - final TemporalUtils temporalUtils = new TemporalUtils(null, null, null, null, null, null, null); - final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); - - final Worker worker = testEnv.newWorker(TASK_QUEUE); - - worker.registerWorkflowImplementationTypes(HeartbeatWorkflow.HeartbeatWorkflowImpl.class); - final WorkflowClient client = testEnv.getWorkflowClient(); - - final CountDownLatch latch = new CountDownLatch(2); - - worker.registerActivitiesImplementations(new HeartbeatWorkflow.HeartbeatActivityImpl(() -> { - final ActivityExecutionContext context = Activity.getExecutionContext(); - temporalUtils.withBackgroundHeartbeat( - // TODO (itaseski) figure out how to decrease heartbeat intervals using reflection - () -> { - latch.await(); - return new Object(); - }, - () -> { - latch.countDown(); - return context; - }); - })); - - testEnv.start(); - - final HeartbeatWorkflow heartbeatWorkflow = client.newWorkflowStub( - HeartbeatWorkflow.class, - WorkflowOptions.newBuilder() - .setTaskQueue(TASK_QUEUE) - .build()); - - // use async execution to avoid blocking the test thread - WorkflowClient.start(heartbeatWorkflow::execute); - - assertTrue(latch.await(15, TimeUnit.SECONDS)); - - } - - @Test - void testHeartbeatWithContextAndCallbackRef() throws InterruptedException { - final TemporalUtils temporalUtils = new TemporalUtils(null, null, null, null, null, null, null); - final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); - - final Worker worker = testEnv.newWorker(TASK_QUEUE); - - worker.registerWorkflowImplementationTypes(HeartbeatWorkflow.HeartbeatWorkflowImpl.class); - final WorkflowClient client = 
testEnv.getWorkflowClient(); - - final CountDownLatch latch = new CountDownLatch(2); - - worker.registerActivitiesImplementations(new HeartbeatWorkflow.HeartbeatActivityImpl(() -> { - final ActivityExecutionContext context = Activity.getExecutionContext(); - temporalUtils.withBackgroundHeartbeat( - // TODO (itaseski) figure out how to decrease heartbeat intervals using reflection - new AtomicReference<>(() -> {}), - () -> { - latch.await(); - return new Object(); - }, - () -> { - latch.countDown(); - return context; - }); - })); - - testEnv.start(); - - final HeartbeatWorkflow heartbeatWorkflow = client.newWorkflowStub( - HeartbeatWorkflow.class, - WorkflowOptions.newBuilder() - .setTaskQueue(TASK_QUEUE) - .build()); - - // use async execution to avoid blocking the test thread - WorkflowClient.start(heartbeatWorkflow::execute); - - assertTrue(latch.await(15, TimeUnit.SECONDS)); - - } - - @WorkflowInterface - public interface TestWorkflow { - - @WorkflowMethod - String run(String arg); - - class WorkflowImpl implements TestWorkflow { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowImpl.class); - - private final ActivityOptions options = ActivityOptions.newBuilder() - .setScheduleToCloseTimeout(Duration.ofDays(3)) - .setCancellationType(ActivityCancellationType.WAIT_CANCELLATION_COMPLETED) - .setRetryOptions(TemporalUtils.NO_RETRY) - .build(); - - private final Activity1 activity1 = Workflow.newActivityStub(Activity1.class, options); - private final Activity1 activity2 = Workflow.newActivityStub(Activity1.class, options); - - @Override - public String run(final String arg) { - LOGGER.info("workflow before activity 1"); - activity1.activity(); - LOGGER.info("workflow before activity 2"); - activity2.activity(); - LOGGER.info("workflow after all activities"); - - return "completed"; - } - - } - - @ActivityInterface - interface Activity1 { - - @ActivityMethod - void activity(); - - } - - class Activity1Impl implements Activity1 { - - private 
static final Logger LOGGER = LoggerFactory.getLogger(Activity1Impl.class); - private static final String ACTIVITY1 = "activity1"; - - private final VoidCallable callable; - - public Activity1Impl(final VoidCallable callable) { - this.callable = callable; - } - - @Override - public void activity() { - LOGGER.info(BEFORE, ACTIVITY1); - try { - callable.call(); - } catch (final Exception e) { - throw new RuntimeException(e); - } - LOGGER.info(BEFORE, ACTIVITY1); - } - - } - - } - - @WorkflowInterface - public interface TestFailingWorkflow { - - @WorkflowMethod - String run(String arg); - - class WorkflowImpl implements TestFailingWorkflow { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowImpl.class); - - final ActivityOptions options = ActivityOptions.newBuilder() - .setScheduleToCloseTimeout(Duration.ofMinutes(30)) - .setStartToCloseTimeout(Duration.ofMinutes(30)) - .setScheduleToStartTimeout(Duration.ofMinutes(30)) - .setCancellationType(ActivityCancellationType.WAIT_CANCELLATION_COMPLETED) - .setRetryOptions(TemporalUtils.NO_RETRY) - .setHeartbeatTimeout(Duration.ofSeconds(1)) - .build(); - - private final Activity1 activity1 = Workflow.newActivityStub(Activity1.class, options); - - @Override - public String run(final String arg) { - - LOGGER.info("workflow before activity 1"); - activity1.activity(arg); - LOGGER.info("workflow after all activities"); - - return "completed"; - } - - } - - @ActivityInterface - interface Activity1 { - - @ActivityMethod - void activity(String arg); - - } - - class Activity1Impl implements Activity1 { - - private static final Logger LOGGER = LoggerFactory.getLogger(TestWorkflow.Activity1Impl.class); - private static final String ACTIVITY1 = "activity1"; - - private final AtomicInteger timesReachedEnd; - - private final TemporalUtils temporalUtils = new TemporalUtils(null, null, null, null, null, null, null); - - public Activity1Impl(final AtomicInteger timesReachedEnd) { - this.timesReachedEnd = 
timesReachedEnd; - } - - @Override - public void activity(final String arg) { - LOGGER.info(BEFORE, ACTIVITY1); - final ActivityExecutionContext context = Activity.getExecutionContext(); - temporalUtils.withBackgroundHeartbeat( - new AtomicReference<>(null), - () -> { - if (timesReachedEnd.get() == 0) { - if ("runtime".equals(arg)) { - throw new RuntimeException("failed"); - } else if ("timeout".equals(arg)) { - Thread.sleep(10000); - return null; - } else { - throw new Exception("failed"); - } - } else { - return null; - } - }, - () -> context); - timesReachedEnd.incrementAndGet(); - LOGGER.info(BEFORE, ACTIVITY1); - } - - } - - } - -} diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/stubs/HeartbeatWorkflow.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/stubs/HeartbeatWorkflow.java deleted file mode 100644 index 0790a00c623b..000000000000 --- a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/stubs/HeartbeatWorkflow.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.temporal.stubs; - -import io.airbyte.commons.temporal.TemporalUtils; -import io.temporal.activity.ActivityCancellationType; -import io.temporal.activity.ActivityInterface; -import io.temporal.activity.ActivityMethod; -import io.temporal.activity.ActivityOptions; -import io.temporal.workflow.Workflow; -import io.temporal.workflow.WorkflowInterface; -import io.temporal.workflow.WorkflowMethod; -import java.time.Duration; - -@WorkflowInterface -public interface HeartbeatWorkflow { - - @WorkflowMethod - void execute(); - - class HeartbeatWorkflowImpl implements HeartbeatWorkflow { - - private final ActivityOptions options = ActivityOptions.newBuilder() - .setScheduleToCloseTimeout(Duration.ofDays(1)) - .setCancellationType(ActivityCancellationType.WAIT_CANCELLATION_COMPLETED) - .setRetryOptions(TemporalUtils.NO_RETRY) - .build(); - - private final HeartbeatActivity heartbeatActivity = Workflow.newActivityStub(HeartbeatActivity.class, options); - - @Override - public void execute() { - heartbeatActivity.heartbeat(); - } - - } - - @ActivityInterface - interface HeartbeatActivity { - - @ActivityMethod - void heartbeat(); - - } - - class HeartbeatActivityImpl implements HeartbeatActivity { - - private final Runnable runnable; - - public HeartbeatActivityImpl(final Runnable runnable) { - this.runnable = runnable; - } - - @Override - public void heartbeat() { - runnable.run(); - } - - } - -} diff --git a/airbyte-commons-worker/build.gradle b/airbyte-commons-worker/build.gradle index 8b46f830a30d..24ea89e5c754 100644 --- a/airbyte-commons-worker/build.gradle +++ b/airbyte-commons-worker/build.gradle @@ -21,15 +21,11 @@ dependencies { implementation project(':airbyte-api') implementation project(':airbyte-commons-protocol') - implementation project(':airbyte-commons-temporal') implementation project(':airbyte-config:config-models') implementation project(':airbyte-config:config-persistence') implementation project(':airbyte-featureflag') 
implementation project(':airbyte-json-validation') - implementation project(':airbyte-metrics:metrics-lib') - implementation project(':airbyte-persistence:job-persistence') implementation libs.airbyte.protocol - implementation project(':airbyte-worker-models') testAnnotationProcessor platform(libs.micronaut.bom) testAnnotationProcessor libs.bundles.micronaut.test.annotation.processor diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/ContainerOrchestratorConfig.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/ContainerOrchestratorConfig.java deleted file mode 100644 index 429b94740d30..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/ContainerOrchestratorConfig.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers; - -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.workers.storage.DocumentStoreClient; -import io.fabric8.kubernetes.client.KubernetesClient; -import java.util.Map; - -public record ContainerOrchestratorConfig( - String namespace, - DocumentStoreClient documentStoreClient, - Map environmentVariables, - KubernetesClient kubernetesClient, - String secretName, - String secretMountPath, - String dataPlaneCredsSecretName, - String dataPlaneCredsSecretMountPath, - String containerOrchestratorImage, - String containerOrchestratorImagePullPolicy, - String googleApplicationCredentials, - WorkerEnvironment workerEnvironment) {} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java deleted file mode 100644 index 124c98a9d95e..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.PerfBackgroundJsonValidation; -import io.airbyte.featureflag.Workspace; -import io.airbyte.protocol.models.AirbyteRecordMessage; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import io.airbyte.workers.exception.RecordSchemaValidationException; -import java.lang.invoke.MethodHandles; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Validates that AirbyteRecordMessage data conforms to the JSON schema defined by the source's - * configured catalog - */ -public class RecordSchemaValidator { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private final FeatureFlagClient featureFlagClient; - private final UUID workspaceId; - private static final JsonSchemaValidator validator = new JsonSchemaValidator(); - private final Map streams; - - public RecordSchemaValidator(final FeatureFlagClient featureFlagClient, - final UUID workspaceId, - final Map streamNamesToSchemas) { - this.featureFlagClient = featureFlagClient; - this.workspaceId = workspaceId; - // streams is Map of a stream source namespace + name mapped to the stream schema - // for easy access when we check each record's schema - this.streams = streamNamesToSchemas; - // initialize schema validator to avoid creating validators each time. 
- for (final AirbyteStreamNameNamespacePair stream : streamNamesToSchemas.keySet()) { - // We must choose a JSON validator version for validating the schema - // Rather than allowing connectors to use any version, we enforce validation using V7 - final var schema = streams.get(stream); - ((ObjectNode) schema).put("$schema", "http://json-schema.org/draft-07/schema#"); - validator.initializeSchemaValidator(stream.toString(), schema); - } - - } - - /** - * Takes an AirbyteRecordMessage and uses the JsonSchemaValidator to validate that its data conforms - * to the stream's schema If it does not, this method throws a RecordSchemaValidationException - * - * @param message - * @throws RecordSchemaValidationException - */ - public void validateSchema(final AirbyteRecordMessage message, final AirbyteStreamNameNamespacePair messageStream) - throws RecordSchemaValidationException { - - final JsonNode messageData = message.getData(); - final JsonNode matchingSchema = streams.get(messageStream); - - if (workspaceId != null) { - if (featureFlagClient.enabled(PerfBackgroundJsonValidation.INSTANCE, new Workspace(workspaceId))) { - log.debug("feature flag enabled for workspace {}", workspaceId); - } else { - log.debug("feature flag disabled for workspace {}", workspaceId); - } - } else { - log.debug("workspace id is null"); - } - - try { - validator.ensureInitializedSchema(messageStream.toString(), messageData); - } catch (final JsonValidationException e) { - final List invalidRecordDataAndType = validator.getValidationMessageArgs(matchingSchema, messageData); - final List invalidFields = validator.getValidationMessagePaths(matchingSchema, messageData); - - final Set validationMessagesToDisplay = new HashSet<>(); - for (int i = 0; i < invalidFields.size(); i++) { - final StringBuilder expectedType = new StringBuilder(); - if (invalidRecordDataAndType.size() > i && invalidRecordDataAndType.get(i).length > 1) { - expectedType.append(invalidRecordDataAndType.get(i)[1]); - } - final 
StringBuilder newMessage = new StringBuilder(); - newMessage.append(invalidFields.get(i)); - newMessage.append(" is of an incorrect type."); - if (expectedType.length() > 0) { - newMessage.append(" Expected it to be " + expectedType); - } - validationMessagesToDisplay.add(newMessage.toString()); - } - - throw new RecordSchemaValidationException(validationMessagesToDisplay, - String.format("Record schema validation failed for %s", messageStream), e); - } - - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerMetricReporter.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerMetricReporter.java deleted file mode 100644 index 53386a794b7e..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerMetricReporter.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers; - -import io.airbyte.metrics.lib.MetricAttribute; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.metrics.lib.OssMetricsRegistry; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; - -public class WorkerMetricReporter { - - private final String dockerRepo; - private final String dockerVersion; - private final MetricClient metricClient; - - public WorkerMetricReporter(final MetricClient metricClient, final String dockerImage) { - final String[] dockerImageInfo = dockerImage.split(":"); - this.dockerRepo = dockerImageInfo[0]; - this.dockerVersion = dockerImageInfo.length > 1 ? 
dockerImageInfo[1] : ""; - this.metricClient = metricClient; - } - - public void trackSchemaValidationError(final AirbyteStreamNameNamespacePair stream) { - metricClient.count(OssMetricsRegistry.NUM_SOURCE_STREAMS_WITH_RECORD_SCHEMA_VALIDATION_ERRORS, 1, new MetricAttribute("docker_repo", dockerRepo), - new MetricAttribute("docker_version", dockerVersion), new MetricAttribute("stream", stream.toString())); - } - - public void trackStateMetricTrackerError() { - metricClient.count(OssMetricsRegistry.STATE_METRIC_TRACKER_ERROR, 1, new MetricAttribute("docker_repo", dockerRepo), - new MetricAttribute("docker_version", dockerVersion)); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/config/ApiClientBeanFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/config/ApiClientBeanFactory.java deleted file mode 100644 index f31a66f8a044..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/config/ApiClientBeanFactory.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.config; - -import com.auth0.jwt.JWT; -import com.auth0.jwt.JWTCreator; -import com.auth0.jwt.algorithms.Algorithm; -import com.google.auth.oauth2.ServiceAccountCredentials; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.AttemptApi; -import io.airbyte.api.client.generated.ConnectionApi; -import io.airbyte.api.client.generated.DestinationApi; -import io.airbyte.api.client.generated.JobsApi; -import io.airbyte.api.client.generated.SourceApi; -import io.airbyte.api.client.generated.StateApi; -import io.airbyte.api.client.generated.WorkspaceApi; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.commons.temporal.config.WorkerMode; -import io.micronaut.context.BeanProvider; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Prototype; -import io.micronaut.context.annotation.Value; -import io.micronaut.context.env.Environment; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.FileInputStream; -import java.net.http.HttpClient; -import java.net.http.HttpClient.Version; -import java.security.interfaces.RSAPrivateKey; -import java.time.Duration; -import java.util.Date; -import java.util.concurrent.TimeUnit; -import lombok.extern.slf4j.Slf4j; - -/** - * Micronaut bean factory for API client singletons. 
- */ -@Factory -@Slf4j -public class ApiClientBeanFactory { - - private static final int JWT_TTL_MINUTES = 5; - - @Singleton - @Named("apiClient") - public ApiClient apiClient( - @Value("${airbyte.internal.api.auth-header.name}") final String airbyteApiAuthHeaderName, - @Value("${airbyte.internal.api.host}") final String airbyteApiHost, - @Named("internalApiAuthToken") final BeanProvider internalApiAuthToken, - @Named("internalApiScheme") final String internalApiScheme) { - return new ApiClient() - .setScheme(internalApiScheme) - .setHost(parseHostName(airbyteApiHost)) - .setPort(parsePort(airbyteApiHost)) - .setBasePath("/api") - .setHttpClientBuilder(HttpClient.newBuilder().version(Version.HTTP_1_1)) - .setConnectTimeout(Duration.ofSeconds(30)) - .setReadTimeout(Duration.ofSeconds(30)) - .setRequestInterceptor(builder -> { - builder.setHeader("User-Agent", "WorkerApp"); - // internalApiAuthToken is in BeanProvider because we want to create a new token each - // time we send a request. 
- if (!airbyteApiAuthHeaderName.isBlank()) { - builder.setHeader(airbyteApiAuthHeaderName, internalApiAuthToken.get()); - } - }); - } - - @Singleton - public AirbyteApiClient airbyteApiClient(final ApiClient apiClient) { - return new AirbyteApiClient(apiClient); - } - - @Singleton - public SourceApi sourceApi(@Named("apiClient") final ApiClient apiClient) { - return new SourceApi(apiClient); - } - - @Singleton - public JobsApi jobsApi(@Named("apiClient") final ApiClient apiClient) { - return new JobsApi(apiClient); - } - - @Singleton - public DestinationApi destinationApi(final ApiClient apiClient) { - return new DestinationApi(apiClient); - } - - @Singleton - public ConnectionApi connectionApi(final ApiClient apiClient) { - return new ConnectionApi(apiClient); - } - - @Singleton - public WorkspaceApi workspaceApi(final ApiClient apiClient) { - return new WorkspaceApi(apiClient); - } - - @Singleton - public AttemptApi attemptApi(final ApiClient apiClient) { - return new AttemptApi(apiClient); - } - - @Singleton - public StateApi stateApi(final ApiClient apiClient) { - return new StateApi(apiClient); - } - - @Singleton - public HttpClient httpClient() { - return HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - } - - @Singleton - @Named("internalApiScheme") - public String internalApiScheme(@Value("${airbyte.acceptance.test.enabled}") final boolean isInTestMode, final Environment environment) { - // control plane workers communicate with the Airbyte API within their internal network, so https - // isn't needed - if (isInTestMode) { - return "http"; - } - return environment.getActiveNames().contains(WorkerMode.CONTROL_PLANE) ? "http" : "https"; - } - - /** - * Generate an auth token based on configs. This is called by the Api Client's requestInterceptor - * for each request. Using Prototype annotation here to make sure each time it's used it will - * generate a new JWT Signature if it's on data plane. - *

- * For Data Plane workers, generate a signed JWT as described here: - * https://cloud.google.com/endpoints/docs/openapi/service-account-authentication - *

- * Otherwise, use the AIRBYTE_API_AUTH_HEADER_VALUE from EnvConfigs. - */ - @Prototype - @Named("internalApiAuthToken") - public String internalApiAuthToken( - @Value("${airbyte.internal.api.auth-header.value}") final String airbyteApiAuthHeaderValue, - @Value("${airbyte.control.plane.auth-endpoint}") final String controlPlaneAuthEndpoint, - @Value("${airbyte.data.plane.service-account.email}") final String dataPlaneServiceAccountEmail, - @Value("${airbyte.data.plane.service-account.credentials-path}") final String dataPlaneServiceAccountCredentialsPath, - @Value("${airbyte.acceptance.test.enabled}") final boolean isInTestMode, - final Environment environment) { - if (isInTestMode || environment.getActiveNames().contains(WorkerMode.CONTROL_PLANE)) { - // control plane workers communicate with the Airbyte API within their internal network, so a signed - // JWT isn't needed - return airbyteApiAuthHeaderValue; - } else if (environment.getActiveNames().contains(WorkerMode.DATA_PLANE)) { - try { - final Date now = new Date(); - final Date expTime = new Date(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(JWT_TTL_MINUTES)); - // Build the JWT payload - final JWTCreator.Builder token = JWT.create() - .withIssuedAt(now) - .withExpiresAt(expTime) - .withIssuer(dataPlaneServiceAccountEmail) - .withAudience(controlPlaneAuthEndpoint) - .withSubject(dataPlaneServiceAccountEmail) - .withClaim("email", dataPlaneServiceAccountEmail); - - // TODO multi-cloud phase 2: check performance of on-demand token generation in load testing. 
might - // need to pull some of this outside of this method which is called for every API request - final FileInputStream stream = new FileInputStream(dataPlaneServiceAccountCredentialsPath); - final ServiceAccountCredentials cred = ServiceAccountCredentials.fromStream(stream); - final RSAPrivateKey key = (RSAPrivateKey) cred.getPrivateKey(); - final Algorithm algorithm = Algorithm.RSA256(null, key); - return "Bearer " + token.sign(algorithm); - } catch (final Exception e) { - log.warn( - "An issue occurred while generating a data plane auth token. Defaulting to empty string. Error Message: {}", - e.getMessage()); - return ""; - } - } else { - log.warn("Worker somehow wasn't a control plane or a data plane worker!"); - return ""; - } - } - - private String parseHostName(final String airbyteInternalApiHost) { - return airbyteInternalApiHost.split(":")[0]; - } - - private int parsePort(final String airbyteInternalApiHost) { - return Integer.parseInt(airbyteInternalApiHost.split(":")[1]); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/exception/RecordSchemaValidationException.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/exception/RecordSchemaValidationException.java deleted file mode 100644 index d6b51fb5fe03..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/exception/RecordSchemaValidationException.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.exception; - -import java.util.Set; - -/** - * Exception thrown by the RecordSchemaValidator during a sync when AirbyteRecordMessage data does - * not conform to its stream's defined JSON schema - */ - -public class RecordSchemaValidationException extends Exception { - - public final Set errorMessages; - - public RecordSchemaValidationException(final Set errorMessages, final String message) { - super(message); - this.errorMessages = errorMessages; - } - - public RecordSchemaValidationException(final Set errorMessages, final String message, final Throwable cause) { - super(message, cause); - this.errorMessages = errorMessages; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationWorker.java deleted file mode 100644 index 8e7a66c946c9..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationWorker.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.general; - -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.config.OperatorDbtInput; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.workers.Worker; -import io.airbyte.workers.exception.WorkerException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.AvoidPrintStackTrace") -public class DbtTransformationWorker implements Worker { - - private static final Logger LOGGER = LoggerFactory.getLogger(DbtTransformationWorker.class); - - private final String jobId; - private final int attempt; - private final DbtTransformationRunner dbtTransformationRunner; - private final ResourceRequirements resourceRequirements; - - private final AtomicBoolean cancelled; - - public DbtTransformationWorker(final String jobId, - final int attempt, - final ResourceRequirements resourceRequirements, - final DbtTransformationRunner dbtTransformationRunner) { - this.jobId = jobId; - this.attempt = attempt; - this.dbtTransformationRunner = dbtTransformationRunner; - this.resourceRequirements = resourceRequirements; - - this.cancelled = new AtomicBoolean(false); - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public Void run(final OperatorDbtInput operatorDbtInput, final Path jobRoot) throws WorkerException { - final long startTime = System.currentTimeMillis(); - LineGobbler.startSection("DBT TRANSFORMATION"); - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot)); - - try 
(dbtTransformationRunner) { - LOGGER.info("Running dbt transformation."); - dbtTransformationRunner.start(); - final Path transformRoot = Files.createDirectories(jobRoot.resolve("transform")); - if (!dbtTransformationRunner.run( - jobId, - attempt, - transformRoot, - operatorDbtInput.getDestinationConfiguration(), - resourceRequirements, - operatorDbtInput.getOperatorDbt())) { - throw new WorkerException("DBT Transformation Failed."); - } - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - throw new WorkerException("Dbt Transformation Failed.", e); - } - if (cancelled.get()) { - LOGGER.info("Dbt Transformation was cancelled."); - } - - final Duration duration = Duration.ofMillis(System.currentTimeMillis() - startTime); - LOGGER.info("Dbt Transformation executed in {}.", duration.toMinutesPart()); - LineGobbler.endSection("DBT TRANSFORMATION"); - - return null; - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void cancel() { - LOGGER.info("Cancelling Dbt Transformation runner..."); - try { - cancelled.set(true); - dbtTransformationRunner.close(); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Unable to cancel Dbt Transformation runner.", e); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionWorker.java index 02788ec88344..3e785f916fc9 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultCheckConnectionWorker.java @@ -4,11 +4,7 @@ package io.airbyte.workers.general; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - import com.fasterxml.jackson.databind.JsonNode; -import 
datadog.trace.api.Trace; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.io.LineGobbler; import io.airbyte.commons.json.Jsons; @@ -18,7 +14,6 @@ import io.airbyte.config.StandardCheckConnectionInput; import io.airbyte.config.StandardCheckConnectionOutput; import io.airbyte.config.StandardCheckConnectionOutput.Status; -import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; import io.airbyte.protocol.models.AirbyteMessage; @@ -60,11 +55,9 @@ public DefaultCheckConnectionWorker(final IntegrationLauncher integrationLaunche this(integrationLauncher, connectorConfigUpdater, new DefaultAirbyteStreamFactory()); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Path jobRoot) throws WorkerException { LineGobbler.startSection("CHECK"); - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ROOT_KEY, jobRoot)); try { final JsonNode inputConfig = input.getConnectionConfiguration(); @@ -120,14 +113,12 @@ public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Pa return jobOutput; } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); LOGGER.error("Unexpected error while checking connection: ", e); LineGobbler.endSection("CHECK"); throw new WorkerException("Unexpected error while getting checking connection.", e); } } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void cancel() { WorkerUtils.cancelProcess(process); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java index b0a5168637ec..8e87369e9b33 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java +++ 
b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java @@ -4,13 +4,7 @@ package io.airbyte.workers.general; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.CONNECTOR_VERSION_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.SOURCE_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - import com.fasterxml.jackson.databind.JsonNode; -import datadog.trace.api.Trace; import io.airbyte.api.client.AirbyteApiClient; import io.airbyte.api.client.model.generated.DiscoverCatalogResult; import io.airbyte.api.client.model.generated.SourceDiscoverSchemaWriteRequestBody; @@ -20,7 +14,6 @@ import io.airbyte.config.ConnectorJobOutput.OutputType; import io.airbyte.config.FailureReason; import io.airbyte.config.StandardDiscoverCatalogInput; -import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; import io.airbyte.protocol.models.AirbyteMessage; @@ -35,7 +28,6 @@ import io.airbyte.workers.process.IntegrationLauncher; import java.nio.file.Path; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -70,10 +62,8 @@ public DefaultDiscoverCatalogWorker(final AirbyteApiClient airbyteApiClient, this(airbyteApiClient, integrationLauncher, connectorConfigUpdater, new DefaultAirbyteStreamFactory()); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaInput, final Path jobRoot) throws WorkerException { - ApmTraceUtils.addTagsToTrace(generateTraceTags(discoverSchemaInput, jobRoot)); try { final JsonNode inputConfig = discoverSchemaInput.getConnectionConfiguration(); process = integrationLauncher.discover( @@ -120,10 +110,8 @@ public 
ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaI } return jobOutput; } catch (final WorkerException e) { - ApmTraceUtils.addExceptionToTrace(e); throw e; } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); throw new WorkerException("Error while discovering schema", e); } } @@ -141,24 +129,6 @@ private SourceDiscoverSchemaWriteRequestBody buildSourceDiscoverSchemaWriteReque discoverSchemaInput.getConfigHash()); } - private Map generateTraceTags(final StandardDiscoverCatalogInput discoverSchemaInput, final Path jobRoot) { - final Map tags = new HashMap<>(); - - tags.put(JOB_ROOT_KEY, jobRoot); - - if (discoverSchemaInput != null) { - if (discoverSchemaInput.getSourceId() != null) { - tags.put(SOURCE_ID_KEY, discoverSchemaInput.getSourceId()); - } - if (discoverSchemaInput.getConnectorVersion() != null) { - tags.put(CONNECTOR_VERSION_KEY, discoverSchemaInput.getConnectorVersion()); - } - } - - return tags; - } - - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void cancel() { WorkerUtils.cancelProcess(process); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultGetSpecWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultGetSpecWorker.java index dd7711e0acda..cc19bdfc04fa 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultGetSpecWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultGetSpecWorker.java @@ -4,17 +4,11 @@ package io.airbyte.workers.general; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import datadog.trace.api.Trace; import io.airbyte.commons.io.LineGobbler; import io.airbyte.config.ConnectorJobOutput; import io.airbyte.config.ConnectorJobOutput.OutputType; import 
io.airbyte.config.FailureReason; import io.airbyte.config.JobGetSpecConfig; -import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.ConnectorSpecification; @@ -49,10 +43,8 @@ public DefaultGetSpecWorker(final IntegrationLauncher integrationLauncher) { this(integrationLauncher, new DefaultAirbyteStreamFactory()); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public ConnectorJobOutput run(final JobGetSpecConfig config, final Path jobRoot) throws WorkerException { - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ROOT_KEY, jobRoot, DOCKER_IMAGE_KEY, config.getDockerImage())); try { process = integrationLauncher.spec(jobRoot); @@ -87,7 +79,6 @@ public ConnectorJobOutput run(final JobGetSpecConfig config, final Path jobRoot) } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void cancel() { WorkerUtils.cancelProcess(process); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultNormalizationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultNormalizationWorker.java deleted file mode 100644 index 751d13658270..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultNormalizationWorker.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.general; - -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.FailureReason; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.helper.FailureHelper; -import io.airbyte.workers.normalization.NormalizationRunner; -import io.airbyte.workers.normalization.NormalizationWorker; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.lang3.time.DurationFormatUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.AvoidPrintStackTrace") -public class DefaultNormalizationWorker implements NormalizationWorker { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultNormalizationWorker.class); - - private final String jobId; - private final int attempt; - private final NormalizationRunner normalizationRunner; - private final WorkerEnvironment workerEnvironment; - private final List traceFailureReasons = new ArrayList<>(); - private boolean failed = false; - - private final AtomicBoolean cancelled; - - public DefaultNormalizationWorker(final String jobId, - final int attempt, - final NormalizationRunner normalizationRunner, - final WorkerEnvironment workerEnvironment) { - this.jobId = jobId; - this.attempt = attempt; - this.normalizationRunner = 
normalizationRunner; - this.workerEnvironment = workerEnvironment; - - this.cancelled = new AtomicBoolean(false); - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public NormalizationSummary run(final NormalizationInput input, final Path jobRoot) throws WorkerException { - final long startTime = System.currentTimeMillis(); - - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot)); - - try (normalizationRunner) { - LineGobbler.startSection("DEFAULT NORMALIZATION"); - normalizationRunner.start(); - - Path normalizationRoot = null; - // There are no shared volumes on Kube; only create this for Docker. - if (workerEnvironment.equals(WorkerEnvironment.DOCKER)) { - normalizationRoot = Files.createDirectories(jobRoot.resolve("normalize")); - } - - if (!normalizationRunner.normalize(jobId, attempt, normalizationRoot, input.getDestinationConfiguration(), input.getCatalog(), - input.getResourceRequirements())) { - buildFailureReasonsAndSetFailure(); - } - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Normalization failed for job {}.", jobId, e); - buildFailureReasonsAndSetFailure(); - } - - if (cancelled.get()) { - LOGGER.info("Normalization was cancelled for job {}.", jobId); - } - - final long endTime = System.currentTimeMillis(); - final Duration duration = Duration.ofMillis(endTime - startTime); - final String durationDescription = DurationFormatUtils.formatDurationWords(duration.toMillis(), true, true); - LOGGER.info("Normalization executed in {} for job {}.", durationDescription, jobId); - - final NormalizationSummary summary = new NormalizationSummary() - .withStartTime(startTime) - .withEndTime(endTime); - - if (!traceFailureReasons.isEmpty()) { - summary.setFailures(traceFailureReasons); - } else if (failed) { - throw new WorkerException("Normalization Failed."); - } - - LOGGER.info("Normalization summary: {}", summary); - LineGobbler.endSection("DEFAULT NORMALIZATION"); - - return 
summary; - } - - private void buildFailureReasonsAndSetFailure() { - normalizationRunner.getTraceMessages() - .filter(traceMessage -> traceMessage.getType() == AirbyteTraceMessage.Type.ERROR) - .forEach(traceMessage -> traceFailureReasons.add(FailureHelper.normalizationFailure(traceMessage, Long.valueOf(jobId), attempt))); - failed = true; - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void cancel() { - LOGGER.info("Cancelling normalization runner..."); - try { - cancelled.set(true); - normalizationRunner.close(); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Unable to cancel normalization runner.", e); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java deleted file mode 100644 index bae558f504a8..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java +++ /dev/null @@ -1,765 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.general; - -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.CONNECTION_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import datadog.trace.api.Trace; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.config.FailureReason; -import io.airbyte.config.ReplicationAttemptSummary; -import io.airbyte.config.ReplicationOutput; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.config.StandardSyncSummary.ReplicationStatus; -import io.airbyte.config.State; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import io.airbyte.config.WorkerDestinationConfig; -import io.airbyte.config.WorkerSourceConfig; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.protocol.models.AirbyteControlMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteRecordMessage; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.workers.RecordSchemaValidator; -import io.airbyte.workers.WorkerMetricReporter; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.RecordSchemaValidationException; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.helper.ConnectorConfigUpdater; -import io.airbyte.workers.helper.FailureHelper; -import io.airbyte.workers.helper.ThreadedTimeTracker; -import io.airbyte.workers.internal.AirbyteDestination; -import 
io.airbyte.workers.internal.AirbyteMapper; -import io.airbyte.workers.internal.AirbyteSource; -import io.airbyte.workers.internal.book_keeping.MessageTracker; -import io.airbyte.workers.internal.exception.DestinationException; -import io.airbyte.workers.internal.exception.SourceException; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -/** - * This worker is the "data shovel" of ETL. It is responsible for moving data from the Source - * container to the Destination container. It manages the full lifecycle of this process. This - * includes: - *

    - *
  • Starting the Source and Destination containers
  • - *
  • Passing data from Source to Destination
  • - *
  • Executing any configured map-only operations (Mappers) in between the Source and - * Destination
  • - *
  • Collecting metadata about the data that is passing from Source to Destination
  • - *
  • Listening for state messages emitted from the Destination to keep track of what data has been - * replicated.
  • - *
  • Handling shutdown of the Source and Destination
  • - *
  • Handling failure cases and returning state for partially completed replications (so that the - * next replication can pick up where it left off instead of starting from the beginning)
  • - *
- */ -@SuppressWarnings("PMD.AvoidPrintStackTrace") -public class DefaultReplicationWorker implements ReplicationWorker { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultReplicationWorker.class); - - private final String jobId; - private final int attempt; - private final AirbyteSource source; - private final AirbyteMapper mapper; - private final AirbyteDestination destination; - private final MessageTracker messageTracker; - - private final ExecutorService executors; - private final AtomicBoolean cancelled; - private final AtomicBoolean hasFailed; - private final RecordSchemaValidator recordSchemaValidator; - private final WorkerMetricReporter metricReporter; - private final ConnectorConfigUpdater connectorConfigUpdater; - private final boolean fieldSelectionEnabled; - - public DefaultReplicationWorker(final String jobId, - final int attempt, - final AirbyteSource source, - final AirbyteMapper mapper, - final AirbyteDestination destination, - final MessageTracker messageTracker, - final RecordSchemaValidator recordSchemaValidator, - final WorkerMetricReporter metricReporter, - final ConnectorConfigUpdater connectorConfigUpdater, - final boolean fieldSelectionEnabled) { - this.jobId = jobId; - this.attempt = attempt; - this.source = source; - this.mapper = mapper; - this.destination = destination; - this.messageTracker = messageTracker; - this.executors = Executors.newFixedThreadPool(2); - this.recordSchemaValidator = recordSchemaValidator; - this.metricReporter = metricReporter; - this.connectorConfigUpdater = connectorConfigUpdater; - this.fieldSelectionEnabled = fieldSelectionEnabled; - - this.cancelled = new AtomicBoolean(false); - this.hasFailed = new AtomicBoolean(false); - } - - /** - * Run executes two threads. The first pipes data from STDOUT of the source to STDIN of the - * destination. The second listen on STDOUT of the destination. The goal of this second thread is to - * detect when the destination emits state messages. 
Only state messages emitted by the destination - * should be treated as state that is safe to return from run. In the case when the destination - * emits no state, we fall back on whatever state is pass in as an argument to this method. - * - * @param syncInput all configuration for running replication - * @param jobRoot file root that worker is allowed to use - * @return output of the replication attempt (including state) - * @throws WorkerException - */ - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public final ReplicationOutput run(final StandardSyncInput syncInput, final Path jobRoot) throws WorkerException { - LOGGER.info("start sync worker. job id: {} attempt id: {}", jobId, attempt); - LineGobbler.startSection("REPLICATION"); - - // todo (cgardens) - this should not be happening in the worker. this is configuration information - // that is independent of workflow executions. - final WorkerDestinationConfig destinationConfig = WorkerUtils.syncToWorkerDestinationConfig(syncInput); - destinationConfig.setCatalog(mapper.mapCatalog(destinationConfig.getCatalog())); - - final ThreadedTimeTracker timeTracker = new ThreadedTimeTracker(); - timeTracker.trackReplicationStartTime(); - - final AtomicReference replicationRunnableFailureRef = new AtomicReference<>(); - final AtomicReference destinationRunnableFailureRef = new AtomicReference<>(); - - try { - LOGGER.info("configured sync modes: {}", syncInput.getCatalog().getStreams() - .stream() - .collect(Collectors.toMap(s -> s.getStream().getNamespace() + "." 
+ s.getStream().getName(), - s -> String.format("%s - %s", s.getSyncMode(), s.getDestinationSyncMode())))); - LOGGER.debug("field selection enabled: {}", fieldSelectionEnabled); - final WorkerSourceConfig sourceConfig = WorkerUtils.syncToWorkerSourceConfig(syncInput); - - ApmTraceUtils.addTagsToTrace(generateTraceTags(destinationConfig, jobRoot)); - replicate(jobRoot, destinationConfig, timeTracker, replicationRunnableFailureRef, destinationRunnableFailureRef, sourceConfig); - timeTracker.trackReplicationEndTime(); - - return getReplicationOutput(syncInput, destinationConfig, replicationRunnableFailureRef, destinationRunnableFailureRef, timeTracker); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - throw new WorkerException("Sync failed", e); - } - - } - - private void replicate(final Path jobRoot, - final WorkerDestinationConfig destinationConfig, - final ThreadedTimeTracker timeTracker, - final AtomicReference replicationRunnableFailureRef, - final AtomicReference destinationRunnableFailureRef, - final WorkerSourceConfig sourceConfig) { - final Map mdc = MDC.getCopyOfContextMap(); - - // note: resources are closed in the opposite order in which they are declared. thus source will be - // closed first (which is what we want). 
- try (destination; source) { - destination.start(destinationConfig, jobRoot); - timeTracker.trackSourceReadStartTime(); - source.start(sourceConfig, jobRoot); - timeTracker.trackDestinationWriteStartTime(); - - // note: `whenComplete` is used instead of `exceptionally` so that the original exception is still - // thrown - final CompletableFuture readFromDstThread = CompletableFuture.runAsync( - readFromDstRunnable(destination, cancelled, messageTracker, connectorConfigUpdater, mdc, timeTracker, destinationConfig.getDestinationId()), - executors) - .whenComplete((msg, ex) -> { - if (ex != null) { - ApmTraceUtils.addExceptionToTrace(ex); - if (ex.getCause() instanceof DestinationException) { - destinationRunnableFailureRef.set(FailureHelper.destinationFailure(ex, Long.valueOf(jobId), attempt)); - } else { - destinationRunnableFailureRef.set(FailureHelper.replicationFailure(ex, Long.valueOf(jobId), attempt)); - } - } - }); - - final CompletableFuture readSrcAndWriteDstThread = CompletableFuture.runAsync( - readFromSrcAndWriteToDstRunnable( - source, - destination, - sourceConfig.getCatalog(), - cancelled, - mapper, - messageTracker, - connectorConfigUpdater, - mdc, - recordSchemaValidator, - metricReporter, - timeTracker, - sourceConfig.getSourceId(), - fieldSelectionEnabled), - executors) - .whenComplete((msg, ex) -> { - if (ex != null) { - ApmTraceUtils.addExceptionToTrace(ex); - if (ex.getCause() instanceof SourceException) { - replicationRunnableFailureRef.set(FailureHelper.sourceFailure(ex, Long.valueOf(jobId), attempt)); - } else if (ex.getCause() instanceof DestinationException) { - replicationRunnableFailureRef.set(FailureHelper.destinationFailure(ex, Long.valueOf(jobId), attempt)); - } else { - replicationRunnableFailureRef.set(FailureHelper.replicationFailure(ex, Long.valueOf(jobId), attempt)); - } - } - }); - - LOGGER.info("Waiting for source and destination threads to complete."); - // CompletableFuture#allOf waits until all futures finish before 
returning, even if one throws an - // exception. So in order to handle exceptions from a future immediately without needing to wait for - // the other future to finish, we first call CompletableFuture#anyOf. - CompletableFuture.anyOf(readSrcAndWriteDstThread, readFromDstThread).get(); - LOGGER.info("One of source or destination thread complete. Waiting on the other."); - CompletableFuture.allOf(readSrcAndWriteDstThread, readFromDstThread).get(); - LOGGER.info("Source and destination threads complete."); - - } catch (final Exception e) { - hasFailed.set(true); - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Sync worker failed.", e); - } finally { - executors.shutdownNow(); - } - } - - @SuppressWarnings("PMD.AvoidInstanceofChecksInCatchClause") - private static Runnable readFromDstRunnable(final AirbyteDestination destination, - final AtomicBoolean cancelled, - final MessageTracker messageTracker, - final ConnectorConfigUpdater connectorConfigUpdater, - final Map mdc, - final ThreadedTimeTracker timeHolder, - final UUID destinationId) { - return () -> { - MDC.setContextMap(mdc); - LOGGER.info("Destination output thread started."); - try { - while (!cancelled.get() && !destination.isFinished()) { - final Optional messageOptional; - try { - messageOptional = destination.attemptRead(); - } catch (final Exception e) { - throw new DestinationException("Destination process read attempt failed", e); - } - if (messageOptional.isPresent()) { - final AirbyteMessage message = messageOptional.get(); - LOGGER.info("State in DefaultReplicationWorker from destination: {}", message); - - messageTracker.acceptFromDestination(message); - - try { - if (message.getType() == Type.CONTROL) { - acceptDstControlMessage(destinationId, message.getControl(), connectorConfigUpdater); - } - } catch (final Exception e) { - LOGGER.error("Error updating destination configuration", e); - } - } - } - timeHolder.trackDestinationWriteEndTime(); - if (!cancelled.get() && 
destination.getExitValue() != 0) { - throw new DestinationException("Destination process exited with non-zero exit code " + destination.getExitValue()); - } - } catch (final Exception e) { - if (!cancelled.get()) { - // Although this thread is closed first, it races with the destination's closure and can attempt one - // final read after the destination is closed before it's terminated. - // This read will fail and throw an exception. Because of this, throw exceptions only if the worker - // was not cancelled. - - if (e instanceof DestinationException) { - // Surface Destination exceptions directly so that they can be classified properly by the worker - throw e; - } else { - throw new RuntimeException(e); - } - } - } - }; - } - - @SuppressWarnings("PMD.AvoidInstanceofChecksInCatchClause") - private static Runnable readFromSrcAndWriteToDstRunnable(final AirbyteSource source, - final AirbyteDestination destination, - final ConfiguredAirbyteCatalog catalog, - final AtomicBoolean cancelled, - final AirbyteMapper mapper, - final MessageTracker messageTracker, - final ConnectorConfigUpdater connectorConfigUpdater, - final Map mdc, - final RecordSchemaValidator recordSchemaValidator, - final WorkerMetricReporter metricReporter, - final ThreadedTimeTracker timeHolder, - final UUID sourceId, - final boolean fieldSelectionEnabled) { - return () -> { - MDC.setContextMap(mdc); - LOGGER.info("Replication thread started."); - long recordsRead = 0L; - final Map, Integer>> validationErrors = new HashMap<>(); - final Map> streamToSelectedFields = new HashMap<>(); - final Map> streamToAllFields = new HashMap<>(); - final Map> unexpectedFields = new HashMap<>(); - if (fieldSelectionEnabled) { - populatedStreamToSelectedFields(catalog, streamToSelectedFields); - } - populateStreamToAllFields(catalog, streamToAllFields); - try { - while (!cancelled.get() && !source.isFinished()) { - final Optional messageOptional; - try { - messageOptional = source.attemptRead(); - } catch (final 
Exception e) { - throw new SourceException("Source process read attempt failed", e); - } - - if (messageOptional.isPresent()) { - final AirbyteMessage airbyteMessage = messageOptional.get(); - if (fieldSelectionEnabled) { - filterSelectedFields(streamToSelectedFields, airbyteMessage); - } - validateSchema(recordSchemaValidator, streamToAllFields, unexpectedFields, validationErrors, airbyteMessage); - final AirbyteMessage message = mapper.mapMessage(airbyteMessage); - - messageTracker.acceptFromSource(message); - - try { - if (message.getType() == Type.CONTROL) { - acceptSrcControlMessage(sourceId, message.getControl(), connectorConfigUpdater); - } - } catch (final Exception e) { - LOGGER.error("Error updating source configuration", e); - } - - try { - if (message.getType() == Type.RECORD || message.getType() == Type.STATE) { - destination.accept(message); - } - } catch (final Exception e) { - throw new DestinationException("Destination process message delivery failed", e); - } - - recordsRead += 1; - - if (recordsRead % 1000 == 0) { - LOGGER.info("Records read: {} ({})", recordsRead, FileUtils.byteCountToDisplaySize(messageTracker.getTotalBytesEmitted())); - } - } else { - LOGGER.info("Source has no more messages, closing connection."); - try { - source.close(); - } catch (final Exception e) { - throw new SourceException("Source cannot be stopped!", e); - } - } - } - timeHolder.trackSourceReadEndTime(); - LOGGER.info("Total records read: {} ({})", recordsRead, FileUtils.byteCountToDisplaySize(messageTracker.getTotalBytesEmitted())); - if (!validationErrors.isEmpty()) { - validationErrors.forEach((stream, errorPair) -> { - LOGGER.warn("Schema validation errors found for stream {}. 
Error messages: {}", stream, errorPair.getLeft()); - metricReporter.trackSchemaValidationError(stream); - }); - } - unexpectedFields.forEach((stream, unexpectedFieldNames) -> { - if (!unexpectedFieldNames.isEmpty()) { - LOGGER.warn("Source {} has unexpected fields [{}] in stream {}", sourceId, String.join(", ", unexpectedFieldNames), stream); - // TODO(mfsiega-airbyte): publish this as a metric. - } - }); - - try { - destination.notifyEndOfInput(); - } catch (final Exception e) { - throw new DestinationException("Destination process end of stream notification failed", e); - } - if (!cancelled.get() && source.getExitValue() != 0) { - throw new SourceException("Source process exited with non-zero exit code " + source.getExitValue()); - } - } catch (final Exception e) { - if (!cancelled.get()) { - // Although this thread is closed first, it races with the source's closure and can attempt one - // final read after the source is closed before it's terminated. - // This read will fail and throw an exception. Because of this, throw exceptions only if the worker - // was not cancelled. 
- - if (e instanceof SourceException || e instanceof DestinationException) { - // Surface Source and Destination exceptions directly so that they can be classified properly by the - // worker - throw e; - } else { - throw new RuntimeException(e); - } - } - } - }; - } - - private static void acceptSrcControlMessage(final UUID sourceId, - final AirbyteControlMessage controlMessage, - final ConnectorConfigUpdater connectorConfigUpdater) { - if (controlMessage.getType() == AirbyteControlMessage.Type.CONNECTOR_CONFIG) { - connectorConfigUpdater.updateSource(sourceId, controlMessage.getConnectorConfig().getConfig()); - } - } - - private static void acceptDstControlMessage(final UUID destinationId, - final AirbyteControlMessage controlMessage, - final ConnectorConfigUpdater connectorConfigUpdater) { - if (controlMessage.getType() == AirbyteControlMessage.Type.CONNECTOR_CONFIG) { - connectorConfigUpdater.updateDestination(destinationId, controlMessage.getConnectorConfig().getConfig()); - } - } - - private ReplicationOutput getReplicationOutput(final StandardSyncInput syncInput, - final WorkerDestinationConfig destinationConfig, - final AtomicReference replicationRunnableFailureRef, - final AtomicReference destinationRunnableFailureRef, - final ThreadedTimeTracker timeTracker) - throws JsonProcessingException { - final ReplicationStatus outputStatus; - // First check if the process was cancelled. Cancellation takes precedence over failures. 
- if (cancelled.get()) { - outputStatus = ReplicationStatus.CANCELLED; - } - // if the process was not cancelled but still failed, then it's an actual failure - else if (hasFailed.get()) { - outputStatus = ReplicationStatus.FAILED; - } else { - outputStatus = ReplicationStatus.COMPLETED; - } - - final SyncStats totalSyncStats = getTotalStats(timeTracker, outputStatus); - final List streamSyncStats = getPerStreamStats(outputStatus); - - final ReplicationAttemptSummary summary = new ReplicationAttemptSummary() - .withStatus(outputStatus) - .withRecordsSynced(messageTracker.getTotalRecordsEmitted()) // TODO (parker) remove in favor of totalRecordsEmitted - .withBytesSynced(messageTracker.getTotalBytesEmitted()) // TODO (parker) remove in favor of totalBytesEmitted - .withTotalStats(totalSyncStats) - .withStreamStats(streamSyncStats) - .withStartTime(timeTracker.getReplicationStartTime()) - .withEndTime(System.currentTimeMillis()); - - final ReplicationOutput output = new ReplicationOutput() - .withReplicationAttemptSummary(summary) - .withOutputCatalog(destinationConfig.getCatalog()); - - final List failures = getFailureReasons(replicationRunnableFailureRef, destinationRunnableFailureRef, - output); - - prepStateForLaterSaving(syncInput, output); - - final ObjectMapper mapper = new ObjectMapper(); - LOGGER.info("sync summary: {}", mapper.writerWithDefaultPrettyPrinter().writeValueAsString(summary)); - LOGGER.info("failures: {}", mapper.writerWithDefaultPrettyPrinter().writeValueAsString(failures)); - LineGobbler.endSection("REPLICATION"); - - return output; - } - - private SyncStats getTotalStats(final ThreadedTimeTracker timeTracker, final ReplicationStatus outputStatus) { - final SyncStats totalSyncStats = new SyncStats() - .withRecordsEmitted(messageTracker.getTotalRecordsEmitted()) - .withBytesEmitted(messageTracker.getTotalBytesEmitted()) - .withSourceStateMessagesEmitted(messageTracker.getTotalSourceStateMessagesEmitted()) - 
.withDestinationStateMessagesEmitted(messageTracker.getTotalDestinationStateMessagesEmitted()) - .withMaxSecondsBeforeSourceStateMessageEmitted(messageTracker.getMaxSecondsToReceiveSourceStateMessage()) - .withMeanSecondsBeforeSourceStateMessageEmitted(messageTracker.getMeanSecondsToReceiveSourceStateMessage()) - .withMaxSecondsBetweenStateMessageEmittedandCommitted(messageTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted().orElse(null)) - .withMeanSecondsBetweenStateMessageEmittedandCommitted(messageTracker.getMeanSecondsBetweenStateMessageEmittedAndCommitted().orElse(null)) - .withReplicationStartTime(timeTracker.getReplicationStartTime()) - .withReplicationEndTime(timeTracker.getReplicationEndTime()) - .withSourceReadStartTime(timeTracker.getSourceReadStartTime()) - .withSourceReadEndTime(timeTracker.getSourceReadEndTime()) - .withDestinationWriteStartTime(timeTracker.getDestinationWriteStartTime()) - .withDestinationWriteEndTime(timeTracker.getDestinationWriteEndTime()); - - if (outputStatus == ReplicationStatus.COMPLETED) { - totalSyncStats.setRecordsCommitted(totalSyncStats.getRecordsEmitted()); - } else if (messageTracker.getTotalRecordsCommitted().isPresent()) { - totalSyncStats.setRecordsCommitted(messageTracker.getTotalRecordsCommitted().get()); - } else { - LOGGER.warn("Could not reliably determine committed record counts, committed record stats will be set to null"); - totalSyncStats.setRecordsCommitted(null); - } - return totalSyncStats; - } - - private List getPerStreamStats(final ReplicationStatus outputStatus) { - // assume every stream with stats is in streamToEmittedRecords map - return messageTracker.getStreamToEmittedRecords().keySet().stream().map(stream -> { - final SyncStats syncStats = new SyncStats() - .withRecordsEmitted(messageTracker.getStreamToEmittedRecords().get(stream)) - .withBytesEmitted(messageTracker.getStreamToEmittedBytes().get(stream)) - .withSourceStateMessagesEmitted(null) - 
.withDestinationStateMessagesEmitted(null); - - if (outputStatus == ReplicationStatus.COMPLETED) { - syncStats.setRecordsCommitted(messageTracker.getStreamToEmittedRecords().get(stream)); - } else if (messageTracker.getStreamToCommittedRecords().isPresent()) { - syncStats.setRecordsCommitted(messageTracker.getStreamToCommittedRecords().get().get(stream)); - } else { - syncStats.setRecordsCommitted(null); - } - return new StreamSyncStats() - .withStreamName(stream.getName()) - .withStreamNamespace(stream.getNamespace()) - .withStats(syncStats); - }).collect(Collectors.toList()); - } - - /** - * Extracts state out to the {@link ReplicationOutput} so it can be later saved in the - * PersistStateActivity - State is NOT SAVED here. - * - * @param syncInput - * @param output - */ - private void prepStateForLaterSaving(final StandardSyncInput syncInput, final ReplicationOutput output) { - if (messageTracker.getSourceOutputState().isPresent()) { - LOGGER.info("Source output at least one state message"); - } else { - LOGGER.info("Source did not output any state messages"); - } - - if (messageTracker.getDestinationOutputState().isPresent()) { - LOGGER.info("State capture: Updated state to: {}", messageTracker.getDestinationOutputState()); - final State state = messageTracker.getDestinationOutputState().get(); - output.withState(state); - } else if (syncInput.getState() != null) { - LOGGER.warn("State capture: No new state, falling back on input state: {}", syncInput.getState()); - output.withState(syncInput.getState()); - } else { - LOGGER.warn("State capture: No state retained."); - } - - if (messageTracker.getUnreliableStateTimingMetrics()) { - metricReporter.trackStateMetricTrackerError(); - } - } - - private List getFailureReasons(final AtomicReference replicationRunnableFailureRef, - final AtomicReference destinationRunnableFailureRef, - final ReplicationOutput output) { - // only .setFailures() if a failure occurred or if there is an AirbyteErrorTraceMessage - final 
FailureReason sourceFailure = replicationRunnableFailureRef.get(); - final FailureReason destinationFailure = destinationRunnableFailureRef.get(); - final FailureReason traceMessageFailure = messageTracker.errorTraceMessageFailure(Long.valueOf(jobId), attempt); - - final List failures = new ArrayList<>(); - - if (traceMessageFailure != null) { - failures.add(traceMessageFailure); - } - - if (sourceFailure != null) { - failures.add(sourceFailure); - } - if (destinationFailure != null) { - failures.add(destinationFailure); - } - if (!failures.isEmpty()) { - output.setFailures(failures); - } - return failures; - } - - private static void validateSchema(final RecordSchemaValidator recordSchemaValidator, - Map> streamToAllFields, - Map> unexpectedFields, - final Map, Integer>> validationErrors, - final AirbyteMessage message) { - if (message.getRecord() == null) { - return; - } - - final AirbyteRecordMessage record = message.getRecord(); - final AirbyteStreamNameNamespacePair messageStream = AirbyteStreamNameNamespacePair.fromRecordMessage(record); - // avoid noise by validating only if the stream has less than 10 records with validation errors - final boolean streamHasLessThenTenErrs = - validationErrors.get(messageStream) == null || validationErrors.get(messageStream).getRight() < 10; - if (streamHasLessThenTenErrs) { - try { - recordSchemaValidator.validateSchema(record, messageStream); - final Set unexpectedFieldNames = unexpectedFields.getOrDefault(messageStream, new HashSet<>()); - populateUnexpectedFieldNames(record, streamToAllFields.get(messageStream), unexpectedFieldNames); - unexpectedFields.put(messageStream, unexpectedFieldNames); - } catch (final RecordSchemaValidationException e) { - final ImmutablePair, Integer> exceptionWithCount = validationErrors.get(messageStream); - if (exceptionWithCount == null) { - validationErrors.put(messageStream, new ImmutablePair<>(e.errorMessages, 1)); - } else { - final Integer currentCount = exceptionWithCount.getRight(); 
- final Set currentErrorMessages = exceptionWithCount.getLeft(); - final Set updatedErrorMessages = Stream.concat(currentErrorMessages.stream(), e.errorMessages.stream()).collect(Collectors.toSet()); - validationErrors.put(messageStream, new ImmutablePair<>(updatedErrorMessages, currentCount + 1)); - } - } - } - } - - private static void populateUnexpectedFieldNames(AirbyteRecordMessage record, Set fieldsInCatalog, Set unexpectedFieldNames) { - final JsonNode data = record.getData(); - if (data.isObject()) { - Iterator fieldNamesInRecord = data.fieldNames(); - while (fieldNamesInRecord.hasNext()) { - final String fieldName = fieldNamesInRecord.next(); - if (!fieldsInCatalog.contains(fieldName)) { - unexpectedFieldNames.add(fieldName); - } - } - } - // If it's not an object it's malformed, but we tolerate it here - it will be logged as an error by - // the validation. - } - - /** - * Generates a map from stream -> the explicit list of fields included for that stream, according to - * the configured catalog. Since the configured catalog only includes the selected fields, this lets - * us filter records to only the fields explicitly requested. - * - * @param catalog - * @param streamToSelectedFields - */ - private static void populatedStreamToSelectedFields(final ConfiguredAirbyteCatalog catalog, - final Map> streamToSelectedFields) { - for (final var s : catalog.getStreams()) { - final List selectedFields = new ArrayList<>(); - final JsonNode propertiesNode = s.getStream().getJsonSchema().findPath("properties"); - if (propertiesNode.isObject()) { - propertiesNode.fieldNames().forEachRemaining((fieldName) -> selectedFields.add(fieldName)); - } else { - throw new RuntimeException("No properties node in stream schema"); - } - streamToSelectedFields.put(AirbyteStreamNameNamespacePair.fromConfiguredAirbyteSteam(s), selectedFields); - } - } - - /** - * Populates a map for stream -> all the top-level fields in the catalog. 
Used to identify any - * unexpected top-level fields in the records. - * - * @param catalog - * @param streamToAllFields - */ - private static void populateStreamToAllFields(final ConfiguredAirbyteCatalog catalog, - final Map> streamToAllFields) { - for (final var s : catalog.getStreams()) { - final Set fields = new HashSet<>(); - final JsonNode propertiesNode = s.getStream().getJsonSchema().findPath("properties"); - if (propertiesNode.isObject()) { - propertiesNode.fieldNames().forEachRemaining((fieldName) -> fields.add(fieldName)); - } else { - throw new RuntimeException("No properties node in stream schema"); - } - streamToAllFields.put(AirbyteStreamNameNamespacePair.fromConfiguredAirbyteSteam(s), fields); - } - } - - private static void filterSelectedFields(final Map> streamToSelectedFields, - final AirbyteMessage airbyteMessage) { - final AirbyteRecordMessage record = airbyteMessage.getRecord(); - - if (record == null) { - // This isn't a record message, so we don't need to do any filtering. - return; - } - - final AirbyteStreamNameNamespacePair messageStream = AirbyteStreamNameNamespacePair.fromRecordMessage(record); - final List selectedFields = streamToSelectedFields.getOrDefault(messageStream, Collections.emptyList()); - final JsonNode data = record.getData(); - if (data.isObject()) { - ((ObjectNode) data).retain(selectedFields); - } else { - throw new RuntimeException(String.format("Unexpected data in record: %s", data.toString())); - } - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void cancel() { - // Resources are closed in the opposite order they are declared. 
- LOGGER.info("Cancelling replication worker..."); - try { - executors.awaitTermination(10, TimeUnit.SECONDS); - } catch (final InterruptedException e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Unable to cancel due to interruption.", e); - } - cancelled.set(true); - - LOGGER.info("Cancelling destination..."); - try { - destination.cancel(); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.info("Error cancelling destination: ", e); - } - - LOGGER.info("Cancelling source..."); - try { - source.cancel(); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.info("Error cancelling source: ", e); - } - - } - - private Map generateTraceTags(final WorkerDestinationConfig destinationConfig, final Path jobRoot) { - final Map tags = new HashMap<>(); - - tags.put(JOB_ID_KEY, jobId); - tags.put(JOB_ROOT_KEY, jobRoot); - - if (destinationConfig != null) { - if (destinationConfig.getConnectionId() != null) { - tags.put(CONNECTION_ID_KEY, destinationConfig.getConnectionId()); - } - } - - return tags; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/EchoWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/EchoWorker.java deleted file mode 100644 index fadec03c6aa4..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/EchoWorker.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.workers.Worker; -import java.nio.file.Path; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class EchoWorker implements Worker { - - private static final Logger LOGGER = LoggerFactory.getLogger(EchoWorker.class); - - public EchoWorker() {} - - @Override - public String run(final String string, final Path jobRoot) { - LOGGER.info("Hello World. 
input: {}, workspace root: {}", string, jobRoot); - return "echoed"; - } - - @Override - public void cancel() { - // no-op - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorker.java deleted file mode 100644 index 78062a93eae0..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorker.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.config.ReplicationOutput; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.workers.Worker; - -public interface ReplicationWorker extends Worker {} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ConnectionHelper.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ConnectionHelper.java deleted file mode 100644 index 2017f2564240..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ConnectionHelper.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.helper; - -import com.google.common.base.Preconditions; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.BasicSchedule; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.Schedule; -import io.airbyte.config.ScheduleData; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.ScheduleType; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.List; -import java.util.UUID; -import javax.annotation.Nullable; - -// todo (cgardens) - we are not getting any value out of instantiating this class. we should just -// use it as statics. not doing it now, because already in the middle of another refactor. -@Singleton -public class ConnectionHelper { - - private final ConfigRepository configRepository; - private final WorkspaceHelper workspaceHelper; - - public ConnectionHelper(final ConfigRepository configRepository, final WorkspaceHelper workspaceHelper) { - this.configRepository = configRepository; - this.workspaceHelper = workspaceHelper; - } - - public void deleteConnection(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSync update = Jsons.clone(configRepository.getStandardSync(connectionId).withStatus(StandardSync.Status.DEPRECATED)); - updateConnection(update); - } - - /** - * Given a connection update, fetches an existing connection, applies the update, and then persists - * the update. - * - * @param update - updated sync info to be merged with original sync. 
- * @return new sync object - * @throws JsonValidationException - if provided object is invalid - * @throws ConfigNotFoundException - if there is no sync already persisted - * @throws IOException - you never know when you io - */ - public StandardSync updateConnection(final StandardSync update) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSync original = configRepository.getStandardSync(update.getConnectionId()); - final StandardSync newConnection = updateConnectionObject(workspaceHelper, original, update); - configRepository.writeStandardSync(newConnection); - return newConnection; - } - - /** - * Core logic for merging an existing connection configuration with an update. - * - * @param workspaceHelper - helper class - * @param original - already persisted sync - * @param update - updated sync info to be merged with original sync. - * @return new sync object - */ - public static StandardSync updateConnectionObject(final WorkspaceHelper workspaceHelper, final StandardSync original, final StandardSync update) { - validateWorkspace(workspaceHelper, original.getSourceId(), original.getDestinationId(), update.getOperationIds()); - - final StandardSync newConnection = Jsons.clone(original) - .withNamespaceDefinition(Enums.convertTo(update.getNamespaceDefinition(), NamespaceDefinitionType.class)) - .withNamespaceFormat(update.getNamespaceFormat()) - .withPrefix(update.getPrefix()) - .withOperationIds(update.getOperationIds()) - .withCatalog(update.getCatalog()) - .withStatus(update.getStatus()) - .withSourceCatalogId(update.getSourceCatalogId()); - - // update name - if (update.getName() != null) { - newConnection.withName(update.getName()); - } - - // update Resource Requirements - if (update.getResourceRequirements() != null) { - newConnection.withResourceRequirements(Jsons.clone(update.getResourceRequirements())); - } else { - newConnection.withResourceRequirements(original.getResourceRequirements()); - } - - if 
(update.getScheduleType() != null) { - newConnection.withScheduleType(update.getScheduleType()); - newConnection.withManual(update.getManual()); - if (update.getScheduleData() != null) { - newConnection.withScheduleData(Jsons.clone(update.getScheduleData())); - } - } else if (update.getSchedule() != null) { - final Schedule newSchedule = new Schedule() - .withTimeUnit(update.getSchedule().getTimeUnit()) - .withUnits(update.getSchedule().getUnits()); - newConnection.withManual(false).withSchedule(newSchedule); - // Also write into the new field. This one will be consumed if populated. - newConnection - .withScheduleType(ScheduleType.BASIC_SCHEDULE); - newConnection.withScheduleData(new ScheduleData().withBasicSchedule( - new BasicSchedule().withTimeUnit(convertTimeUnitSchema(update.getSchedule().getTimeUnit())) - .withUnits(update.getSchedule().getUnits()))); - } else { - newConnection.withManual(true).withSchedule(null); - newConnection.withScheduleType(ScheduleType.MANUAL).withScheduleData(null); - } - - return newConnection; - } - - public static void validateWorkspace(final WorkspaceHelper workspaceHelper, - final UUID sourceId, - final UUID destinationId, - final @Nullable List operationIds) { - final UUID sourceWorkspace = workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(sourceId); - final UUID destinationWorkspace = workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(destinationId); - - Preconditions.checkArgument( - sourceWorkspace.equals(destinationWorkspace), - String.format( - "Source and destination do not belong to the same workspace. 
Source id: %s, Source workspace id: %s, Destination id: %s, Destination workspace id: %s", - sourceId, - sourceWorkspace, - destinationId, - destinationWorkspace)); - - if (operationIds != null) { - for (final UUID operationId : operationIds) { - final UUID operationWorkspace = workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(operationId); - Preconditions.checkArgument( - sourceWorkspace.equals(operationWorkspace), - String.format( - "Operation and connection do not belong to the same workspace. Workspace id: %s, Operation id: %s, Operation workspace id: %s", - sourceWorkspace, - operationId, - operationWorkspace)); - } - } - } - - // Helper method to convert between TimeUnit enums for old and new schedule schemas. - private static BasicSchedule.TimeUnit convertTimeUnitSchema(final Schedule.TimeUnit timeUnit) { - switch (timeUnit) { - case MINUTES: - return BasicSchedule.TimeUnit.MINUTES; - case HOURS: - return BasicSchedule.TimeUnit.HOURS; - case DAYS: - return BasicSchedule.TimeUnit.DAYS; - case WEEKS: - return BasicSchedule.TimeUnit.WEEKS; - case MONTHS: - return BasicSchedule.TimeUnit.MONTHS; - default: - throw new RuntimeException("Unhandled TimeUnitEnum: " + timeUnit); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ProtocolConverters.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ProtocolConverters.java deleted file mode 100644 index edccab0fecea..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ProtocolConverters.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -import io.airbyte.api.model.generated.StreamDescriptor; - -/** - * Utilities that convert protocol types into API or client representations of the protocol type. 
- */ -public class ProtocolConverters { - - public static StreamDescriptor streamDescriptorToApi(final io.airbyte.protocol.models.StreamDescriptor protocolStreamDescriptor) { - return new StreamDescriptor().name(protocolStreamDescriptor.getName()).namespace(protocolStreamDescriptor.getNamespace()); - } - - public static io.airbyte.api.client.model.generated.StreamDescriptor streamDescriptorToClient(final io.airbyte.protocol.models.StreamDescriptor protocolStreamDescriptor) { - return new io.airbyte.api.client.model.generated.StreamDescriptor() - .name(protocolStreamDescriptor.getName()) - .namespace(protocolStreamDescriptor.getNamespace()); - } - - public static io.airbyte.protocol.models.StreamDescriptor streamDescriptorToProtocol(final StreamDescriptor apiStreamDescriptor) { - return new io.airbyte.protocol.models.StreamDescriptor().withName(apiStreamDescriptor.getName()) - .withNamespace(apiStreamDescriptor.getNamespace()); - } - - public static io.airbyte.protocol.models.StreamDescriptor clientStreamDescriptorToProtocol(final io.airbyte.api.client.model.generated.StreamDescriptor clientStreamDescriptor) { - return new io.airbyte.protocol.models.StreamDescriptor().withName(clientStreamDescriptor.getName()) - .withNamespace(clientStreamDescriptor.getNamespace()); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/StateConverter.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/StateConverter.java deleted file mode 100644 index 5be2bf1f6a45..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/StateConverter.java +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.helper; - -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.GlobalState; -import io.airbyte.api.model.generated.StreamState; -import io.airbyte.commons.enums.Enums; -import io.airbyte.config.StateType; -import io.airbyte.config.StateWrapper; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamState; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import javax.annotation.Nullable; - -public class StateConverter { - - /** - * Converts internal representation of state to API representation - * - * @param connectionId connection associated with the state - * @param stateWrapper internal state representation to convert - * @return api representation of state - */ - public static ConnectionState toApi(final UUID connectionId, final @Nullable StateWrapper stateWrapper) { - return new ConnectionState() - .connectionId(connectionId) - .stateType(convertStateTypeToApi(stateWrapper)) - .state(stateWrapper != null ? 
stateWrapper.getLegacyState() : null) - .globalState(globalStateToApi(stateWrapper).orElse(null)) - .streamState(streamStateToApi(stateWrapper).orElse(null)); - } - - /** - * Converts internal representation of state to client representation - * - * @param connectionId connection associated with the state - * @param stateWrapper internal state representation to convert - * @return client representation of state - */ - public static io.airbyte.api.client.model.generated.ConnectionState toClient(final UUID connectionId, final @Nullable StateWrapper stateWrapper) { - return new io.airbyte.api.client.model.generated.ConnectionState() - .connectionId(connectionId) - .stateType(convertStateTypeToClient(stateWrapper)) - .state(stateWrapper != null ? stateWrapper.getLegacyState() : null) - .globalState(globalStateToClient(stateWrapper).orElse(null)) - .streamState(streamStateToClient(stateWrapper).orElse(null)); - } - - /** - * Converts API representation of state to internal representation - * - * @param apiConnectionState api representation of state - * @return internal representation of state - */ - public static StateWrapper toInternal(final @Nullable ConnectionState apiConnectionState) { - return new StateWrapper() - .withStateType(convertStateTypeToInternal(apiConnectionState).orElse(null)) - .withGlobal(globalStateToInternal(apiConnectionState).orElse(null)) - .withLegacyState(apiConnectionState != null ? apiConnectionState.getState() : null) - .withStateMessages(streamStateToInternal(apiConnectionState).orElse(null)); - - } - - public static StateWrapper clientToInternal(final @Nullable io.airbyte.api.client.model.generated.ConnectionState clientConnectionState) { - return new StateWrapper() - .withStateType(clientConnectionState != null ? convertClientStateTypeToInternal(clientConnectionState.getStateType()) : null) - .withGlobal(clientGlobalStateToInternal(clientConnectionState).orElse(null)) - .withLegacyState(clientConnectionState != null ? 
clientConnectionState.getState() : null) - .withStateMessages(clientStreamStateToInternal(clientConnectionState).orElse(null)); - - } - - public static StateType convertClientStateTypeToInternal(final @Nullable io.airbyte.api.client.model.generated.ConnectionStateType connectionStateType) { - if (connectionStateType == null || connectionStateType.equals(io.airbyte.api.client.model.generated.ConnectionStateType.NOT_SET)) { - return null; - } else { - return Enums.convertTo(connectionStateType, StateType.class); - } - } - - /** - * Convert to API representation of state type. API has an additional type (NOT_SET). This - * represents the case where no state is saved so we do not know the state type. - * - * @param stateWrapper state to convert - * @return api representation of state type - */ - private static ConnectionStateType convertStateTypeToApi(final @Nullable StateWrapper stateWrapper) { - if (stateWrapper == null || stateWrapper.getStateType() == null) { - return ConnectionStateType.NOT_SET; - } else { - return Enums.convertTo(stateWrapper.getStateType(), ConnectionStateType.class); - } - } - - /** - * Convert to client representation of state type. The client model has an additional type - * (NOT_SET). This represents the case where no state is saved so we do not know the state type. - * - * @param stateWrapper state to convert - * @return client representation of state type - */ - private static io.airbyte.api.client.model.generated.ConnectionStateType convertStateTypeToClient(final @Nullable StateWrapper stateWrapper) { - if (stateWrapper == null || stateWrapper.getStateType() == null) { - return io.airbyte.api.client.model.generated.ConnectionStateType.NOT_SET; - } else { - return Enums.convertTo(stateWrapper.getStateType(), io.airbyte.api.client.model.generated.ConnectionStateType.class); - } - } - - /** - * Convert to internal representation of state type, if set. Otherise, empty optional - * - * @param connectionState API state to convert. 
- * @return internal state type, if set. Otherwise, empty optional. - */ - private static Optional convertStateTypeToInternal(final @Nullable ConnectionState connectionState) { - if (connectionState == null || connectionState.getStateType().equals(ConnectionStateType.NOT_SET)) { - return Optional.empty(); - } else { - return Optional.of(Enums.convertTo(connectionState.getStateType(), StateType.class)); - } - } - - /** - * If wrapper is of type global state, returns API representation of global state. Otherwise, empty - * optional. - * - * @param stateWrapper state wrapper to extract from - * @return api representation of global state if state wrapper is type global. Otherwise, empty - * optional. - */ - private static Optional globalStateToApi(final @Nullable StateWrapper stateWrapper) { - if (stateWrapper != null - && stateWrapper.getStateType() == StateType.GLOBAL - && stateWrapper.getGlobal() != null - && stateWrapper.getGlobal().getGlobal() != null) { - return Optional.of(new GlobalState() - .sharedState(stateWrapper.getGlobal().getGlobal().getSharedState()) - .streamStates(stateWrapper.getGlobal().getGlobal().getStreamStates() - .stream() - .map(StateConverter::streamStateStructToApi) - .toList())); - } else { - return Optional.empty(); - } - } - - /** - * If wrapper is of type global state, returns client representation of global state. Otherwise, - * empty optional. - * - * @param stateWrapper state wrapper to extract from - * @return client representation of global state if state wrapper is type global. Otherwise, empty - * optional. 
- */ - private static Optional globalStateToClient(final @Nullable StateWrapper stateWrapper) { - if (stateWrapper != null - && stateWrapper.getStateType() == StateType.GLOBAL - && stateWrapper.getGlobal() != null - && stateWrapper.getGlobal().getGlobal() != null) { - return Optional.of(new io.airbyte.api.client.model.generated.GlobalState() - .sharedState(stateWrapper.getGlobal().getGlobal().getSharedState()) - .streamStates(stateWrapper.getGlobal().getGlobal().getStreamStates() - .stream() - .map(StateConverter::streamStateStructToClient) - .toList())); - } else { - return Optional.empty(); - } - } - - /** - * If API state is of type global, returns internal representation of global state. Otherwise, empty - * optional. - * - * @param connectionState API state representation to extract from - * @return global state message if API state is of type global. Otherwise, empty optional. - */ - private static Optional globalStateToInternal(final @Nullable ConnectionState connectionState) { - if (connectionState != null - && connectionState.getStateType() == ConnectionStateType.GLOBAL - && connectionState.getGlobalState() != null) { - return Optional.of(new AirbyteStateMessage() - .withGlobal(new AirbyteGlobalState() - .withSharedState(connectionState.getGlobalState().getSharedState()) - .withStreamStates(connectionState.getGlobalState().getStreamStates() - .stream() - .map(StateConverter::streamStateStructToInternal) - .toList()))); - } else { - return Optional.empty(); - } - } - - private static Optional clientGlobalStateToInternal(final @Nullable io.airbyte.api.client.model.generated.ConnectionState connectionState) { - if (connectionState != null - && connectionState.getStateType() == io.airbyte.api.client.model.generated.ConnectionStateType.GLOBAL - && connectionState.getGlobalState() != null) { - return Optional.of(new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - 
.withSharedState(connectionState.getGlobalState().getSharedState()) - .withStreamStates(connectionState.getGlobalState().getStreamStates() - .stream() - .map(StateConverter::clientStreamStateStructToInternal) - .toList()))); - } else { - return Optional.empty(); - } - } - - /** - * If wrapper is of type stream state, returns API representation of stream state. Otherwise, empty - * optional. - * - * @param stateWrapper state wrapper to extract from - * @return api representation of stream state if state wrapper is type stream. Otherwise, empty - * optional. - */ - private static Optional> streamStateToApi(final @Nullable StateWrapper stateWrapper) { - if (stateWrapper != null && stateWrapper.getStateType() == StateType.STREAM && stateWrapper.getStateMessages() != null) { - return Optional.ofNullable(stateWrapper.getStateMessages() - .stream() - .map(AirbyteStateMessage::getStream) - .map(StateConverter::streamStateStructToApi) - .toList()); - } else { - return Optional.empty(); - } - } - - /** - * If wrapper is of type stream state, returns client representation of stream state. Otherwise, - * empty optional. - * - * @param stateWrapper state wrapper to extract from - * @return client representation of stream state if state wrapper is type stream. Otherwise, empty - * optional. - */ - private static Optional> streamStateToClient(final @Nullable StateWrapper stateWrapper) { - if (stateWrapper != null && stateWrapper.getStateType() == StateType.STREAM && stateWrapper.getStateMessages() != null) { - return Optional.ofNullable(stateWrapper.getStateMessages() - .stream() - .map(AirbyteStateMessage::getStream) - .map(StateConverter::streamStateStructToClient) - .toList()); - } else { - return Optional.empty(); - } - } - - /** - * If API state is of type stream, returns internal representation of stream state. Otherwise, empty - * optional. 
- * - * @param connectionState API representation of state to extract from - * @return internal representation of stream state if API state representation is of type stream. - * Otherwise, empty optional. - */ - private static Optional> streamStateToInternal(final @Nullable ConnectionState connectionState) { - if (connectionState != null && connectionState.getStateType() == ConnectionStateType.STREAM && connectionState.getStreamState() != null) { - return Optional.ofNullable(connectionState.getStreamState() - .stream() - .map(StateConverter::streamStateStructToInternal) - .map(s -> new AirbyteStateMessage().withStream(s)) - .toList()); - } else { - return Optional.empty(); - } - } - - private static Optional> clientStreamStateToInternal(final @Nullable io.airbyte.api.client.model.generated.ConnectionState connectionState) { - if (connectionState != null && connectionState.getStateType() == io.airbyte.api.client.model.generated.ConnectionStateType.STREAM - && connectionState.getStreamState() != null) { - return Optional.ofNullable(connectionState.getStreamState() - .stream() - .map(StateConverter::clientStreamStateStructToInternal) - .map(s -> new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(s)) - .toList()); - } else { - return Optional.empty(); - } - } - - private static StreamState streamStateStructToApi(final AirbyteStreamState streamState) { - return new StreamState() - .streamDescriptor(ProtocolConverters.streamDescriptorToApi(streamState.getStreamDescriptor())) - .streamState(streamState.getStreamState()); - } - - private static io.airbyte.api.client.model.generated.StreamState streamStateStructToClient(final AirbyteStreamState streamState) { - return new io.airbyte.api.client.model.generated.StreamState() - .streamDescriptor(ProtocolConverters.streamDescriptorToClient(streamState.getStreamDescriptor())) - .streamState(streamState.getStreamState()); - } - - private static AirbyteStreamState streamStateStructToInternal(final StreamState 
streamState) { - return new AirbyteStreamState() - .withStreamDescriptor(ProtocolConverters.streamDescriptorToProtocol(streamState.getStreamDescriptor())) - .withStreamState(streamState.getStreamState()); - } - - private static AirbyteStreamState clientStreamStateStructToInternal(final io.airbyte.api.client.model.generated.StreamState streamState) { - return new AirbyteStreamState() - .withStreamDescriptor(ProtocolConverters.clientStreamDescriptorToProtocol(streamState.getStreamDescriptor())) - .withStreamState(streamState.getStreamState()); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ThreadedTimeTracker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ThreadedTimeTracker.java deleted file mode 100644 index 258db80bbc62..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/ThreadedTimeTracker.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -/** - * This class exists to track timing information for the sync. It needs to be thread-safe as - * multiple threads (source, destination, and worker) will be accessing it. 
- */ -public class ThreadedTimeTracker { - - private long replicationStartTime; - private long replicationEndTime; - private long sourceReadStartTime; - private long sourceReadEndTime; - private long destinationWriteStartTime; - private long destinationWriteEndTime; - - public synchronized void trackReplicationStartTime() { - this.replicationStartTime = System.currentTimeMillis(); - } - - public synchronized void trackReplicationEndTime() { - this.replicationEndTime = System.currentTimeMillis(); - } - - public synchronized void trackSourceReadStartTime() { - this.sourceReadStartTime = System.currentTimeMillis(); - } - - public synchronized void trackSourceReadEndTime() { - this.sourceReadEndTime = System.currentTimeMillis(); - } - - public synchronized void trackDestinationWriteStartTime() { - this.destinationWriteStartTime = System.currentTimeMillis(); - } - - public synchronized void trackDestinationWriteEndTime() { - this.destinationWriteEndTime = System.currentTimeMillis(); - } - - public synchronized long getReplicationStartTime() { - return this.replicationStartTime; - } - - public synchronized long getReplicationEndTime() { - return this.replicationEndTime; - } - - public synchronized long getSourceReadStartTime() { - return this.sourceReadStartTime; - } - - public synchronized long getSourceReadEndTime() { - return this.sourceReadEndTime; - } - - public synchronized long getDestinationWriteStartTime() { - return this.destinationWriteStartTime; - } - - public synchronized long getDestinationWriteEndTime() { - return this.destinationWriteEndTime; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMapper.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMapper.java deleted file mode 100644 index ca26d711c8e4..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMapper.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all 
rights reserved. - */ - -package io.airbyte.workers.internal; - -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; - -/** - * Interface to allow map operations on data as they pass from Source to Destination. This interface - * will be updated in Protocol V2. - */ -public interface AirbyteMapper { - - ConfiguredAirbyteCatalog mapCatalog(ConfiguredAirbyteCatalog catalog); - - AirbyteMessage mapMessage(AirbyteMessage message); - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java index 8ad226f5c776..1db8c67c8530 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java @@ -4,10 +4,7 @@ package io.airbyte.workers.internal; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - import com.fasterxml.jackson.databind.JsonNode; -import datadog.trace.api.Trace; import io.airbyte.protocol.models.AirbyteProtocolSchema; import io.airbyte.validation.json.JsonSchemaValidator; import java.util.function.Predicate; @@ -27,7 +24,6 @@ public AirbyteProtocolPredicate() { jsonSchemaValidator.initializeSchemaValidator(PROTOCOL_SCHEMA_NAME, schema); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public boolean test(final JsonNode s) { return jsonSchemaValidator.testInitializedSchema(PROTOCOL_SCHEMA_NAME, s); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java index 76d09bc150fd..4f1ee56bbd7c 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java +++ 
b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java @@ -4,11 +4,8 @@ package io.airbyte.workers.internal; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - import com.google.common.base.Charsets; import com.google.common.base.Preconditions; -import datadog.trace.api.Trace; import io.airbyte.commons.io.IOs; import io.airbyte.commons.io.LineGobbler; import io.airbyte.commons.json.Jsons; @@ -76,7 +73,6 @@ public DefaultAirbyteDestination(final IntegrationLauncher integrationLauncher, this.protocolSerializer = protocolSerializer; } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void start(final WorkerDestinationConfig destinationConfig, final Path jobRoot) throws IOException, WorkerException { Preconditions.checkState(destinationProcess == null); @@ -99,7 +95,6 @@ public void start(final WorkerDestinationConfig destinationConfig, final Path jo .iterator(); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void accept(final AirbyteMessage message) throws IOException { Preconditions.checkState(destinationProcess != null && !inputHasEnded.get()); @@ -107,7 +102,6 @@ public void accept(final AirbyteMessage message) throws IOException { writer.write(message); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void notifyEndOfInput() throws IOException { Preconditions.checkState(destinationProcess != null && !inputHasEnded.get()); @@ -117,7 +111,6 @@ public void notifyEndOfInput() throws IOException { inputHasEnded.set(true); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void close() throws Exception { if (destinationProcess == null) { @@ -138,7 +131,6 @@ public void close() throws Exception { } } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void cancel() throws Exception { LOGGER.info("Attempting to cancel destination process..."); @@ -152,7 +144,6 @@ public void cancel() throws Exception { } } - 
@Trace(operationName = WORKER_OPERATION_NAME) @Override public boolean isFinished() { Preconditions.checkState(destinationProcess != null); @@ -163,7 +154,6 @@ public boolean isFinished() { return !messageIterator.hasNext() && !destinationProcess.isAlive(); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public int getExitValue() { Preconditions.checkState(destinationProcess != null, "Destination process is null, cannot retrieve exit value."); @@ -176,7 +166,6 @@ public int getExitValue() { return exitValue; } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public Optional attemptRead() { Preconditions.checkState(destinationProcess != null); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java index 63e46461681c..bf04a2229b83 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java @@ -4,11 +4,8 @@ package io.airbyte.workers.internal; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import datadog.trace.api.Trace; import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.io.IOs; import io.airbyte.commons.io.LineGobbler; @@ -85,7 +82,6 @@ public DefaultAirbyteSource(final IntegrationLauncher integrationLauncher, this.featureFlagLogConnectorMsgs = featureFlags.logConnectorMessages(); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void start(final WorkerSourceConfig sourceConfig, final Path jobRoot) throws Exception { Preconditions.checkState(sourceProcess == null); @@ -110,7 +106,6 @@ public void start(final WorkerSourceConfig sourceConfig, final Path jobRoot) thr .iterator(); } - 
@Trace(operationName = WORKER_OPERATION_NAME) @Override public boolean isFinished() { Preconditions.checkState(sourceProcess != null); @@ -122,7 +117,6 @@ public boolean isFinished() { return !messageIterator.hasNext() && !sourceProcess.isAlive(); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public int getExitValue() throws IllegalStateException { Preconditions.checkState(sourceProcess != null, "Source process is null, cannot retrieve exit value."); @@ -135,7 +129,6 @@ public int getExitValue() throws IllegalStateException { return exitValue; } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public Optional attemptRead() { Preconditions.checkState(sourceProcess != null); @@ -143,7 +136,6 @@ public Optional attemptRead() { return Optional.ofNullable(messageIterator.hasNext() ? messageIterator.next() : null); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void close() throws Exception { if (sourceProcess == null) { @@ -163,7 +155,6 @@ public void close() throws Exception { } } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void cancel() throws Exception { LOGGER.info("Attempting to cancel source process..."); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java index 9fc14c63ec7a..e0b16aaf6aa5 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java @@ -4,15 +4,10 @@ package io.airbyte.workers.internal; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; -import datadog.trace.api.Trace; import io.airbyte.commons.json.Jsons; import 
io.airbyte.commons.logging.MdcScope; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.protocol.models.AirbyteLogMessage; import io.airbyte.protocol.models.AirbyteMessage; import java.io.BufferedReader; @@ -85,13 +80,10 @@ public DefaultAirbyteStreamFactory(final MdcScope.Builder containerLogMdcBuilder this.maxMemory = maxMemory; } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public Stream create(final BufferedReader bufferedReader) { - final var metricClient = MetricClientFactory.getMetricClient(); return bufferedReader .lines() - .peek(str -> metricClient.distribution(OssMetricsRegistry.JSON_STRING_LENGTH, str.getBytes(StandardCharsets.UTF_8).length)) .peek(str -> { if (exceptionClass.isPresent()) { final long messageSize = str.getBytes(StandardCharsets.UTF_8).length; diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/EmptyAirbyteSource.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/EmptyAirbyteSource.java deleted file mode 100644 index 344c4b1712c0..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/EmptyAirbyteSource.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal; - -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ResetSourceConfiguration; -import io.airbyte.config.StateType; -import io.airbyte.config.StateWrapper; -import io.airbyte.config.WorkerSourceConfig; -import io.airbyte.config.helpers.StateMessageHelper; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.StreamDescriptor; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.Optional; -import java.util.Queue; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - -/** - * This source will never emit any messages. It can be used in cases where that is helpful (hint: - * reset connection jobs). 
- */ -@Slf4j -public class EmptyAirbyteSource implements AirbyteSource { - - private final AtomicBoolean hasEmittedState; - private final Queue streamsToReset = new LinkedList<>(); - private final boolean useStreamCapableState; - // TODO: Once we are sure that the legacy way of transmitting the state is not use anymore, we need - // to remove this variable and the associated - // checks - private boolean isResetBasedForConfig; - private boolean isStarted = false; - private Optional stateWrapper; - - public EmptyAirbyteSource(final boolean useStreamCapableState) { - hasEmittedState = new AtomicBoolean(); - this.useStreamCapableState = useStreamCapableState; - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void start(final WorkerSourceConfig workerSourceConfig, final Path jobRoot) throws Exception { - - if (workerSourceConfig == null || workerSourceConfig.getSourceConnectionConfiguration() == null) { - // TODO: When the jobConfig is fully updated and tested, we can remove this extra check that makes - // us compatible with running a reset with - // a null config - /* - * This is a protection against reverting a commit that set the resetSourceConfiguration, it makes - * that there is not side effect of such a revert. The legacy behavior is to have the config as an - * empty jsonObject, this is an extra protection if the workerConfiguration is null. In the previous - * implementation it was unused so passing it as null should not result in a NPE or a parsing - * failure. 
- */ - isResetBasedForConfig = false; - } else { - final ResetSourceConfiguration resetSourceConfiguration; - resetSourceConfiguration = parseResetSourceConfigurationAndLogError(workerSourceConfig); - streamsToReset.addAll(resetSourceConfiguration.getStreamsToReset()); - - if (streamsToReset.isEmpty()) { - // TODO: This is done to be able to handle the transition period where we can have no stream being - // pass to the configuration because the - // logic of populating this list is not implemented - /* - * This is a protection against reverting a commit that set the resetSourceConfiguration, it makes - * that there is not side effect of such a revert. The legacy behavior is to have the config as an - * empty object, it has been changed here: - * https://github.com/airbytehq/airbyte/pull/13696/files#diff- - * f51ff997b60a346c704608bb1cd7d22457eda2559b42987d5fa1281d568fc222L40 - */ - isResetBasedForConfig = false; - } else { - if (workerSourceConfig.getState() != null) { - stateWrapper = StateMessageHelper.getTypedState(workerSourceConfig.getState().getState(), useStreamCapableState); - - if (stateWrapper.isPresent() && - stateWrapper.get().getStateType() == StateType.LEGACY && - !resettingAllCatalogStreams(workerSourceConfig)) { - log.error("The state a legacy one but we are trying to do a partial update, this is not supported."); - throw new IllegalStateException("Try to perform a partial reset on a legacy state"); - } - - isResetBasedForConfig = true; - } else { - /// No state - isResetBasedForConfig = false; - } - - } - } - isStarted = true; - } - - // always finished. it has no data to send. 
- @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public boolean isFinished() { - return hasEmittedState.get(); - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public int getExitValue() { - return 0; - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public Optional attemptRead() { - if (!isStarted) { - throw new IllegalStateException("The empty source has not been started."); - } - - if (isResetBasedForConfig) { - if (stateWrapper.get().getStateType() == StateType.STREAM) { - return emitStreamState(); - } else if (stateWrapper.get().getStateType() == StateType.GLOBAL) { - return emitGlobalState(); - } else { - return emitLegacyState(); - } - } else { - return emitLegacyState(); - } - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void close() throws Exception { - // no op. - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void cancel() throws Exception { - // no op. - } - - private Optional emitStreamState() { - // Per stream, it will emit one message per stream being reset - if (!streamsToReset.isEmpty()) { - final StreamDescriptor streamDescriptor = streamsToReset.poll(); - return Optional.of(getNullStreamStateMessage(streamDescriptor)); - } else { - hasEmittedState.compareAndSet(false, true); - return Optional.empty(); - } - } - - private Optional emitGlobalState() { - if (hasEmittedState.get()) { - return Optional.empty(); - } else { - hasEmittedState.compareAndSet(false, true); - return Optional.of(getNullGlobalMessage(streamsToReset, stateWrapper.get().getGlobal())); - } - } - - private Optional emitLegacyState() { - if (hasEmittedState.get()) { - return Optional.empty(); - } else { - hasEmittedState.compareAndSet(false, true); - return Optional.of(new AirbyteMessage().withType(Type.STATE) - .withState(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.emptyObject()))); - } - } - - private boolean resettingAllCatalogStreams(final 
WorkerSourceConfig sourceConfig) { - final Set catalogStreamDescriptors = sourceConfig.getCatalog().getStreams().stream().map( - configuredAirbyteStream -> new StreamDescriptor() - .withName(configuredAirbyteStream.getStream().getName()) - .withNamespace(configuredAirbyteStream.getStream().getNamespace())) - .collect(Collectors.toSet()); - final Set streamsToResetDescriptors = new HashSet<>(streamsToReset); - return streamsToResetDescriptors.containsAll(catalogStreamDescriptors); - } - - private AirbyteMessage getNullStreamStateMessage(final StreamDescriptor streamsToReset) { - return new AirbyteMessage() - .withType(Type.STATE) - .withState( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState() - .withStreamDescriptor(new io.airbyte.protocol.models.StreamDescriptor() - .withName(streamsToReset.getName()) - .withNamespace(streamsToReset.getNamespace())) - .withStreamState(null))); - } - - private AirbyteMessage getNullGlobalMessage(final Queue streamsToReset, final AirbyteStateMessage currentState) { - final AirbyteGlobalState globalState = new AirbyteGlobalState(); - globalState.setStreamStates(new ArrayList<>()); - - currentState.getGlobal().getStreamStates().forEach(existingState -> globalState.getStreamStates() - .add( - new AirbyteStreamState() - .withStreamDescriptor(existingState.getStreamDescriptor()) - .withStreamState( - streamsToReset.contains(new StreamDescriptor() - .withName(existingState.getStreamDescriptor().getName()) - .withNamespace(existingState.getStreamDescriptor().getNamespace())) ? 
null : existingState.getStreamState()))); - - // If all the streams in the current state have been reset, we consider this to be a full reset, so - // reset the shared state as well - if (currentState.getGlobal().getStreamStates().size() == globalState.getStreamStates().stream() - .filter(streamState -> streamState.getStreamState() == null).count()) { - log.info("All the streams of a global state have been reset, the shared state will be erased as well"); - globalState.setSharedState(null); - } else { - log.info("This is a partial reset, the shared state will be preserved"); - globalState.setSharedState(currentState.getGlobal().getSharedState()); - } - - // Add state being reset that are not in the current state. This is made to follow the contract of - // the global state always containing the entire - // state - streamsToReset.forEach(configStreamDescriptor -> { - final io.airbyte.protocol.models.StreamDescriptor streamDescriptor = new io.airbyte.protocol.models.StreamDescriptor() - .withName(configStreamDescriptor.getName()) - .withNamespace(configStreamDescriptor.getNamespace()); - if (!currentState.getGlobal().getStreamStates().stream().map(streamState -> streamState.getStreamDescriptor()).toList() - .contains(streamDescriptor)) { - globalState.getStreamStates().add(new AirbyteStreamState() - .withStreamDescriptor(streamDescriptor) - .withStreamState(null)); - } - }); - - return new AirbyteMessage() - .withType(Type.STATE) - .withState( - new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(globalState)); - } - - private ResetSourceConfiguration parseResetSourceConfigurationAndLogError(final WorkerSourceConfig workerSourceConfig) { - try { - return Jsons.object(workerSourceConfig.getSourceConnectionConfiguration(), ResetSourceConfiguration.class); - } catch (final IllegalArgumentException e) { - log.error("The configuration provided to the reset has an invalid format"); - throw e; - } - } - -} diff --git 
a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/NamespacingMapper.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/NamespacingMapper.java deleted file mode 100644 index 49518880e92c..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/NamespacingMapper.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteStream; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import java.util.regex.Pattern; -import org.apache.logging.log4j.util.Strings; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * We apply some transformations on the fly on the catalog (same should be done on records too) from - * the source before it reaches the destination. One of the transformation is to define the - * destination namespace where data will be stored and how to mirror (or not) the namespace used in - * the source (if any). This is configured in the UI through the syncInput. 
- */ -public class NamespacingMapper implements AirbyteMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(NamespacingMapper.class); - - private final NamespaceDefinitionType namespaceDefinition; - private final String namespaceFormat; - private final String streamPrefix; - - public NamespacingMapper(final NamespaceDefinitionType namespaceDefinition, final String namespaceFormat, final String streamPrefix) { - this.namespaceDefinition = namespaceDefinition; - this.namespaceFormat = namespaceFormat; - this.streamPrefix = streamPrefix; - } - - @Override - public ConfiguredAirbyteCatalog mapCatalog(final ConfiguredAirbyteCatalog inputCatalog) { - final ConfiguredAirbyteCatalog catalog = Jsons.clone(inputCatalog); - catalog.getStreams().forEach(s -> { - final AirbyteStream stream = s.getStream(); - // Default behavior if namespaceDefinition is not set is to follow SOURCE - if (namespaceDefinition != null) { - if (namespaceDefinition.equals(NamespaceDefinitionType.DESTINATION)) { - stream.withNamespace(null); - } else if (namespaceDefinition.equals(NamespaceDefinitionType.CUSTOMFORMAT)) { - final String namespace = formatNamespace(stream.getNamespace(), namespaceFormat); - if (namespace == null) { - LOGGER.error("Namespace Format cannot be blank for Stream {}. 
Falling back to default namespace from destination settings", - stream.getName()); - } - stream.withNamespace(namespace); - } - } - stream.withName(transformStreamName(stream.getName(), streamPrefix)); - }); - return catalog; - } - - @Override - public AirbyteMessage mapMessage(final AirbyteMessage message) { - if (message.getType() == Type.RECORD) { - // Default behavior if namespaceDefinition is not set is to follow SOURCE - if (namespaceDefinition != null) { - if (namespaceDefinition.equals(NamespaceDefinitionType.DESTINATION)) { - message.getRecord().withNamespace(null); - } else if (namespaceDefinition.equals(NamespaceDefinitionType.CUSTOMFORMAT)) { - message.getRecord().withNamespace(formatNamespace(message.getRecord().getNamespace(), namespaceFormat)); - } - } - message.getRecord().setStream(transformStreamName(message.getRecord().getStream(), streamPrefix)); - return message; - } - return message; - } - - private static String formatNamespace(final String sourceNamespace, final String namespaceFormat) { - String result = ""; - if (Strings.isNotBlank(namespaceFormat)) { - final String regex = Pattern.quote("${SOURCE_NAMESPACE}"); - result = namespaceFormat.replaceAll(regex, Strings.isNotBlank(sourceNamespace) ? 
sourceNamespace : ""); - } - if (Strings.isBlank(result)) { - result = null; - } - return result; - } - - private static String transformStreamName(final String streamName, final String prefix) { - if (Strings.isNotBlank(prefix)) { - return prefix + streamName; - } else { - return streamName; - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java deleted file mode 100644 index da97c9683286..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Preconditions; -import datadog.trace.api.Trace; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.MdcScope; -import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider; -import io.airbyte.commons.protocol.AirbyteMessageVersionedMigrator; -import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory; -import io.airbyte.commons.protocol.serde.AirbyteMessageDeserializer; -import io.airbyte.commons.version.Version; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import java.io.BufferedReader; -import java.io.IOException; -import java.util.Optional; -import java.util.function.Predicate; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Extends DefaultAirbyteStreamFactory to handle version specific conversions. 
- * - * A VersionedAirbyteStreamFactory handles parsing and validation from a specific version of the - * Airbyte Protocol as well as upgrading messages to the current version. - */ -public class VersionedAirbyteStreamFactory extends DefaultAirbyteStreamFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(VersionedAirbyteStreamFactory.class); - private static final Version fallbackVersion = new Version("0.2.0"); - - // Buffer size to use when detecting the protocol version. - // Given that BufferedReader::reset fails if we try to reset if we go past its buffer size, this - // buffer has to be big enough to contain our longest spec and whatever messages get emitted before - // the SPEC. - private static final int BUFFER_READ_AHEAD_LIMIT = 32000; - private static final int MESSAGES_LOOK_AHEAD_FOR_DETECTION = 10; - private static final String TYPE_FIELD_NAME = "type"; - - private final AirbyteMessageSerDeProvider serDeProvider; - private final AirbyteProtocolVersionedMigratorFactory migratorFactory; - private final Optional configuredAirbyteCatalog; - private AirbyteMessageDeserializer deserializer; - private AirbyteMessageVersionedMigrator migrator; - private Version protocolVersion; - - private boolean shouldDetectVersion = false; - - public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProvider, - final AirbyteProtocolVersionedMigratorFactory migratorFactory, - final Version protocolVersion, - final Optional configuredAirbyteCatalog, - final Optional> exceptionClass) { - this(serDeProvider, migratorFactory, protocolVersion, configuredAirbyteCatalog, MdcScope.DEFAULT_BUILDER, exceptionClass); - } - - public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProvider, - final AirbyteProtocolVersionedMigratorFactory migratorFactory, - final Version protocolVersion, - final Optional configuredAirbyteCatalog, - final MdcScope.Builder containerLogMdcBuilder, - final Optional> exceptionClass) { - // TODO 
AirbyteProtocolPredicate needs to be updated to be protocol version aware - super(new AirbyteProtocolPredicate(), LOGGER, containerLogMdcBuilder, exceptionClass); - Preconditions.checkNotNull(protocolVersion); - this.serDeProvider = serDeProvider; - this.migratorFactory = migratorFactory; - this.configuredAirbyteCatalog = configuredAirbyteCatalog; - this.initializeForProtocolVersion(protocolVersion); - } - - /** - * Create the AirbyteMessage stream. - * - * If detectVersion is set to true, it will decide which protocol version to use from the content of - * the stream rather than the one passed from the constructor. - */ - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public Stream create(final BufferedReader bufferedReader) { - if (shouldDetectVersion) { - final Optional versionMaybe; - try { - versionMaybe = detectVersion(bufferedReader); - } catch (final IOException e) { - throw new RuntimeException(e); - } - if (versionMaybe.isPresent()) { - logger.info("Detected Protocol Version {}", versionMaybe.get().serialize()); - initializeForProtocolVersion(versionMaybe.get()); - } else { - // No version found, use the default as a fallback - logger.info("Unable to detect Protocol Version, assuming protocol version {}", fallbackVersion.serialize()); - initializeForProtocolVersion(fallbackVersion); - } - } - - final boolean needMigration = !protocolVersion.getMajorVersion().equals(migratorFactory.getMostRecentVersion().getMajorVersion()); - logger.info( - "Reading messages from protocol version {}{}", - protocolVersion.serialize(), - needMigration ? ", messages will be upgraded to protocol version " + migratorFactory.getMostRecentVersion().serialize() : ""); - return super.create(bufferedReader); - } - - /** - * Attempt to detect the version by scanning the stream - * - * Using the BufferedReader reset/mark feature to get a look-ahead. We will attempt to find the - * first SPEC message and decide on a protocol version from this message. 
- * - * @param bufferedReader the stream to read - * @return The Version if found - * @throws IOException - */ - private Optional detectVersion(final BufferedReader bufferedReader) throws IOException { - // Buffersize needs to be big enough to containing everything we need for the detection. Otherwise, - // the reset will fail. - bufferedReader.mark(BUFFER_READ_AHEAD_LIMIT); - try { - // Cap detection to the first 10 messages. When doing the protocol detection, we expect the SPEC - // message to show up early in the stream. Ideally it should be first message however we do not - // enforce this constraint currently so connectors may send LOG messages before. - for (int i = 0; i < MESSAGES_LOOK_AHEAD_FOR_DETECTION; ++i) { - final String line = bufferedReader.readLine(); - final Optional jsonOpt = Jsons.tryDeserialize(line); - if (jsonOpt.isPresent()) { - final JsonNode json = jsonOpt.get(); - if (isSpecMessage(json)) { - final JsonNode protocolVersionNode = json.at("/spec/protocol_version"); - bufferedReader.reset(); - return Optional.ofNullable(protocolVersionNode).filter(Predicate.not(JsonNode::isMissingNode)).map(node -> new Version(node.asText())); - } - } - } - bufferedReader.reset(); - return Optional.empty(); - } catch (final IOException e) { - logger.warn( - "Protocol version detection failed, it is likely than the connector sent more than {}B without an complete SPEC message." 
+ - " A SPEC message that is too long could be the root cause here.", - BUFFER_READ_AHEAD_LIMIT); - throw e; - } - } - - private boolean isSpecMessage(final JsonNode json) { - return json.has(TYPE_FIELD_NAME) && "spec".equalsIgnoreCase(json.get(TYPE_FIELD_NAME).asText()); - } - - public boolean setDetectVersion(final boolean detectVersion) { - return this.shouldDetectVersion = detectVersion; - } - - public VersionedAirbyteStreamFactory withDetectVersion(final boolean detectVersion) { - setDetectVersion(detectVersion); - return this; - } - - final protected void initializeForProtocolVersion(final Version protocolVersion) { - this.deserializer = (AirbyteMessageDeserializer) serDeProvider.getDeserializer(protocolVersion).orElseThrow(); - this.migrator = migratorFactory.getAirbyteMessageMigrator(protocolVersion); - this.protocolVersion = protocolVersion; - } - - @Override - protected Stream toAirbyteMessage(final JsonNode json) { - try { - final AirbyteMessage message = migrator.upgrade(deserializer.deserialize(json), configuredAirbyteCatalog); - return Stream.of(message); - } catch (final RuntimeException e) { - logger.warn("Failed to upgrade a message from version {}: {}", protocolVersion, Jsons.serialize(json), e); - return Stream.empty(); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/AirbyteMessageTracker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/AirbyteMessageTracker.java deleted file mode 100644 index 95e557cf537d..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/AirbyteMessageTracker.java +++ /dev/null @@ -1,557 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal.book_keeping; - -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Charsets; -import com.google.common.base.Preconditions; -import com.google.common.collect.BiMap; -import com.google.common.collect.HashBiMap; -import com.google.common.hash.HashFunction; -import com.google.common.hash.Hashing; -import datadog.trace.api.Trace; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.FailureReason; -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; -import io.airbyte.protocol.models.AirbyteControlMessage; -import io.airbyte.protocol.models.AirbyteEstimateTraceMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteRecordMessage; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.workers.helper.FailureHelper; -import io.airbyte.workers.internal.book_keeping.StateMetricsTracker.StateMetricsTrackerNoStateMatchException; -import io.airbyte.workers.internal.state_aggregator.DefaultStateAggregator; -import io.airbyte.workers.internal.state_aggregator.StateAggregator; -import java.time.LocalDateTime; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.concurrent.atomic.AtomicReference; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - -/** - * This class is responsible for stats and metadata tracking surrounding - * {@link AirbyteRecordMessage}. - *

- * It is not intended to perform meaningful operations - transforming, mutating, triggering - * downstream actions etc. - on specific messages. - */ -@Slf4j -public class AirbyteMessageTracker implements MessageTracker { - - private static final long STATE_DELTA_TRACKER_MEMORY_LIMIT_BYTES = 10L * 1024L * 1024L; // 10 MiB, ~5% of default cloud worker memory - private static final long STATE_METRICS_TRACKER_MESSAGE_LIMIT = 873813L; // 12 bytes per message tracked, maximum of 10MiB of memory - - private final AtomicReference sourceOutputState; - private final AtomicReference destinationOutputState; - private final Map streamToRunningCount; - private final HashFunction hashFunction; - private final BiMap nameNamespacePairToIndex; - private final Map nameNamespacePairToStreamStats; - private final StateDeltaTracker stateDeltaTracker; - private final StateMetricsTracker stateMetricsTracker; - private final List destinationErrorTraceMessages; - private final List sourceErrorTraceMessages; - private final StateAggregator stateAggregator; - private final FeatureFlags featureFlags; - private final boolean featureFlagLogConnectorMsgs; - - // These variables support SYNC level estimates and are meant for sources where stream level - // estimates are not possible e.g. CDC sources. - private Long totalRecordsEstimatedSync; - private Long totalBytesEstimatedSync; - - private short nextStreamIndex; - - /** - * If the StateDeltaTracker throws an exception, this flag is set to true and committed counts are - * not returned. 
- */ - private boolean unreliableCommittedCounts; - /** - * If the StateMetricsTracker throws an exception, this flag is set to true and the metrics around - * max and mean time between state message emitted and committed are unreliable - */ - private boolean unreliableStateTimingMetrics; - - private enum ConnectorType { - SOURCE, - DESTINATION - } - - public AirbyteMessageTracker(final FeatureFlags featureFlags) { - this(new StateDeltaTracker(STATE_DELTA_TRACKER_MEMORY_LIMIT_BYTES), - new DefaultStateAggregator(featureFlags.useStreamCapableState()), - new StateMetricsTracker(STATE_METRICS_TRACKER_MESSAGE_LIMIT), - featureFlags); - } - - @VisibleForTesting - protected AirbyteMessageTracker(final StateDeltaTracker stateDeltaTracker, - final StateAggregator stateAggregator, - final StateMetricsTracker stateMetricsTracker, - final FeatureFlags featureFlags) { - this.sourceOutputState = new AtomicReference<>(); - this.destinationOutputState = new AtomicReference<>(); - this.streamToRunningCount = new HashMap<>(); - this.nameNamespacePairToIndex = HashBiMap.create(); - this.hashFunction = Hashing.murmur3_32_fixed(); - this.nameNamespacePairToStreamStats = new HashMap<>(); - this.stateDeltaTracker = stateDeltaTracker; - this.stateMetricsTracker = stateMetricsTracker; - this.nextStreamIndex = 0; - this.unreliableCommittedCounts = false; - this.unreliableStateTimingMetrics = false; - this.destinationErrorTraceMessages = new ArrayList<>(); - this.sourceErrorTraceMessages = new ArrayList<>(); - this.stateAggregator = stateAggregator; - this.featureFlags = featureFlags; - this.featureFlagLogConnectorMsgs = featureFlags.logConnectorMessages(); - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void acceptFromSource(final AirbyteMessage message) { - logMessageAsJSON("source", message); - - switch (message.getType()) { - case TRACE -> handleEmittedTrace(message.getTrace(), ConnectorType.SOURCE); - case RECORD -> 
handleSourceEmittedRecord(message.getRecord()); - case STATE -> handleSourceEmittedState(message.getState()); - case CONTROL -> handleEmittedOrchestratorMessage(message.getControl(), ConnectorType.SOURCE); - default -> log.warn("Invalid message type for message: {}", message); - } - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void acceptFromDestination(final AirbyteMessage message) { - logMessageAsJSON("destination", message); - - switch (message.getType()) { - case TRACE -> handleEmittedTrace(message.getTrace(), ConnectorType.DESTINATION); - case STATE -> handleDestinationEmittedState(message.getState()); - case CONTROL -> handleEmittedOrchestratorMessage(message.getControl(), ConnectorType.DESTINATION); - default -> log.warn("Invalid message type for message: {}", message); - } - } - - /** - * When a source emits a record, increment the running record count, the total record count, and the - * total byte count for the record's stream. - */ - private void handleSourceEmittedRecord(final AirbyteRecordMessage recordMessage) { - if (stateMetricsTracker.getFirstRecordReceivedAt() == null) { - stateMetricsTracker.setFirstRecordReceivedAt(LocalDateTime.now()); - } - - final var nameNamespace = AirbyteStreamNameNamespacePair.fromRecordMessage(recordMessage); - final short streamIndex = getStreamIndex(nameNamespace); - - final long currentRunningCount = streamToRunningCount.getOrDefault(streamIndex, 0L); - streamToRunningCount.put(streamIndex, currentRunningCount + 1); - - final var currStats = nameNamespacePairToStreamStats.getOrDefault(nameNamespace, new StreamStats()); - currStats.emittedRecords++; - - final int estimatedNumBytes = Jsons.getEstimatedByteSize(recordMessage.getData()); - currStats.emittedBytes += estimatedNumBytes; - - nameNamespacePairToStreamStats.put(nameNamespace, currStats); - } - - /** - * When a source emits a state, persist the current running count per stream to the - * {@link StateDeltaTracker}. 
Then, reset the running count per stream so that new counts can start - * recording for the next state. Also add the state to list so that state order is tracked - * correctly. - */ - private void handleSourceEmittedState(final AirbyteStateMessage stateMessage) { - final LocalDateTime timeEmittedStateMessage = LocalDateTime.now(); - stateMetricsTracker.incrementTotalSourceEmittedStateMessages(); - stateMetricsTracker.updateMaxAndMeanSecondsToReceiveStateMessage(timeEmittedStateMessage); - stateMetricsTracker.setLastStateMessageReceivedAt(timeEmittedStateMessage); - sourceOutputState.set(new State().withState(stateMessage.getData())); - final int stateHash = getStateHashCode(stateMessage); - - try { - if (!unreliableCommittedCounts) { - stateDeltaTracker.addState(stateHash, streamToRunningCount); - } - if (!unreliableStateTimingMetrics) { - stateMetricsTracker.addState(stateMessage, stateHash, timeEmittedStateMessage); - } - } catch (final StateDeltaTracker.StateDeltaTrackerException e) { - log.warn("The message tracker encountered an issue that prevents committed record counts from being reliably computed."); - log.warn("This only impacts metadata and does not indicate a problem with actual sync data."); - log.warn(e.getMessage(), e); - unreliableCommittedCounts = true; - } catch (final StateMetricsTracker.StateMetricsTrackerOomException e) { - log.warn("The StateMetricsTracker encountered an out of memory error that prevents new state metrics from being recorded"); - log.warn("This only affects metrics and does not indicate a problem with actual sync data."); - unreliableStateTimingMetrics = true; - } - streamToRunningCount.clear(); - } - - /** - * When a destination emits a state, mark all uncommitted states up to and including this state as - * committed in the {@link StateDeltaTracker}. Also record this state as the last committed state. 
- */ - private void handleDestinationEmittedState(final AirbyteStateMessage stateMessage) { - final LocalDateTime timeCommitted = LocalDateTime.now(); - stateMetricsTracker.incrementTotalDestinationEmittedStateMessages(); - stateAggregator.ingest(stateMessage); - destinationOutputState.set(stateAggregator.getAggregated()); - final int stateHash = getStateHashCode(stateMessage); - - try { - if (!unreliableCommittedCounts) { - stateDeltaTracker.commitStateHash(stateHash); - } - } catch (final StateDeltaTracker.StateDeltaTrackerException e) { - log.warn("The message tracker encountered an issue that prevents committed record counts from being reliably computed."); - log.warn("This only impacts metadata and does not indicate a problem with actual sync data."); - log.warn(e.getMessage(), e); - unreliableCommittedCounts = true; - } - - try { - if (!unreliableStateTimingMetrics) { - stateMetricsTracker.updateStates(stateMessage, stateHash, timeCommitted); - } - } catch (final StateMetricsTrackerNoStateMatchException e) { - log.warn("The state message tracker was unable to match the destination state message to a corresponding source state message."); - log.warn("This only impacts metrics and does not indicate a problem with actual sync data."); - log.warn(e.getMessage(), e); - unreliableStateTimingMetrics = true; - } - } - - /** - * When a connector signals that the platform should update persist an update - */ - private void handleEmittedOrchestratorMessage(final AirbyteControlMessage controlMessage, final ConnectorType connectorType) { - switch (controlMessage.getType()) { - case CONNECTOR_CONFIG -> handleEmittedOrchestratorConnectorConfig(controlMessage.getConnectorConfig(), connectorType); - default -> log.warn("Invalid orchestrator message type for message: {}", controlMessage); - } - } - - /** - * When a connector needs to update its configuration - */ - @SuppressWarnings("PMD") // until method is implemented - private void 
handleEmittedOrchestratorConnectorConfig(final AirbyteControlConnectorConfigMessage configMessage, - final ConnectorType connectorType) { - // Config updates are being persisted as part of the DefaultReplicationWorker. - // In the future, we could add tracking of these kinds of messages here. Nothing to do for now. - } - - /** - * When a connector emits a trace message, check the type and call the correct function. If it is an - * error trace message, add it to the list of errorTraceMessages for the connector type - */ - private void handleEmittedTrace(final AirbyteTraceMessage traceMessage, final ConnectorType connectorType) { - switch (traceMessage.getType()) { - case ESTIMATE -> handleEmittedEstimateTrace(traceMessage.getEstimate()); - case ERROR -> handleEmittedErrorTrace(traceMessage, connectorType); - default -> log.warn("Invalid message type for trace message: {}", traceMessage); - } - } - - private void handleEmittedErrorTrace(final AirbyteTraceMessage errorTraceMessage, final ConnectorType connectorType) { - if (connectorType.equals(ConnectorType.DESTINATION)) { - destinationErrorTraceMessages.add(errorTraceMessage); - } else if (connectorType.equals(ConnectorType.SOURCE)) { - sourceErrorTraceMessages.add(errorTraceMessage); - } - } - - /** - * There are several assumptions here: - *

- * - Assume the estimate is a whole number and not a sum i.e. each estimate replaces the previous - * estimate. - *

- * - Sources cannot emit both STREAM and SYNC estimates in a same sync. Error out if this happens. - */ - @SuppressWarnings("PMD.AvoidDuplicateLiterals") - private void handleEmittedEstimateTrace(final AirbyteEstimateTraceMessage estimate) { - switch (estimate.getType()) { - case STREAM -> { - Preconditions.checkArgument(totalBytesEstimatedSync == null, "STREAM and SYNC estimates should not be emitted in the same sync."); - Preconditions.checkArgument(totalRecordsEstimatedSync == null, "STREAM and SYNC estimates should not be emitted in the same sync."); - - log.debug("Saving stream estimates for namespace: {}, stream: {}", estimate.getNamespace(), estimate.getName()); - final var nameNamespace = new AirbyteStreamNameNamespacePair(estimate.getName(), estimate.getNamespace()); - final var currStats = nameNamespacePairToStreamStats.getOrDefault(nameNamespace, new StreamStats()); - currStats.estimatedRecords = estimate.getRowEstimate(); - currStats.estimatedBytes = estimate.getByteEstimate(); - nameNamespacePairToStreamStats.put(nameNamespace, currStats); - } - case SYNC -> { - Preconditions.checkArgument(nameNamespacePairToStreamStats.isEmpty(), "STREAM and SYNC estimates should not be emitted in the same sync."); - - log.debug("Saving sync estimates"); - totalBytesEstimatedSync = estimate.getByteEstimate(); - totalRecordsEstimatedSync = estimate.getRowEstimate(); - } - } - - } - - private short getStreamIndex(final AirbyteStreamNameNamespacePair pair) { - if (!nameNamespacePairToIndex.containsKey(pair)) { - nameNamespacePairToIndex.put(pair, nextStreamIndex); - nextStreamIndex++; - } - return nameNamespacePairToIndex.get(pair); - } - - private int getStateHashCode(final AirbyteStateMessage stateMessage) { - if (AirbyteStateType.GLOBAL == stateMessage.getType()) { - return hashFunction.hashBytes(Jsons.serialize(stateMessage.getGlobal()).getBytes(Charsets.UTF_8)).hashCode(); - } else if (AirbyteStateType.STREAM == stateMessage.getType()) { - return 
hashFunction.hashBytes(Jsons.serialize(stateMessage.getStream().getStreamState()).getBytes(Charsets.UTF_8)).hashCode(); - } else { - // state type is LEGACY - return hashFunction.hashBytes(Jsons.serialize(stateMessage.getData()).getBytes(Charsets.UTF_8)).hashCode(); - } - } - - @Override - public AirbyteTraceMessage getFirstSourceErrorTraceMessage() { - if (!sourceErrorTraceMessages.isEmpty()) { - return sourceErrorTraceMessages.get(0); - } else { - return null; - } - } - - @Override - public AirbyteTraceMessage getFirstDestinationErrorTraceMessage() { - if (!destinationErrorTraceMessages.isEmpty()) { - return destinationErrorTraceMessages.get(0); - } else { - return null; - } - } - - @Override - public FailureReason errorTraceMessageFailure(final Long jobId, final Integer attempt) { - final AirbyteTraceMessage sourceMessage = getFirstSourceErrorTraceMessage(); - final AirbyteTraceMessage destinationMessage = getFirstDestinationErrorTraceMessage(); - - if (sourceMessage == null && destinationMessage == null) { - return null; - } - - if (destinationMessage == null) { - return FailureHelper.sourceFailure(sourceMessage, jobId, attempt); - } - - if (sourceMessage == null) { - return FailureHelper.destinationFailure(destinationMessage, jobId, attempt); - } - - if (sourceMessage.getEmittedAt() <= destinationMessage.getEmittedAt()) { - return FailureHelper.sourceFailure(sourceMessage, jobId, attempt); - } else { - return FailureHelper.destinationFailure(destinationMessage, jobId, attempt); - } - - } - - @Override - public Optional getSourceOutputState() { - return Optional.ofNullable(sourceOutputState.get()); - } - - @Override - public Optional getDestinationOutputState() { - return Optional.ofNullable(destinationOutputState.get()); - } - - /** - * Fetch committed stream index to record count from the {@link StateDeltaTracker}. Then, swap out - * stream indices for stream names. 
If the delta tracker has exceeded its capacity, return empty - * because committed record counts cannot be reliably computed. - */ - @Override - public Optional> getStreamToCommittedRecords() { - if (unreliableCommittedCounts) { - return Optional.empty(); - } - final Map streamIndexToCommittedRecordCount = stateDeltaTracker.getStreamToCommittedRecords(); - return Optional.of( - streamIndexToCommittedRecordCount.entrySet().stream().collect( - Collectors.toMap(entry -> nameNamespacePairToIndex.inverse().get(entry.getKey()), Entry::getValue))); - } - - /** - * Swap out stream indices for stream names and return total records emitted by stream. - */ - @Override - public Map getStreamToEmittedRecords() { - return nameNamespacePairToStreamStats.entrySet().stream().collect(Collectors.toMap( - Entry::getKey, entry -> entry.getValue().emittedRecords)); - } - - /** - * Swap out stream indices for stream names and return total records estimated by stream. - */ - @Override - public Map getStreamToEstimatedRecords() { - return nameNamespacePairToStreamStats.entrySet().stream().collect( - Collectors.toMap( - Entry::getKey, - entry -> entry.getValue().estimatedRecords)); - } - - /** - * Swap out stream indices for stream names and return total bytes emitted by stream. - */ - @Override - public Map getStreamToEmittedBytes() { - return nameNamespacePairToStreamStats.entrySet().stream().collect(Collectors.toMap( - Entry::getKey, - entry -> entry.getValue().emittedBytes)); - } - - /** - * Swap out stream indices for stream names and return total bytes estimated by stream. - */ - @Override - public Map getStreamToEstimatedBytes() { - return nameNamespacePairToStreamStats.entrySet().stream().collect( - Collectors.toMap( - Entry::getKey, - entry -> entry.getValue().estimatedBytes)); - } - - /** - * Compute sum of emitted record counts across all streams. 
- */ - @Override - public long getTotalRecordsEmitted() { - return nameNamespacePairToStreamStats.values().stream() - .map(stats -> stats.emittedRecords) - .reduce(0L, Long::sum); - } - - /** - * Compute sum of estimated record counts across all streams. - */ - @Override - public long getTotalRecordsEstimated() { - if (!nameNamespacePairToStreamStats.isEmpty()) { - return nameNamespacePairToStreamStats.values().stream() - .map(e -> e.estimatedRecords) - .reduce(0L, Long::sum); - } - - return totalRecordsEstimatedSync; - } - - /** - * Compute sum of emitted bytes across all streams. - */ - @Override - public long getTotalBytesEmitted() { - return nameNamespacePairToStreamStats.values().stream() - .map(e -> e.emittedBytes) - .reduce(0L, Long::sum); - } - - /** - * Compute sum of estimated bytes across all streams. - */ - @Override - public long getTotalBytesEstimated() { - if (!nameNamespacePairToStreamStats.isEmpty()) { - return nameNamespacePairToStreamStats.values().stream() - .map(e -> e.estimatedBytes) - .reduce(0L, Long::sum); - } - - return totalBytesEstimatedSync; - } - - /** - * Compute sum of committed record counts across all streams. If the delta tracker has exceeded its - * capacity, return empty because committed record counts cannot be reliably computed. 
- */ - @Override - public Optional getTotalRecordsCommitted() { - if (unreliableCommittedCounts) { - return Optional.empty(); - } - return Optional.of(stateDeltaTracker.getStreamToCommittedRecords().values().stream().reduce(0L, Long::sum)); - } - - @Override - public Long getTotalSourceStateMessagesEmitted() { - return stateMetricsTracker.getTotalSourceStateMessageEmitted(); - } - - @Override - public Long getTotalDestinationStateMessagesEmitted() { - return stateMetricsTracker.getTotalDestinationStateMessageEmitted(); - } - - @Override - public Long getMaxSecondsToReceiveSourceStateMessage() { - return stateMetricsTracker.getMaxSecondsToReceiveSourceStateMessage(); - } - - @Override - public Long getMeanSecondsToReceiveSourceStateMessage() { - return stateMetricsTracker.getMeanSecondsToReceiveSourceStateMessage(); - } - - @Override - public Optional getMaxSecondsBetweenStateMessageEmittedAndCommitted() { - if (unreliableStateTimingMetrics) { - return Optional.empty(); - } - - return Optional.of(stateMetricsTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted()); - } - - @Override - public Optional getMeanSecondsBetweenStateMessageEmittedAndCommitted() { - if (unreliableStateTimingMetrics) { - return Optional.empty(); - } - - return Optional.of(stateMetricsTracker.getMeanSecondsBetweenStateMessageEmittedAndCommitted()); - } - - @Override - public Boolean getUnreliableStateTimingMetrics() { - return unreliableStateTimingMetrics; - } - - private void logMessageAsJSON(final String caller, final AirbyteMessage message) { - if (!featureFlagLogConnectorMsgs) { - return; - } - - log.info(caller + " message | " + Jsons.serialize(message)); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/MessageTracker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/MessageTracker.java deleted file mode 100644 index bd02749f3f82..000000000000 --- 
a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/MessageTracker.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal.book_keeping; - -import io.airbyte.config.FailureReason; -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import java.util.Map; -import java.util.Optional; - -/** - * Interface to handle extracting metadata from the stream of data flowing from a Source to a - * Destination. - */ -public interface MessageTracker { - - /** - * Accepts an AirbyteMessage emitted from a source and tracks any metadata about it that is required - * by the Platform. - * - * @param message message to derive metadata from. - */ - void acceptFromSource(AirbyteMessage message); - - /** - * Accepts an AirbyteMessage emitted from a destination and tracks any metadata about it that is - * required by the Platform. - * - * @param message message to derive metadata from. - */ - void acceptFromDestination(AirbyteMessage message); - - /** - * Get the current source state of the stream. - * - * @return returns the last StateMessage that was accepted from the source. If no StateMessage was - * accepted, empty. - */ - Optional getSourceOutputState(); - - /** - * Get the current destination state of the stream. - * - * @return returns the last StateMessage that was accepted from the destination. If no StateMessage - * was accepted, empty. - */ - Optional getDestinationOutputState(); - - /** - * Get the per-stream committed record count. - * - * @return returns a map of committed record count by stream name. If committed record counts cannot - * be computed, empty. - */ - Optional> getStreamToCommittedRecords(); - - /** - * Get the per-stream emitted record count. 
This includes messages that were emitted by the source, - * but never committed by the destination. - * - * @return returns a map of emitted record count by stream name. - */ - Map getStreamToEmittedRecords(); - - /** - * Get the per-stream estimated record count provided by - * {@link io.airbyte.protocol.models.AirbyteEstimateTraceMessage}. - * - * @return returns a map of estimated record count by stream name. - */ - Map getStreamToEstimatedRecords(); - - /** - * Get the per-stream emitted byte count. This includes messages that were emitted by the source, - * but never committed by the destination. - * - * @return returns a map of emitted record count by stream name. - */ - Map getStreamToEmittedBytes(); - - /** - * Get the per-stream estimated byte count provided by - * {@link io.airbyte.protocol.models.AirbyteEstimateTraceMessage}. - * - * @return returns a map of estimated bytes by stream name. - */ - Map getStreamToEstimatedBytes(); - - /** - * Get the overall emitted record count. This includes messages that were emitted by the source, but - * never committed by the destination. - * - * @return returns the total count of emitted records across all streams. - */ - long getTotalRecordsEmitted(); - - /** - * Get the overall estimated record count. - * - * @return returns the total count of estimated records across all streams. - */ - long getTotalRecordsEstimated(); - - /** - * Get the overall emitted bytes. This includes messages that were emitted by the source, but never - * committed by the destination. - * - * @return returns the total emitted bytes across all streams. - */ - long getTotalBytesEmitted(); - - /** - * Get the overall estimated bytes. - * - * @return returns the total count of estimated bytes across all streams. - */ - long getTotalBytesEstimated(); - - /** - * Get the overall committed record count. - * - * @return returns the total count of committed records across all streams. If total committed - * record count cannot be computed, empty. 
- */ - Optional getTotalRecordsCommitted(); - - /** - * Get the count of state messages emitted from the source connector. - * - * @return returns the total count of state messages emitted from the source. - */ - Long getTotalSourceStateMessagesEmitted(); - - Long getTotalDestinationStateMessagesEmitted(); - - Long getMaxSecondsToReceiveSourceStateMessage(); - - Long getMeanSecondsToReceiveSourceStateMessage(); - - Optional getMaxSecondsBetweenStateMessageEmittedAndCommitted(); - - Optional getMeanSecondsBetweenStateMessageEmittedAndCommitted(); - - AirbyteTraceMessage getFirstDestinationErrorTraceMessage(); - - AirbyteTraceMessage getFirstSourceErrorTraceMessage(); - - FailureReason errorTraceMessageFailure(Long jobId, Integer attempt); - - Boolean getUnreliableStateTimingMetrics(); - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StateDeltaTracker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StateDeltaTracker.java deleted file mode 100644 index 2aff31bd176c..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StateDeltaTracker.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal.book_keeping; - -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import com.google.common.annotations.VisibleForTesting; -import datadog.trace.api.Trace; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import lombok.extern.slf4j.Slf4j; - -/** - * This class tracks "deltas" between states in compact {@code byte[]}s with the following schema: - * - *

- *  [(state hash),(stream index),(record count)...] with the last two elements repeating per stream in the delta.
- * 
- *

- * This class also maintains a {@code Set} of {@code committedStateHashes} so that it can accumulate - * both committed and total record counts per stream. - *

- * The StateDeltaTracker is initialized with a memory limit. If this memory limit is exceeded, new - * states deltas will not be added and per-stream record counts will not be able to be computed. - * This is to prevent OutOfMemoryErrors from crashing the sync. - */ -@Slf4j -public class StateDeltaTracker { - - private static final int STATE_HASH_BYTES = Integer.BYTES; - private static final int STREAM_INDEX_BYTES = Short.BYTES; - private static final int RECORD_COUNT_BYTES = Long.BYTES; - private static final int BYTES_PER_STREAM = STREAM_INDEX_BYTES + RECORD_COUNT_BYTES; - - private final Set committedStateHashes; - private final Map streamToCommittedRecords; - - /** - * Every time a state is added, a new byte[] containing the state hash and per-stream delta will be - * added to this list. Every time a state is committed, state deltas up to the committed state are - * removed from the head of the list and aggregated into the committed count map. The source thread - * adds while the destination thread removes, so synchronization is necessary to provide - * thread-safety. - */ - @VisibleForTesting - protected final List stateDeltas; - - @VisibleForTesting - protected long remainingCapacity; - @VisibleForTesting - protected boolean capacityExceeded; - - public StateDeltaTracker(final long memoryLimitBytes) { - this.committedStateHashes = new HashSet<>(); - this.streamToCommittedRecords = new HashMap<>(); - this.stateDeltas = new ArrayList<>(); - this.remainingCapacity = memoryLimitBytes; - this.capacityExceeded = false; - } - - /** - * Converts the given state hash and per-stream record count map into a {@code byte[]} and stores - * it. - *

- * This method leverages a synchronized block to provide thread safety between the source thread - * calling addState while the destination thread calls commitStateHash. - * - * @throws StateDeltaTrackerException thrown when the memory footprint of stateDeltas exceeds - * available capacity. - */ - @Trace(operationName = WORKER_OPERATION_NAME) - public void addState(final int stateHash, final Map streamIndexToRecordCount) throws StateDeltaTrackerException { - synchronized (this) { - final int size = STATE_HASH_BYTES + (streamIndexToRecordCount.size() * BYTES_PER_STREAM); - - if (capacityExceeded || remainingCapacity < size) { - capacityExceeded = true; - throw new StateDeltaTrackerException("Memory capacity is exceeded for StateDeltaTracker."); - } - - final ByteBuffer delta = ByteBuffer.allocate(size); - - delta.putInt(stateHash); - - for (final Map.Entry entry : streamIndexToRecordCount.entrySet()) { - delta.putShort(entry.getKey()); - delta.putLong(entry.getValue()); - } - - stateDeltas.add(delta.array()); - remainingCapacity -= delta.array().length; - } - } - - /** - * Mark the given {@code stateHash} as committed. - *

- * This method leverages a synchronized block to provide thread safety between the source thread - * calling addState while the destination thread calls commitStateHash. - * - * @throws StateDeltaTrackerException thrown when committed counts can no longer be reliably - * computed. - */ - @Trace(operationName = WORKER_OPERATION_NAME) - public void commitStateHash(final int stateHash) throws StateDeltaTrackerException { - synchronized (this) { - if (capacityExceeded) { - throw new StateDeltaTrackerException("Memory capacity exceeded for StateDeltaTracker, so states cannot be reliably committed"); - } - if (committedStateHashes.contains(stateHash)) { - throw new StateDeltaTrackerException( - String.format("State hash %d was already committed, likely indicating a state hash collision", stateHash)); - } - - committedStateHashes.add(stateHash); - int currStateHash; - do { - if (stateDeltas.isEmpty()) { - throw new StateDeltaTrackerException(String.format("Delta was not stored for state hash %d", stateHash)); - } - // as deltas are removed and aggregated into committed count map, reclaim capacity - final ByteBuffer currDelta = ByteBuffer.wrap(stateDeltas.remove(0)); - remainingCapacity += currDelta.capacity(); - - currStateHash = currDelta.getInt(); - - final int numStreams = (currDelta.capacity() - STATE_HASH_BYTES) / BYTES_PER_STREAM; - for (int i = 0; i < numStreams; i++) { - final short streamIndex = currDelta.getShort(); - final long recordCount = currDelta.getLong(); - - // aggregate delta into committed count map - final long committedRecordCount = streamToCommittedRecords.getOrDefault(streamIndex, 0L); - streamToCommittedRecords.put(streamIndex, committedRecordCount + recordCount); - } - } while (currStateHash != stateHash); // repeat until each delta up to the committed state is aggregated - } - } - - @Trace(operationName = WORKER_OPERATION_NAME) - public Map getStreamToCommittedRecords() { - return streamToCommittedRecords; - } - - /** - * Thrown when the 
StateDeltaTracker encounters an issue that prevents it from reliably computing - * committed record deltas. - */ - public static class StateDeltaTrackerException extends Exception { - - public StateDeltaTrackerException(final String message) { - super(message); - } - - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StateMetricsTracker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StateMetricsTracker.java deleted file mode 100644 index 9fe5b07a58b7..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StateMetricsTracker.java +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal.book_keeping; - -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.StreamDescriptor; -import java.nio.ByteBuffer; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; - -public class StateMetricsTracker { - - private static final int STATE_HASH_SIZE = Integer.BYTES; - private static final int EPOCH_TIME_SIZE = Long.BYTES; - private static final int BYTE_ARRAY_SIZE = STATE_HASH_SIZE + EPOCH_TIME_SIZE; - - private final List stateHashesAndTimestamps; - private final Map> streamStateHashesAndTimestamps; - private LocalDateTime firstRecordReceivedAt; - private LocalDateTime lastStateMessageReceivedAt; - private Long maxSecondsToReceiveSourceStateMessage; - private Long meanSecondsToReceiveSourceStateMessage; - private Long maxSecondsBetweenStateMessageEmittedandCommitted; - private Long meanSecondsBetweenStateMessageEmittedandCommitted; - private 
final AtomicLong totalSourceEmittedStateMessages; - private final AtomicLong totalDestinationEmittedStateMessages; - private long remainingCapacity; - private Boolean capacityExceeded; - - public StateMetricsTracker(final Long messageLimit) { - this.stateHashesAndTimestamps = new ArrayList<>(); - this.streamStateHashesAndTimestamps = new HashMap<>(); - this.firstRecordReceivedAt = null; - this.lastStateMessageReceivedAt = null; - this.maxSecondsToReceiveSourceStateMessage = 0L; - this.meanSecondsToReceiveSourceStateMessage = 0L; - this.maxSecondsBetweenStateMessageEmittedandCommitted = 0L; - this.meanSecondsBetweenStateMessageEmittedandCommitted = 0L; - this.totalSourceEmittedStateMessages = new AtomicLong(0L); - this.totalDestinationEmittedStateMessages = new AtomicLong(0L); - this.remainingCapacity = messageLimit; - this.capacityExceeded = false; - } - - public synchronized void addState(final AirbyteStateMessage stateMessage, final int stateHash, final LocalDateTime timeEmitted) - throws StateMetricsTrackerOomException { - final long epochTime = timeEmitted.toEpochSecond(ZoneOffset.UTC); - - if (capacityExceeded || remainingCapacity < 1) { - capacityExceeded = true; - throw new StateMetricsTrackerOomException("Memory capacity is exceeded for StateMetricsTracker."); - } - - if (AirbyteStateType.STREAM == stateMessage.getType()) { - addStateMessageToStreamToStateHashTimestampTracker(stateMessage, stateHash, epochTime); - } else { - // do not track state message timestamps per stream for GLOBAL or LEGACY state - final byte[] stateTimestampByteArray = populateStateTimestampByteArray(stateHash, epochTime); - stateHashesAndTimestamps.add(stateTimestampByteArray); - remainingCapacity -= 1; - } - } - - public synchronized void updateStates(final AirbyteStateMessage stateMessage, final int stateHash, final LocalDateTime timeCommitted) - throws StateMetricsTrackerNoStateMatchException { - final LocalDateTime startingTime; - if (AirbyteStateType.STREAM == 
stateMessage.getType()) { - final String streamDescriptorKey = getStreamDescriptorKey(stateMessage.getStream().getStreamDescriptor()); - final List stateMessagesForStream = streamStateHashesAndTimestamps.get(streamDescriptorKey); - startingTime = findStartingTimeStampAndRemoveOlderEntries(stateMessagesForStream, stateHash); - } else { - startingTime = findStartingTimeStampAndRemoveOlderEntries(stateHashesAndTimestamps, stateHash); - } - updateMaxAndMeanSeconds(startingTime, timeCommitted); - } - - void addStateMessageToStreamToStateHashTimestampTracker(final AirbyteStateMessage stateMessage, - final int stateHash, - final Long epochTimeEmitted) { - final String streamDescriptorKey = getStreamDescriptorKey(stateMessage.getStream().getStreamDescriptor()); - final byte[] stateHashAndTimestamp = populateStateTimestampByteArray(stateHash, epochTimeEmitted); - - if (streamStateHashesAndTimestamps.get(streamDescriptorKey) == null) { - final List stateHashesAndTimestamps = new ArrayList<>(); - stateHashesAndTimestamps.add(stateHashAndTimestamp); - streamStateHashesAndTimestamps.put(streamDescriptorKey, stateHashesAndTimestamps); - } else { - final List streamDescriptorValue = streamStateHashesAndTimestamps.get(streamDescriptorKey); - streamDescriptorValue.add(stateHashAndTimestamp); - } - remainingCapacity -= 1; - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - void updateMaxAndMeanSeconds(final LocalDateTime startingTime, final LocalDateTime timeCommitted) { - final Long secondsUntilCommit = calculateSecondsBetweenStateEmittedAndCommitted(startingTime, timeCommitted); - if (maxSecondsBetweenStateMessageEmittedandCommitted < secondsUntilCommit) { - maxSecondsBetweenStateMessageEmittedandCommitted = secondsUntilCommit; - } - - if (totalDestinationEmittedStateMessages.get() == 1) { - meanSecondsBetweenStateMessageEmittedandCommitted = secondsUntilCommit; - } else { - final Long newMeanSeconds = - calculateMean(meanSecondsBetweenStateMessageEmittedandCommitted, 
totalDestinationEmittedStateMessages.get(), secondsUntilCommit); - meanSecondsBetweenStateMessageEmittedandCommitted = newMeanSeconds; - } - } - - private LocalDateTime findStartingTimeStampAndRemoveOlderEntries(final List stateList, final int stateHash) - throws StateMetricsTrackerNoStateMatchException { - // iterate through each [state_hash, timestamp] in the list - // update the first timestamp to equal min_timestamp - // and remove all items from the list as we iterate through - // break once we reach the state hash equal to the input(destination) state hash - Boolean foundStateHash = false; - Long minTime = null; - final Iterator iterator = stateList.iterator(); - while (iterator.hasNext()) { - final byte[] stateMessageTime = iterator.next(); - final ByteBuffer current = ByteBuffer.wrap(stateMessageTime); - remainingCapacity += 1; - final int currentStateHash = current.getInt(); - final Long epochTime = current.getLong(); - if (minTime == null) { - minTime = epochTime; - } - iterator.remove(); - - if (stateHash == currentStateHash) { - foundStateHash = true; - break; - } - } - - if (!foundStateHash || minTime == null) { - throw new StateMetricsTrackerNoStateMatchException("Destination state message cannot be matched to corresponding Source state message."); - } - return LocalDateTime.ofEpochSecond(minTime, 0, ZoneOffset.UTC); - } - - Long calculateSecondsBetweenStateEmittedAndCommitted(final LocalDateTime stateMessageEmittedAt, final LocalDateTime stateMessageCommittedAt) { - return stateMessageEmittedAt.until(stateMessageCommittedAt, ChronoUnit.SECONDS); - } - - protected Long calculateMean(final Long currentMean, final Long totalCount, final Long newDataPoint) { - final Long previousCount = totalCount - 1; - final double result = (Double.valueOf(currentMean * previousCount) / totalCount) + (Double.valueOf(newDataPoint) / totalCount); - return (long) result; - } - - public void updateMaxAndMeanSecondsToReceiveStateMessage(final LocalDateTime 
stateMessageReceivedAt) { - final Long secondsSinceLastStateMessage = calculateSecondsSinceLastStateEmitted(stateMessageReceivedAt); - if (maxSecondsToReceiveSourceStateMessage < secondsSinceLastStateMessage) { - maxSecondsToReceiveSourceStateMessage = secondsSinceLastStateMessage; - } - - if (meanSecondsToReceiveSourceStateMessage == 0) { - meanSecondsToReceiveSourceStateMessage = secondsSinceLastStateMessage; - } else { - final Long newMeanSeconds = - calculateMean(meanSecondsToReceiveSourceStateMessage, totalSourceEmittedStateMessages.get(), secondsSinceLastStateMessage); - meanSecondsToReceiveSourceStateMessage = newMeanSeconds; - } - } - - private Long calculateSecondsSinceLastStateEmitted(final LocalDateTime stateMessageReceivedAt) { - if (lastStateMessageReceivedAt != null) { - return lastStateMessageReceivedAt.until(stateMessageReceivedAt, ChronoUnit.SECONDS); - } else if (firstRecordReceivedAt != null) { - return firstRecordReceivedAt.until(stateMessageReceivedAt, ChronoUnit.SECONDS); - } else { - // If we receive a State Message before a Record Message there is no previous timestamp to use for a - // calculation - return 0L; - } - } - - public LocalDateTime getFirstRecordReceivedAt() { - return firstRecordReceivedAt; - } - - public void setFirstRecordReceivedAt(final LocalDateTime receivedAt) { - firstRecordReceivedAt = receivedAt; - } - - public void setLastStateMessageReceivedAt(final LocalDateTime receivedAt) { - lastStateMessageReceivedAt = receivedAt; - } - - public void incrementTotalSourceEmittedStateMessages() { - totalSourceEmittedStateMessages.incrementAndGet(); - } - - public Long getTotalSourceStateMessageEmitted() { - return totalSourceEmittedStateMessages.get(); - } - - public Long getTotalDestinationStateMessageEmitted() { - return totalDestinationEmittedStateMessages.get(); - } - - public Long getMaxSecondsToReceiveSourceStateMessage() { - return maxSecondsToReceiveSourceStateMessage; - } - - public Long 
getMeanSecondsToReceiveSourceStateMessage() { - return meanSecondsToReceiveSourceStateMessage; - } - - public Long getMaxSecondsBetweenStateMessageEmittedAndCommitted() { - return maxSecondsBetweenStateMessageEmittedandCommitted; - } - - public Long getMeanSecondsBetweenStateMessageEmittedAndCommitted() { - return meanSecondsBetweenStateMessageEmittedandCommitted; - } - - protected void incrementTotalDestinationEmittedStateMessages() { - totalDestinationEmittedStateMessages.incrementAndGet(); - } - - private byte[] populateStateTimestampByteArray(final int stateHash, final Long epochTime) { - // allocate num of bytes of state hash + num bytes of epoch time long - final ByteBuffer delta = ByteBuffer.allocate(BYTE_ARRAY_SIZE); - delta.putInt(stateHash); - delta.putLong(epochTime); - return delta.array(); - } - - private String getStreamDescriptorKey(final StreamDescriptor streamDescriptor) { - return streamDescriptor.getName() + "-" + streamDescriptor.getNamespace(); - } - - /** - * Thrown when the StateMetricsTracker exceeds its allotted memory - */ - public static class StateMetricsTrackerOomException extends Exception { - - public StateMetricsTrackerOomException(final String message) { - super(message); - } - - } - - /** - * Thrown when the destination state message is not able to be matched to a source state message - */ - public static class StateMetricsTrackerNoStateMatchException extends Exception { - - public StateMetricsTrackerNoStateMatchException(final String message) { - super(message); - } - - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StreamStats.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StreamStats.java deleted file mode 100644 index 9554ec93d74a..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/book_keeping/StreamStats.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal.book_keeping; - -import java.util.Objects; - -/** - * POJO for all per-stream stats. - *

- * We are not able to use a {@link Record} since we want non-final fields to accumulate counts. - */ -public class StreamStats { - - public long estimatedRecords; - public long estimatedBytes; - public long emittedRecords; - public long emittedBytes; - - public StreamStats() { - this(0L, 0L, 0L, 0L); - } - - public StreamStats(final long estimatedBytes, final long emittedBytes, final long estimatedRecords, final long emittedRecords) { - this.estimatedRecords = estimatedRecords; - this.estimatedBytes = estimatedBytes; - this.emittedRecords = emittedRecords; - this.emittedBytes = emittedBytes; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final StreamStats that = (StreamStats) o; - return estimatedRecords == that.estimatedRecords && estimatedBytes == that.estimatedBytes && emittedRecords == that.emittedRecords - && emittedBytes == that.emittedBytes; - } - - @Override - public int hashCode() { - return Objects.hash(estimatedRecords, estimatedBytes, emittedRecords, emittedBytes); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/exception/DestinationException.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/exception/DestinationException.java deleted file mode 100644 index 9ab0b31b4baa..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/exception/DestinationException.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal.exception; - -public class DestinationException extends RuntimeException { - - public DestinationException(final String message) { - super(message); - } - - public DestinationException(final String message, final Throwable cause) { - super(message, cause); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/exception/SourceException.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/exception/SourceException.java deleted file mode 100644 index d4fced246f32..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/exception/SourceException.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal.exception; - -public class SourceException extends RuntimeException { - - public SourceException(final String message) { - super(message); - } - - public SourceException(final String message, final Throwable cause) { - super(message, cause); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/DefaultStateAggregator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/DefaultStateAggregator.java deleted file mode 100644 index cfe9878d7338..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/DefaultStateAggregator.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal.state_aggregator; - -import com.google.common.base.Preconditions; -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; - -public class DefaultStateAggregator implements StateAggregator { - - private AirbyteStateType stateType = null; - private final StateAggregator streamStateAggregator = new StreamStateAggregator(); - private final StateAggregator singleStateAggregator = new SingleStateAggregator(); - private final boolean useStreamCapableState; - - public DefaultStateAggregator(final boolean useStreamCapableState) { - this.useStreamCapableState = useStreamCapableState; - } - - @Override - public void ingest(final AirbyteStateMessage stateMessage) { - checkTypeOrSetType(stateMessage.getType()); - - getStateAggregator().ingest(stateMessage); - } - - @Override - public State getAggregated() { - return getStateAggregator().getAggregated(); - } - - /** - * Return the state aggregator that match the state type. - */ - private StateAggregator getStateAggregator() { - if (!useStreamCapableState) { - return singleStateAggregator; - } else { - return switch (stateType) { - case STREAM -> streamStateAggregator; - case GLOBAL, LEGACY -> singleStateAggregator; - }; - } - } - - /** - * We can not have 2 different state types given to the same instance of this class. This method set - * the type if it is not. 
If the state type doesn't exist in the message, it is set to LEGACY - */ - private void checkTypeOrSetType(final AirbyteStateType inputStateType) { - final AirbyteStateType validatedStateType; - if (inputStateType == null) { - validatedStateType = AirbyteStateType.LEGACY; - } else { - validatedStateType = inputStateType; - } - if (this.stateType == null) { - this.stateType = validatedStateType; - } - Preconditions.checkArgument(this.stateType == validatedStateType, - "Input state type " + validatedStateType + " does not match the aggregator's current state type " + this.stateType); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/SingleStateAggregator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/SingleStateAggregator.java deleted file mode 100644 index ae669d5e0191..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/SingleStateAggregator.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal.state_aggregator; - -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import java.util.List; - -class SingleStateAggregator implements StateAggregator { - - AirbyteStateMessage state; - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void ingest(final AirbyteStateMessage stateMessage) { - state = stateMessage; - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public State getAggregated() { - if (state.getType() == null || state.getType() == AirbyteStateType.LEGACY) { - return new State().withState(state.getData()); - } else { - /** - * The destination emit a Legacy state in order to be retro-compatible with old platform. If we are - * running this code, we know that the platform has been upgraded and we can thus discard the legacy - * state. Keeping the legacy state is causing issue because of its size - * (https://github.com/airbytehq/oncall/issues/731) - */ - state.setData(null); - return new State() - .withState(Jsons.jsonNode(List.of(state))); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/StateAggregator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/StateAggregator.java deleted file mode 100644 index f24413e03cfe..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/StateAggregator.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal.state_aggregator; - -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteStateMessage; - -public interface StateAggregator { - - void ingest(AirbyteStateMessage stateMessage); - - State getAggregated(); - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/StreamStateAggregator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/StreamStateAggregator.java deleted file mode 100644 index d7f0694c5aba..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/state_aggregator/StreamStateAggregator.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal.state_aggregator; - -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.StreamDescriptor; -import java.util.HashMap; -import java.util.Map; - -class StreamStateAggregator implements StateAggregator { - - Map aggregatedState = new HashMap<>(); - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void ingest(final AirbyteStateMessage stateMessage) { - /** - * The destination emit a Legacy state in order to be retro-compatible with old platform. If we are - * running this code, we know that the platform has been upgraded and we can thus discard the legacy - * state. 
Keeping the legacy state is causing issue because of its size - * (https://github.com/airbytehq/oncall/issues/731) - */ - stateMessage.setData(null); - aggregatedState.put(stateMessage.getStream().getStreamDescriptor(), stateMessage); - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public State getAggregated() { - - return new State() - .withState( - Jsons.jsonNode(aggregatedState.values())); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java index eff58aad22df..79bb870c493d 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java @@ -20,7 +20,6 @@ import io.airbyte.commons.logging.MdcScope.Builder; import io.airbyte.config.OperatorDbt; import io.airbyte.config.ResourceRequirements; -import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper; import io.airbyte.protocol.models.AirbyteErrorTraceMessage; import io.airbyte.protocol.models.AirbyteErrorTraceMessage.FailureType; import io.airbyte.protocol.models.AirbyteMessage; @@ -158,7 +157,6 @@ private boolean runProcess(final String jobId, .withError(new AirbyteErrorTraceMessage() .withFailureType(FailureType.SYSTEM_ERROR) // TODO: decide on best FailureType for this .withMessage("Normalization failed during the dbt run. 
This may indicate a problem with the data itself.") - .withInternalMessage(buildInternalErrorMessageFromDbtStackTrace()) // due to the lack of consistent defining features in dbt errors we're injecting a breadcrumb to the // stacktrace so we can confidently identify all dbt errors when parsing and sending to Sentry // see dbt error examples: https://docs.getdbt.com/guides/legacy/debugging-errors for more context @@ -212,9 +210,4 @@ public Stream getTraceMessages() { return Stream.empty(); } - private String buildInternalErrorMessageFromDbtStackTrace() { - final Map errorMap = SentryExceptionHelper.getUsefulErrorMessageAndTypeFromDbtError(dbtErrorStack); - return errorMap.get(SentryExceptionHelper.ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY); - } - } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationWorker.java deleted file mode 100644 index 8138bcdb5e77..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationWorker.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.normalization; - -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.workers.Worker; - -public interface NormalizationWorker extends Worker {} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java index d51747587fbc..e2ae3a160401 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java @@ -4,10 +4,6 @@ package io.airbyte.workers.process; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; import static io.airbyte.workers.process.Metadata.CHECK_JOB; import static io.airbyte.workers.process.Metadata.DISCOVER_JOB; import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; @@ -21,7 +17,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import datadog.trace.api.Trace; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; import io.airbyte.config.AllowedHosts; @@ -29,7 +24,6 @@ import io.airbyte.config.EnvConfigs; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.WorkerEnvConstants; -import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.workers.exception.WorkerException; import java.nio.file.Path; import java.util.Collections; @@ -74,10 +68,8 @@ public AirbyteIntegrationLauncher(final String jobId, this.useIsolatedPool = useIsolatedPool; } - 
@Trace(operationName = WORKER_OPERATION_NAME) @Override public Process spec(final Path jobRoot) throws WorkerException { - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot, DOCKER_IMAGE_KEY, imageName)); return processFactory.create( SPEC_JOB, jobId, @@ -96,10 +88,8 @@ public Process spec(final Path jobRoot) throws WorkerException { "spec"); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public Process check(final Path jobRoot, final String configFilename, final String configContents) throws WorkerException { - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot, DOCKER_IMAGE_KEY, imageName)); return processFactory.create( CHECK_JOB, jobId, @@ -119,10 +109,8 @@ public Process check(final Path jobRoot, final String configFilename, final Stri CONFIG, configFilename); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public Process discover(final Path jobRoot, final String configFilename, final String configContents) throws WorkerException { - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot, DOCKER_IMAGE_KEY, imageName)); return processFactory.create( DISCOVER_JOB, jobId, @@ -142,7 +130,6 @@ public Process discover(final Path jobRoot, final String configFilename, final S CONFIG, configFilename); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public Process read(final Path jobRoot, final String configFilename, @@ -152,7 +139,6 @@ public Process read(final Path jobRoot, final String stateFilename, final String stateContents) throws WorkerException { - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot, DOCKER_IMAGE_KEY, imageName)); final List arguments = Lists.newArrayList( "read", CONFIG, configFilename, @@ -188,7 +174,6 @@ public Process read(final Path jobRoot, arguments.toArray(new String[arguments.size()])); } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public Process write(final Path jobRoot, final String 
configFilename, @@ -196,7 +181,6 @@ public Process write(final Path jobRoot, final String catalogFilename, final String catalogContents) throws WorkerException { - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot, DOCKER_IMAGE_KEY, imageName)); final Map files = ImmutableMap.of( configFilename, configContents, catalogFilename, catalogContents); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncKubePodStatus.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncKubePodStatus.java deleted file mode 100644 index 819c362148b3..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncKubePodStatus.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -public enum AsyncKubePodStatus { - NOT_STARTED, // Pod hasn't been started yet. - INITIALIZING, // On-start container started but not completed - RUNNING, // Main container posted running - FAILED, // Reported status was "failed" or pod was in Error (or other terminal state) without a reported - // status. - SUCCEEDED; // Reported status was "success" so both main and on-start succeeded. -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java deleted file mode 100644 index b3023018e7f0..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java +++ /dev/null @@ -1,515 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.process; - -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.workers.storage.DocumentStoreClient; -import io.fabric8.kubernetes.api.model.ContainerBuilder; -import io.fabric8.kubernetes.api.model.ContainerPort; -import io.fabric8.kubernetes.api.model.DeletionPropagation; -import io.fabric8.kubernetes.api.model.EnvVar; -import io.fabric8.kubernetes.api.model.Pod; -import io.fabric8.kubernetes.api.model.PodBuilder; -import io.fabric8.kubernetes.api.model.SecretVolumeSourceBuilder; -import io.fabric8.kubernetes.api.model.Volume; -import io.fabric8.kubernetes.api.model.VolumeBuilder; -import io.fabric8.kubernetes.api.model.VolumeMount; -import io.fabric8.kubernetes.api.model.VolumeMountBuilder; -import io.fabric8.kubernetes.client.KubernetesClient; -import io.micronaut.core.util.StringUtils; -import java.io.BufferedOutputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.charset.Charset; -import java.nio.file.Path; -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; -import lombok.extern.slf4j.Slf4j; - -/** - * This process allows creating and managing a pod outside the lifecycle of the launching - * application. Unlike {@link KubePodProcess} there is no heartbeat mechanism that requires the - * launching pod and the launched pod to co-exist for the duration of execution for the launched - * pod. - *

- * Instead, this process creates the pod and interacts with a document store on cloud storage to - * understand the state of the created pod. - *

- * The document store is considered to be the truth when retrieving the status for an async pod - * process. If the store isn't updated by the underlying pod, it will appear as failed. - */ -@Slf4j -public class AsyncOrchestratorPodProcess implements KubePod { - - public static final String KUBE_POD_INFO = "KUBE_POD_INFO"; - public static final String NO_OP = "NO_OP"; - - private final KubePodInfo kubePodInfo; - private final DocumentStoreClient documentStoreClient; - private final KubernetesClient kubernetesClient; - private final String secretName; - private final String secretMountPath; - private final String googleApplicationCredentials; - private final String dataPlaneCredsSecretName; - private final String dataPlaneCredsSecretMountPath; - private final AtomicReference> cachedExitValue; - private final Map environmentVariables; - private final Integer serverPort; - - public AsyncOrchestratorPodProcess( - final KubePodInfo kubePodInfo, - final DocumentStoreClient documentStoreClient, - final KubernetesClient kubernetesClient, - final String secretName, - final String secretMountPath, - final String dataPlaneCredsSecretName, - final String dataPlaneCredsSecretMountPath, - final String googleApplicationCredentials, - final Map environmentVariables, - final Integer serverPort) { - this.kubePodInfo = kubePodInfo; - this.documentStoreClient = documentStoreClient; - this.kubernetesClient = kubernetesClient; - this.secretName = secretName; - this.secretMountPath = secretMountPath; - this.dataPlaneCredsSecretName = dataPlaneCredsSecretName; - this.dataPlaneCredsSecretMountPath = dataPlaneCredsSecretMountPath; - this.googleApplicationCredentials = googleApplicationCredentials; - this.cachedExitValue = new AtomicReference<>(Optional.empty()); - this.environmentVariables = environmentVariables; - this.serverPort = serverPort; - } - - public Optional getOutput() { - final var possibleOutput = getDocument(AsyncKubePodStatus.SUCCEEDED.name()); - - if 
(possibleOutput.isPresent() && possibleOutput.get().isBlank()) { - return Optional.empty(); - } else { - return possibleOutput; - } - } - - private int computeExitValue() { - final AsyncKubePodStatus docStoreStatus = getDocStoreStatus(); - - // trust the doc store if it's in a terminal state - if (docStoreStatus.equals(AsyncKubePodStatus.FAILED)) { - log.warn("State Store reports orchestrator pod {} failed", getInfo().name()); - return 1; - } else if (docStoreStatus.equals(AsyncKubePodStatus.SUCCEEDED)) { - log.info("State Store reports orchestrator pod {} succeeded", getInfo().name()); - return 0; - } - - final Pod pod = kubernetesClient.pods() - .inNamespace(getInfo().namespace()) - .withName(getInfo().name()) - .get(); - - // Since the pod creation blocks until the pod is created the first time, - // if the pod no longer exists (and we don't have a success/fail document) - // we must be in a failure state. If it wasn't able to write out its status - // we must assume failure, since the document store is the "truth" for - // async pod status. - if (pod == null) { - log.info("State Store missing status. Orchestrator pod {} non-existent. Assume failure.", getInfo().name()); - return 1; - } - - // If the pod does exist, it may be in a terminal (error or completed) state. - final boolean isTerminal = KubePodResourceHelper.isTerminal(pod); - - if (isTerminal) { - // In case the doc store was updated in between when we pulled it and when - // we read the status from the Kubernetes API, we need to check the doc store again. 
- final AsyncKubePodStatus secondDocStoreStatus = getDocStoreStatus(); - if (secondDocStoreStatus.equals(AsyncKubePodStatus.FAILED)) { - log.warn("State Store reports orchestrator pod {} failed", getInfo().name()); - return 1; - } else if (secondDocStoreStatus.equals(AsyncKubePodStatus.SUCCEEDED)) { - log.info("State Store reports orchestrator pod {} succeeded", getInfo().name()); - return 0; - } else { - // otherwise, the actual pod is terminal when the doc store says it shouldn't be. - log.info("The current non terminal state is {}", secondDocStoreStatus); - log.warn("State Store missing status, however orchestrator pod {} in terminal. Assume failure.", getInfo().name()); - return 1; - } - } - - // Otherwise, throw an exception because this is still running, which will be caught in hasExited - switch (docStoreStatus) { - case NOT_STARTED -> throw new IllegalThreadStateException("Pod hasn't started yet."); - case INITIALIZING -> throw new IllegalThreadStateException("Pod is initializing."); - default -> throw new IllegalThreadStateException("Pod is running."); - } - } - - @Override - public int exitValue() { - final var optionalCached = cachedExitValue.get(); - - if (optionalCached.isPresent()) { - return optionalCached.get(); - } else { - final var exitValue = computeExitValue(); - cachedExitValue.set(Optional.of(exitValue)); - return exitValue; - } - } - - @Override - public void destroy() { - final var wasDestroyed = kubernetesClient.pods() - .inNamespace(getInfo().namespace()) - .withName(getInfo().name()) - .withPropagationPolicy(DeletionPropagation.FOREGROUND) - .delete(); - - if (wasDestroyed) { - log.info("Deleted pod {} in namespace {}", getInfo().name(), getInfo().namespace()); - } else { - log.warn("Wasn't able to delete pod {} from namespace {}", getInfo().name(), getInfo().namespace()); - } - } - - // implementation copied from Process.java since this isn't a real Process - public boolean hasExited() { - try { - exitValue(); - return true; - } catch 
(final IllegalThreadStateException e) { - return false; - } - } - - public boolean waitFor(final long timeout, final TimeUnit unit) throws InterruptedException { - // implementation copied from Process.java since this isn't a real Process - long remainingNanos = unit.toNanos(timeout); - if (hasExited()) { - return true; - } - if (timeout <= 0) { - return false; - } - - final long deadline = System.nanoTime() + remainingNanos; - do { - // The remainingNanos bit is about calculating how much time left for the actual timeout. - // Most of the time we should be sleeping for 500ms except when we get to the actual timeout. - // We are waiting polling every 500ms for status. The trade-off here is between how often - // we poll our status storage (GCS) and how reactive we are to detect that a process is done. - Thread.sleep(Math.min(TimeUnit.NANOSECONDS.toMillis(remainingNanos) + 1, 500)); - if (hasExited()) { - return true; - } - remainingNanos = deadline - System.nanoTime(); - } while (remainingNanos > 0); - - return false; - } - - @Override - public int waitFor() throws InterruptedException { - final boolean exited = waitFor(10, TimeUnit.DAYS); - - if (exited) { - return exitValue(); - } else { - throw new InterruptedException("Pod did not complete within timeout."); - } - } - - @Override - public KubePodInfo getInfo() { - return kubePodInfo; - } - - @Override - public Process toProcess() { - return new Process() { - - @Override - public OutputStream getOutputStream() { - try { - final String output = AsyncOrchestratorPodProcess.this.getOutput().orElse(""); - final OutputStream os = new BufferedOutputStream(new ByteArrayOutputStream()); - os.write(output.getBytes(Charset.defaultCharset())); - return os; - } catch (final Exception e) { - log.warn("Unable to write output to stream.", e); - return OutputStream.nullOutputStream(); - } - } - - @Override - public InputStream getInputStream() { - return InputStream.nullInputStream(); - } - - @Override - public InputStream 
getErrorStream() { - return InputStream.nullInputStream(); - } - - @Override - public int waitFor() throws InterruptedException { - return AsyncOrchestratorPodProcess.this.waitFor(); - } - - @Override - public int exitValue() { - return AsyncOrchestratorPodProcess.this.exitValue(); - } - - @Override - public void destroy() { - AsyncOrchestratorPodProcess.this.destroy(); - } - - @Override - public boolean waitFor(final long timeout, final TimeUnit unit) throws InterruptedException { - return AsyncOrchestratorPodProcess.this.waitFor(timeout, unit); - } - - }; - } - - private Optional getDocument(final String key) { - return documentStoreClient.read(getInfo().namespace() + "/" + getInfo().name() + "/" + key); - } - - private boolean checkStatus(final AsyncKubePodStatus status) { - return getDocument(status.name()).isPresent(); - } - - /** - * Checks terminal states first, then running, then initialized. Defaults to not started. - *

- * The order matters here! - */ - public AsyncKubePodStatus getDocStoreStatus() { - if (checkStatus(AsyncKubePodStatus.FAILED)) { - return AsyncKubePodStatus.FAILED; - } else if (checkStatus(AsyncKubePodStatus.SUCCEEDED)) { - return AsyncKubePodStatus.SUCCEEDED; - } else if (checkStatus(AsyncKubePodStatus.RUNNING)) { - return AsyncKubePodStatus.RUNNING; - } else if (checkStatus(AsyncKubePodStatus.INITIALIZING)) { - return AsyncKubePodStatus.INITIALIZING; - } else { - return AsyncKubePodStatus.NOT_STARTED; - } - } - - // but does that mean there won't be a docker equivalent? - public void create(final Map allLabels, - final ResourceRequirements resourceRequirements, - final Map fileMap, - final Map portMap, - final Map nodeSelectors) { - final List volumes = new ArrayList<>(); - final List volumeMounts = new ArrayList<>(); - final List envVars = new ArrayList<>(); - - volumes.add(new VolumeBuilder() - .withName("airbyte-config") - .withNewEmptyDir() - .withMedium("Memory") - .endEmptyDir() - .build()); - - volumeMounts.add(new VolumeMountBuilder() - .withName("airbyte-config") - .withMountPath(KubePodProcess.CONFIG_DIR) - .build()); - - if (secretName != null && secretMountPath != null && StringUtils.isNotEmpty(googleApplicationCredentials)) { - volumes.add(new VolumeBuilder() - .withName("airbyte-secret") - .withSecret(new SecretVolumeSourceBuilder() - .withSecretName(secretName) - .withDefaultMode(420) - .build()) - .build()); - - volumeMounts.add(new VolumeMountBuilder() - .withName("airbyte-secret") - .withMountPath(secretMountPath) - .build()); - - envVars.add(new EnvVar(LogClientSingleton.GOOGLE_APPLICATION_CREDENTIALS, googleApplicationCredentials, null)); - - } - - if (StringUtils.isNotEmpty(dataPlaneCredsSecretName) && StringUtils.isNotEmpty(dataPlaneCredsSecretMountPath)) { - volumes.add(new VolumeBuilder() - .withName("airbyte-dataplane-creds") - .withSecret(new SecretVolumeSourceBuilder() - .withSecretName(dataPlaneCredsSecretName) - 
.withDefaultMode(420) - .build()) - .build()); - - volumeMounts.add(new VolumeMountBuilder() - .withName("airbyte-dataplane-creds") - .withMountPath(dataPlaneCredsSecretMountPath) - .build()); - } - - // Copy all additionally provided environment variables - envVars.addAll(environmentVariables.entrySet().stream().map(e -> new EnvVar(e.getKey(), e.getValue(), null)).toList()); - - final List containerPorts = KubePodProcess.createContainerPortList(portMap); - containerPorts.add(new ContainerPort(serverPort, null, null, null, null)); - - final var initContainer = new ContainerBuilder() - .withName(KubePodProcess.INIT_CONTAINER_NAME) - .withImage("busybox:1.35") - .withVolumeMounts(volumeMounts) - .withCommand(List.of( - "sh", - "-c", - String.format(""" - i=0 - until [ $i -gt 60 ] - do - echo "$i - waiting for config file transfer to complete..." - # check if the upload-complete file exists, if so exit without error - if [ -f "%s/%s" ]; then - exit 0 - fi - i=$((i+1)) - sleep 1 - done - echo "config files did not transfer in time" - # no upload-complete file was created in time, exit with error - exit 1 - """, - KubePodProcess.CONFIG_DIR, - KubePodProcess.SUCCESS_FILE_NAME))) - .build(); - - final var mainContainer = new ContainerBuilder() - .withName(KubePodProcess.MAIN_CONTAINER_NAME) - .withImage(kubePodInfo.mainContainerInfo().image()) - .withImagePullPolicy(kubePodInfo.mainContainerInfo().pullPolicy()) - .withResources(KubePodProcess.getResourceRequirementsBuilder(resourceRequirements).build()) - .withEnv(envVars) - .withPorts(containerPorts) - .withVolumeMounts(volumeMounts) - .build(); - - final Pod podToCreate = new PodBuilder() - .withApiVersion("v1") - .withNewMetadata() - .withName(getInfo().name()) - .withNamespace(getInfo().namespace()) - .withLabels(allLabels) - .endMetadata() - .withNewSpec() - .withServiceAccount("airbyte-admin") - .withAutomountServiceAccountToken(true) - .withRestartPolicy("Never") - .withContainers(mainContainer) - 
.withInitContainers(initContainer) - .withVolumes(volumes) - .withNodeSelector(nodeSelectors) - .endSpec() - .build(); - - // should only create after the kubernetes API creates the pod - final var createdPod = kubernetesClient.pods() - .inNamespace(getInfo().namespace()) - .createOrReplace(podToCreate); - - log.info("Waiting for pod to be running..."); - kubernetesClient.pods() - .inNamespace(kubePodInfo.namespace()) - .withName(kubePodInfo.name()) - .waitUntilCondition(p -> !p.getStatus().getInitContainerStatuses().isEmpty() - && p.getStatus().getInitContainerStatuses().get(0).getState().getWaiting() == null, - 5, TimeUnit.MINUTES); - - final var podStatus = kubernetesClient.pods() - .inNamespace(kubePodInfo.namespace()) - .withName(kubePodInfo.name()) - .get() - .getStatus(); - - final var containerState = podStatus - .getInitContainerStatuses() - .get(0) - .getState(); - - if (containerState.getRunning() == null) { - throw new RuntimeException("Pod was not running, state was: " + containerState); - } - - log.info(String.format("Pod %s/%s is running on %s", kubePodInfo.namespace(), kubePodInfo.name(), podStatus.getPodIP())); - - final var updatedFileMap = new HashMap<>(fileMap); - updatedFileMap.put(KUBE_POD_INFO, Jsons.serialize(kubePodInfo)); - - copyFilesToKubeConfigVolumeMain(createdPod, updatedFileMap); - } - - private static void copyFilesToKubeConfigVolumeMain(final Pod podDefinition, final Map files) { - final List> fileEntries = new ArrayList<>(files.entrySet()); - - // copy this file last to indicate that the copy has completed - fileEntries.add(new AbstractMap.SimpleEntry<>(KubePodProcess.SUCCESS_FILE_NAME, "")); - - Path tmpFile = null; - Process proc = null; - for (final Map.Entry file : fileEntries) { - try { - tmpFile = Path.of(IOs.writeFileToRandomTmpDir(file.getKey(), file.getValue())); - - log.info("Uploading file: " + file.getKey()); - final var containerPath = Path.of(KubePodProcess.CONFIG_DIR + "/" + file.getKey()); - - // using kubectl cp 
directly here, because both fabric and the official kube client APIs have - // several issues with copying files. See https://github.com/airbytehq/airbyte/issues/8643 for - // details. - final String command = String.format("kubectl cp %s %s/%s:%s -c %s", tmpFile, podDefinition.getMetadata().getNamespace(), - podDefinition.getMetadata().getName(), containerPath, KubePodProcess.INIT_CONTAINER_NAME); - log.info(command); - - proc = Runtime.getRuntime().exec(command); - log.info("Waiting for kubectl cp to complete"); - final int exitCode = proc.waitFor(); - - if (exitCode != 0) { - throw new IOException("kubectl cp failed with exit code " + exitCode); - } - - log.info("kubectl cp complete, closing process"); - } catch (final IOException | InterruptedException e) { - throw new RuntimeException(e); - } finally { - if (tmpFile != null) { - tmpFile.toFile().delete(); - } - if (proc != null) { - proc.destroy(); - } - } - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/ExitCodeWatcher.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/ExitCodeWatcher.java deleted file mode 100644 index ddf06ab1bf41..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/ExitCodeWatcher.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import com.google.common.collect.MoreCollectors; -import io.fabric8.kubernetes.api.model.ContainerStatus; -import io.fabric8.kubernetes.api.model.Pod; -import io.fabric8.kubernetes.client.informers.ResourceEventHandler; -import java.util.Optional; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; -import lombok.extern.slf4j.Slf4j; - -/** - * The exit code watcher uses the Kubernetes watch API, which provides a subscription to events for - * a pod. 
This subscription has better latency than polling at the expense of keeping a connection - * open with the Kubernetes API server. Since it offers all events, it helps us handle cases like - * where a pod is swept or deleted immediately after running on a Kubernetes cluster (we will still - * be able to retrieve the exit code). - */ -@Slf4j -public class ExitCodeWatcher implements ResourceEventHandler { - - private final String podName; - private final String podNamespace; - private final Consumer onExitCode; - private final Runnable onWatchFailure; - /** - * This flag is set to false when we either (a) find the pod's exit code, or (b) when the pod is - * deleted. This is so that we call exactly one of onExitCode and onWatchFailure, and we make that - * call exactly once. - *

- * We rely on this class being side-effect-free, outside of persistExitCode() and persistFailure(). - * Those two methods use compareAndSet to prevent race conditions. Everywhere else, we can be sloppy - * because we won't actually emit any output. - */ - private final AtomicBoolean active = new AtomicBoolean(true); - - /** - * @param onExitCode callback used to store the exit code - * @param onWatchFailure callback that's triggered when the watch fails. should be some failed exit - * code. - */ - public ExitCodeWatcher(final String podName, - final String podNamespace, - final Consumer onExitCode, - final Runnable onWatchFailure) { - this.podName = podName; - this.podNamespace = podNamespace; - this.onExitCode = onExitCode; - this.onWatchFailure = onWatchFailure; - } - - @Override - public void onAdd(final Pod pod) { - if (shouldCheckPod(pod)) { - final Optional exitCode = getExitCode(pod); - exitCode.ifPresent(this::persistExitCode); - } - } - - @Override - public void onUpdate(final Pod oldPod, final Pod newPod) { - if (shouldCheckPod(newPod)) { - final Optional exitCode = getExitCode(newPod); - exitCode.ifPresent(this::persistExitCode); - } - } - - @Override - public void onDelete(final Pod pod, final boolean deletedFinalStateUnknown) { - if (shouldCheckPod(pod)) { - if (!deletedFinalStateUnknown) { - final Optional exitCode = getExitCode(pod); - exitCode.ifPresentOrElse( - this::persistExitCode, - this::persistFailure); - } else { - persistFailure(); - } - } - } - - /** - * Informers without an OperationContext will monitor ALL pods in ALL namespaces; filter down to the - * one pod that we care about. If it's still running, then we obviously can't fetch its exit code. - *

- * Also, if we've already found the exit code, or the pod has been deleted, then stop doing anything - * at all. - */ - private boolean shouldCheckPod(final Pod pod) { - final boolean correctName = podName.equals(pod.getMetadata().getName()); - final boolean correctNamespace = podNamespace.equals(pod.getMetadata().getNamespace()); - return active.get() && correctName && correctNamespace && KubePodResourceHelper.isTerminal(pod); - } - - private Optional getExitCode(final Pod pod) { - final ContainerStatus mainContainerStatus = pod.getStatus().getContainerStatuses() - .stream() - .filter(containerStatus -> containerStatus.getName().equals(KubePodProcess.MAIN_CONTAINER_NAME)) - .collect(MoreCollectors.onlyElement()); - - if (mainContainerStatus.getState() != null && mainContainerStatus.getState().getTerminated() != null) { - return Optional.of(mainContainerStatus.getState().getTerminated().getExitCode()); - } - return Optional.empty(); - } - - private void persistExitCode(final int exitCode) { - if (active.compareAndSet(true, false)) { - log.info("Received exit code {} for pod {}", exitCode, podName); - onExitCode.accept(exitCode); - } - } - - private void persistFailure() { - if (active.compareAndSet(true, false)) { - // Log an error. The pod is completely gone, and we have no way to retrieve its exit code - // In theory, this shouldn't really happen. From - // https://pkg.go.dev/k8s.io/client-go/tools/cache#DeletedFinalStateUnknown: - // "in the case where an object was deleted but the watch deletion event was missed while - // disconnected from apiserver" - // But we have this handler just in case. 
- log.error("Pod {} was deleted before we could retrieve its exit code", podName); - onWatchFailure.run(); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeContainerInfo.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeContainerInfo.java deleted file mode 100644 index 18fedb80cf7d..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeContainerInfo.java +++ /dev/null @@ -1,7 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -public record KubeContainerInfo(String image, String pullPolicy) {} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePod.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePod.java deleted file mode 100644 index 795ded2ae1b0..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePod.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -public interface KubePod { - - int exitValue(); - - void destroy(); - - int waitFor() throws InterruptedException; - - KubePodInfo getInfo(); - - Process toProcess(); - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodInfo.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodInfo.java deleted file mode 100644 index 97fe7332592d..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodInfo.java +++ /dev/null @@ -1,7 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.process; - -public record KubePodInfo(String namespace, String name, KubeContainerInfo mainContainerInfo) {} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java deleted file mode 100644 index 2aa2ebb3f5e5..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java +++ /dev/null @@ -1,831 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.TolerationPOJO; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.OssMetricsRegistry; -import io.fabric8.kubernetes.api.model.Container; -import io.fabric8.kubernetes.api.model.ContainerBuilder; -import io.fabric8.kubernetes.api.model.ContainerPort; -import io.fabric8.kubernetes.api.model.ContainerPortBuilder; -import io.fabric8.kubernetes.api.model.DeletionPropagation; -import io.fabric8.kubernetes.api.model.EnvVar; -import io.fabric8.kubernetes.api.model.LocalObjectReference; -import io.fabric8.kubernetes.api.model.Pod; -import io.fabric8.kubernetes.api.model.PodBuilder; -import io.fabric8.kubernetes.api.model.PodFluent; -import io.fabric8.kubernetes.api.model.Quantity; -import io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder; -import io.fabric8.kubernetes.api.model.Toleration; -import io.fabric8.kubernetes.api.model.TolerationBuilder; -import io.fabric8.kubernetes.api.model.Volume; -import io.fabric8.kubernetes.api.model.VolumeBuilder; -import io.fabric8.kubernetes.api.model.VolumeMount; -import 
io.fabric8.kubernetes.api.model.VolumeMountBuilder; -import io.fabric8.kubernetes.client.KubernetesClient; -import io.fabric8.kubernetes.client.dsl.PodResource; -import io.fabric8.kubernetes.client.informers.SharedIndexInformer; -import io.fabric8.kubernetes.client.internal.readiness.Readiness; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.lang.ProcessHandle.Info; -import java.net.ServerSocket; -import java.net.Socket; -import java.nio.file.Path; -import java.time.Duration; -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; -import lombok.val; -import org.apache.commons.io.output.NullOutputStream; -import org.apache.commons.text.StringEscapeUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -/** - * A Process abstraction backed by a Kube Pod running in a Kubernetes cluster 'somewhere'. The - * parent process starting a Kube Pod Process needs to exist within the Kube networking space. This - * is so the parent process can forward data into the child's stdin and read the child's stdout and - * stderr streams and copy configuration files over. - *

- * This is made possible by: - *

    - *
  • 1) An init container that creates 3 named pipes corresponding to stdin, stdout and std err on - * a shared volume.
  • - *
  • 2) Config files (e.g. config.json, catalog.json etc) are copied from the parent process into - * a shared volume.
  • - *
  • 3) Redirecting the stdin named pipe to the original image's entrypoint and it's output into - * the respective named pipes for stdout and stderr.
  • - *
  • 4) Each named pipe has a corresponding side car. Each side car forwards its stream - * accordingly using socat. e.g. stderr/stdout is forwarded to parent process while input from the - * parent process is forwarded into stdin.
  • - *
  • 5) The parent process listens on the stdout and stederr sockets for an incoming TCP - * connection. It also initiates a TCP connection to the child process aka the Kube pod on the - * specified stdin socket.
  • - *
  • 6) The child process is able to access configuration data via the shared volume. It's inputs - * and outputs - stdin, stdout and stderr - are forwarded the parent process via the sidecars.
  • - *
  • 7) The main process has its entrypoint wrapped to perform IO redirection and better error - * handling.
  • - *
  • 8) A heartbeat sidecar checks if the worker that launched the pod is still alive. If not, the - * pod will fail.
  • - *
- * The docker image used for this pod process must expose a AIRBYTE_ENTRYPOINT which contains the - * entrypoint we will wrap when creating the main container in the pod. - *

- * See the constructor for more information. - */ - -// Suppressing AvoidPrintStackTrace PMD warnings because -// it is required for the connectors -@SuppressWarnings("PMD.AvoidPrintStackTrace") -// TODO(Davin): Better test for this. See https://github.com/airbytehq/airbyte/issues/3700. -public class KubePodProcess implements KubePod { - - private static final Configs configs = new EnvConfigs(); - - private static final Logger LOGGER = LoggerFactory.getLogger(KubePodProcess.class); - - public static final String MAIN_CONTAINER_NAME = "main"; - public static final String INIT_CONTAINER_NAME = "init"; - - private static final ResourceRequirements DEFAULT_SIDECAR_RESOURCES = new ResourceRequirements() - .withMemoryLimit(configs.getSidecarKubeMemoryLimit()).withMemoryRequest(configs.getSidecarMemoryRequest()) - .withCpuLimit(configs.getSidecarKubeCpuLimit()).withCpuRequest(configs.getSidecarKubeCpuRequest()); - private static final ResourceRequirements DEFAULT_SOCAT_RESOURCES = new ResourceRequirements() - .withMemoryLimit(configs.getSidecarKubeMemoryLimit()).withMemoryRequest(configs.getSidecarMemoryRequest()) - .withCpuLimit(configs.getSocatSidecarKubeCpuLimit()).withCpuRequest(configs.getSocatSidecarKubeCpuRequest()); - - private static final String PIPES_DIR = "/pipes"; - private static final String STDIN_PIPE_FILE = PIPES_DIR + "/stdin"; - private static final String STDOUT_PIPE_FILE = PIPES_DIR + "/stdout"; - private static final String STDERR_PIPE_FILE = PIPES_DIR + "/stderr"; - public static final String CONFIG_DIR = "/config"; - public static final String TMP_DIR = "/tmp"; - private static final String TERMINATION_DIR = "/termination"; - private static final String TERMINATION_FILE_MAIN = TERMINATION_DIR + "/main"; - private static final String TERMINATION_FILE_CHECK = TERMINATION_DIR + "/check"; - public static final String SUCCESS_FILE_NAME = "FINISHED_UPLOADING"; - - private static final int STDIN_REMOTE_PORT = 9001; - - // 143 is the typical SIGTERM exit 
code. - // Used when the process is destroyed and the exit code can't be retrieved. - private static final int KILLED_EXIT_CODE = 143; - - // init container should fail if no new data copied into the init container within - // INIT_RETRY_TIMEOUT_MINUTES - private static final double INIT_SLEEP_PERIOD_SECONDS = 0.1; - - // This timeout was initially 1 minute, but sync pods scheduled on newly-provisioned nodes - // are occasionally not able to start the copy within 1 minute, hence the increase to 5. - private static final Duration INIT_RETRY_TIMEOUT_MINUTES = Duration.ofMinutes(5); - - private static final int INIT_RETRY_MAX_ITERATIONS = (int) (INIT_RETRY_TIMEOUT_MINUTES.toSeconds() / INIT_SLEEP_PERIOD_SECONDS); - - private final KubernetesClient fabricClient; - private final Pod podDefinition; - - private final AtomicBoolean wasClosed = new AtomicBoolean(false); - - private final OutputStream stdin; - private InputStream stdout; - private InputStream stderr; - - private final ServerSocket stdoutServerSocket; - private final int stdoutLocalPort; - private final ServerSocket stderrServerSocket; - private final int stderrLocalPort; - private final ExecutorService executorService; - private final CompletableFuture exitCodeFuture; - private final SharedIndexInformer podInformer; - - public static String getPodIP(final KubernetesClient client, final String podName, final String podNamespace) { - final var pod = client.pods().inNamespace(podNamespace).withName(podName).get(); - if (pod == null) { - throw new RuntimeException(prependPodInfo("Error: unable to find pod!", podNamespace, podName)); - } - return pod.getStatus().getPodIP(); - } - - private static Container getInit(final boolean usesStdin, - final List mainVolumeMounts, - final String busyboxImage) - throws IOException { - - final var initCommand = MoreResources.readResource("entrypoints/sync/init.sh") - .replaceAll("USES_STDIN_VALUE", String.valueOf(usesStdin)) - .replaceAll("STDOUT_PIPE_FILE_VALUE", 
STDOUT_PIPE_FILE) - .replaceAll("STDERR_PIPE_FILE_VALUE", STDERR_PIPE_FILE) - .replaceAll("STDIN_PIPE_FILE_VALUE", STDIN_PIPE_FILE) - .replaceAll("MAX_ITERATION_VALUE", String.valueOf(INIT_RETRY_MAX_ITERATIONS)) - .replaceAll("SUCCESS_FILE_NAME_VALUE", SUCCESS_FILE_NAME) - .replaceAll("SLEEP_PERIOD_VALUE", String.valueOf(INIT_SLEEP_PERIOD_SECONDS)); - - return new ContainerBuilder() - .withName(INIT_CONTAINER_NAME) - .withImage(busyboxImage) - .withWorkingDir(CONFIG_DIR) - .withCommand("sh", "-c", initCommand) - .withResources(getResourceRequirementsBuilder(DEFAULT_SIDECAR_RESOURCES).build()) - .withVolumeMounts(mainVolumeMounts) - .build(); - } - - private static Container getMain(final String image, - final String imagePullPolicy, - final boolean usesStdin, - final String entrypointOverride, - final List mainVolumeMounts, - final ResourceRequirements resourceRequirements, - final Map internalToExternalPorts, - final Map envMap, - final String... args) - throws IOException { - final var argsStr = String.join(" ", args); - final var optionalStdin = usesStdin ? String.format("< %s", STDIN_PIPE_FILE) : ""; - final var entrypointOverrideValue = entrypointOverride == null ? 
"" : StringEscapeUtils.escapeXSI(entrypointOverride); - - // communicates its completion to the heartbeat check via a file and closes itself if the heartbeat - // fails - final var mainCommand = MoreResources.readResource("entrypoints/sync/main.sh") - .replaceAll("TERMINATION_FILE_CHECK", TERMINATION_FILE_CHECK) - .replaceAll("TERMINATION_FILE_MAIN", TERMINATION_FILE_MAIN) - .replaceAll("OPTIONAL_STDIN", optionalStdin) - .replace("ENTRYPOINT_OVERRIDE_VALUE", entrypointOverrideValue) // use replace and not replaceAll to preserve escaping and quoting - .replaceAll("ARGS", argsStr) - .replaceAll("STDERR_PIPE_FILE", STDERR_PIPE_FILE) - .replaceAll("STDOUT_PIPE_FILE", STDOUT_PIPE_FILE); - - final List containerPorts = createContainerPortList(internalToExternalPorts); - - final List envVars = envMap.entrySet().stream() - .map(entry -> new EnvVar(entry.getKey(), entry.getValue(), null)) - .collect(Collectors.toList()); - - final ContainerBuilder containerBuilder = new ContainerBuilder() - .withName(MAIN_CONTAINER_NAME) - .withPorts(containerPorts) - .withImage(image) - .withImagePullPolicy(imagePullPolicy) - .withCommand("sh", "-c", mainCommand) - .withEnv(envVars) - .withWorkingDir(CONFIG_DIR) - .withVolumeMounts(mainVolumeMounts); - - final ResourceRequirementsBuilder resourceRequirementsBuilder = getResourceRequirementsBuilder(resourceRequirements); - if (resourceRequirementsBuilder != null) { - containerBuilder.withResources(resourceRequirementsBuilder.build()); - } - return containerBuilder.build(); - } - - public static List createContainerPortList(final Map internalToExternalPorts) { - return internalToExternalPorts.keySet().stream() - .map(integer -> new ContainerPortBuilder() - .withContainerPort(integer) - .build()) - .collect(Collectors.toList()); - } - - public static void copyFilesToKubeConfigVolume(final KubernetesClient client, - final Pod podDefinition, - final Map files) { - final List> fileEntries = new ArrayList<>(files.entrySet()); - - // copy this 
file last to indicate that the copy has completed - fileEntries.add(new AbstractMap.SimpleEntry<>(SUCCESS_FILE_NAME, "")); - - Path tmpFile = null; - Process proc = null; - for (final Map.Entry file : fileEntries) { - try { - tmpFile = Path.of(IOs.writeFileToRandomTmpDir(file.getKey(), file.getValue())); - - LOGGER.info("Uploading file: " + file.getKey()); - final var containerPath = Path.of(CONFIG_DIR + "/" + file.getKey()); - - // using kubectl cp directly here, because both fabric and the official kube client APIs have - // several issues with copying files. See https://github.com/airbytehq/airbyte/issues/8643 for - // details. - final String command = String.format("kubectl cp %s %s/%s:%s -c %s", tmpFile, podDefinition.getMetadata().getNamespace(), - podDefinition.getMetadata().getName(), containerPath, INIT_CONTAINER_NAME); - LOGGER.info(command); - - proc = Runtime.getRuntime().exec(command); - LOGGER.info("Waiting for kubectl cp to complete"); - final int exitCode = proc.waitFor(); - - if (exitCode != 0) { - // Copying the success indicator file to the init container causes the container to immediately - // exit, causing the `kubectl cp` command to exit with code 137. This check ensures that an error is - // not thrown in this case if the init container exits successfully. 
- if (SUCCESS_FILE_NAME.equals(file.getKey()) && waitForInitPodToTerminate(client, podDefinition, 5, TimeUnit.MINUTES) == 0) { - LOGGER.info("Init was successful; ignoring non-zero kubectl cp exit code for success indicator file."); - } else { - throw new IOException("kubectl cp failed with exit code " + exitCode); - } - } - - LOGGER.info("kubectl cp complete, closing process"); - } catch (final IOException | InterruptedException e) { - throw new RuntimeException(e); - } finally { - if (tmpFile != null) { - try { - tmpFile.toFile().delete(); - } catch (final Exception e) { - LOGGER.info("Caught exception when deleting temp file but continuing to allow process deletion.", e); - } - } - if (proc != null) { - proc.destroy(); - } - } - } - } - - /** - * The calls in this function aren't straight-forward due to api limitations. There is no proper way - * to directly look for containers within a pod or query if a container is in a running state beside - * checking if the getRunning field is set. We could put this behind an interface, but that seems - * heavy-handed compared to the 10 lines here. 
- */ - private static void waitForInitPodToRun(final KubernetesClient client, final Pod podDefinition) throws InterruptedException { - // todo: this could use the watcher instead of waitUntilConditions - LOGGER.info("Waiting for init container to be ready before copying files..."); - final PodResource pod = - client.pods().inNamespace(podDefinition.getMetadata().getNamespace()).withName(podDefinition.getMetadata().getName()); - pod.waitUntilCondition(p -> p.getStatus().getInitContainerStatuses().size() != 0, 5, TimeUnit.MINUTES); - LOGGER.info("Init container present.."); - client.pods().inNamespace(podDefinition.getMetadata().getNamespace()).withName(podDefinition.getMetadata().getName()) - .waitUntilCondition(p -> p.getStatus().getInitContainerStatuses().get(0).getState().getRunning() != null, 5, TimeUnit.MINUTES); - LOGGER.info("Init container ready.."); - } - - /** - * Waits for the init container to terminate, and returns its exit code. - */ - private static int waitForInitPodToTerminate(final KubernetesClient client, - final Pod podDefinition, - final long timeUnitsToWait, - final TimeUnit timeUnit) - throws InterruptedException { - LOGGER.info("Waiting for init container to terminate before checking exit value..."); - client.pods().inNamespace(podDefinition.getMetadata().getNamespace()).withName(podDefinition.getMetadata().getName()) - .waitUntilCondition(p -> p.getStatus().getInitContainerStatuses().get(0).getState().getTerminated() != null, timeUnitsToWait, timeUnit); - final int exitValue = client.pods().inNamespace(podDefinition.getMetadata().getNamespace()).withName(podDefinition.getMetadata().getName()).get() - .getStatus().getInitContainerStatuses().get(0).getState().getTerminated().getExitCode(); - LOGGER.info("Init container terminated with exit value {}.", exitValue); - return exitValue; - } - - private Toleration[] buildPodTolerations(final List tolerations) { - if (tolerations == null || tolerations.isEmpty()) { - return null; - } - return 
tolerations.stream().map(workerPodToleration -> new TolerationBuilder() - .withKey(workerPodToleration.getKey()) - .withEffect(workerPodToleration.getEffect()) - .withOperator(workerPodToleration.getOperator()) - .withValue(workerPodToleration.getValue()) - .build()) - .toArray(Toleration[]::new); - } - - @SuppressWarnings("PMD.InvalidLogMessageFormat") - public KubePodProcess(final boolean isOrchestrator, - final String processRunnerHost, - final KubernetesClient fabricClient, - final String podName, - final String namespace, - final String image, - final String imagePullPolicy, - final String sidecarImagePullPolicy, - final int stdoutLocalPort, - final int stderrLocalPort, - final String kubeHeartbeatUrl, - final boolean usesStdin, - final Map files, - final String entrypointOverride, - final ResourceRequirements resourceRequirements, - final List imagePullSecrets, - final List tolerations, - final Map nodeSelectors, - final Map labels, - final Map annotations, - final String socatImage, - final String busyboxImage, - final String curlImage, - final Map envMap, - final Map internalToExternalPorts, - final String... 
args) - throws IOException, InterruptedException { - this.fabricClient = fabricClient; - this.stdoutLocalPort = stdoutLocalPort; - this.stderrLocalPort = stderrLocalPort; - this.stdoutServerSocket = new ServerSocket(stdoutLocalPort); - this.stderrServerSocket = new ServerSocket(stderrLocalPort); - this.executorService = Executors.newFixedThreadPool(2); - setupStdOutAndStdErrListeners(); - - if (entrypointOverride != null) { - LOGGER.info("Found entrypoint override: {}", entrypointOverride); - } - - final Volume pipeVolume = new VolumeBuilder() - .withName("airbyte-pipes") - .withNewEmptyDir() - .endEmptyDir() - .build(); - - final VolumeMount pipeVolumeMount = new VolumeMountBuilder() - .withName("airbyte-pipes") - .withMountPath(PIPES_DIR) - .build(); - - final Volume configVolume = new VolumeBuilder() - .withName("airbyte-config") - .withNewEmptyDir() - .withMedium("Memory") - .endEmptyDir() - .build(); - - final VolumeMount configVolumeMount = new VolumeMountBuilder() - .withName("airbyte-config") - .withMountPath(CONFIG_DIR) - .build(); - - final Volume terminationVolume = new VolumeBuilder() - .withName("airbyte-termination") - .withNewEmptyDir() - .endEmptyDir() - .build(); - - final VolumeMount terminationVolumeMount = new VolumeMountBuilder() - .withName("airbyte-termination") - .withMountPath(TERMINATION_DIR) - .build(); - - final Volume tmpVolume = new VolumeBuilder() - .withName("tmp") - .withNewEmptyDir() - .endEmptyDir() - .build(); - - final VolumeMount tmpVolumeMount = new VolumeMountBuilder() - .withName("tmp") - .withMountPath(TMP_DIR) - .build(); - - final Container init = getInit(usesStdin, List.of(pipeVolumeMount, configVolumeMount), busyboxImage); - final Container main = getMain( - image, - imagePullPolicy, - usesStdin, - entrypointOverride, - List.of(pipeVolumeMount, configVolumeMount, terminationVolumeMount, tmpVolumeMount), - resourceRequirements, - internalToExternalPorts, - envMap, - args); - - // Printing socat notice logs with socat -d 
-d - // To print info logs as well use socat -d -d -d - // more info: https://linux.die.net/man/1/socat - final io.fabric8.kubernetes.api.model.ResourceRequirements heartbeatSidecarResources = - getResourceRequirementsBuilder(DEFAULT_SIDECAR_RESOURCES).build(); - final io.fabric8.kubernetes.api.model.ResourceRequirements socatSidecarResources = - getResourceRequirementsBuilder(DEFAULT_SOCAT_RESOURCES).build(); - - final Container remoteStdin = new ContainerBuilder() - .withName("remote-stdin") - .withImage(socatImage) - .withCommand("sh", "-c", "socat -d -d TCP-L:9001 STDOUT > " + STDIN_PIPE_FILE) - .withVolumeMounts(pipeVolumeMount, terminationVolumeMount) - .withResources(socatSidecarResources) - .withImagePullPolicy(sidecarImagePullPolicy) - .build(); - - final Container relayStdout = new ContainerBuilder() - .withName("relay-stdout") - .withImage(socatImage) - .withCommand("sh", "-c", String.format("cat %s | socat -d -d -t 60 - TCP:%s:%s", STDOUT_PIPE_FILE, processRunnerHost, stdoutLocalPort)) - .withVolumeMounts(pipeVolumeMount, terminationVolumeMount) - .withResources(socatSidecarResources) - .withImagePullPolicy(sidecarImagePullPolicy) - .build(); - - final Container relayStderr = new ContainerBuilder() - .withName("relay-stderr") - .withImage(socatImage) - .withCommand("sh", "-c", String.format("cat %s | socat -d -d -t 60 - TCP:%s:%s", STDERR_PIPE_FILE, processRunnerHost, stderrLocalPort)) - .withVolumeMounts(pipeVolumeMount, terminationVolumeMount) - .withResources(socatSidecarResources) - .withImagePullPolicy(sidecarImagePullPolicy) - .build(); - - // communicates via a file if it isn't able to reach the heartbeating server and succeeds if the - // main container completes - final String heartbeatCommand = MoreResources.readResource("entrypoints/sync/check.sh") - .replaceAll("TERMINATION_FILE_CHECK", TERMINATION_FILE_CHECK) - .replaceAll("TERMINATION_FILE_MAIN", TERMINATION_FILE_MAIN) - .replaceAll("HEARTBEAT_URL", kubeHeartbeatUrl); - - final Container 
callHeartbeatServer = new ContainerBuilder() - .withName("call-heartbeat-server") - .withImage(curlImage) - .withCommand("sh") - .withArgs("-c", heartbeatCommand) - .withVolumeMounts(terminationVolumeMount) - .withResources(heartbeatSidecarResources) - .withImagePullPolicy(sidecarImagePullPolicy) - .build(); - - final List containers = usesStdin ? List.of(main, remoteStdin, relayStdout, relayStderr, callHeartbeatServer) - : List.of(main, relayStdout, relayStderr, callHeartbeatServer); - - PodFluent.SpecNested podBuilder = new PodBuilder() - .withApiVersion("v1") - .withNewMetadata() - .withName(podName) - .withLabels(labels) - .withAnnotations(annotations) - .endMetadata() - .withNewSpec(); - - if (isOrchestrator) { - podBuilder = podBuilder.withServiceAccount("airbyte-admin").withAutomountServiceAccountToken(true); - } - - List pullSecrets = imagePullSecrets - .stream() - .map(imagePullSecret -> new LocalObjectReference(imagePullSecret)) - .collect(Collectors.toList()); - - final Pod pod = podBuilder.withTolerations(buildPodTolerations(tolerations)) - .withImagePullSecrets(pullSecrets) // An empty list or an empty LocalObjectReference turns this into a no-op setting. - .withNodeSelector(nodeSelectors) - .withRestartPolicy("Never") - .withInitContainers(init) - .withContainers(containers) - .withVolumes(pipeVolume, configVolume, terminationVolume, tmpVolume) - .endSpec() - .build(); - - LOGGER.info("Creating pod {}...", pod.getMetadata().getName()); - val start = System.currentTimeMillis(); - - this.podDefinition = fabricClient.pods().inNamespace(namespace).createOrReplace(pod); - - // We want to create a watch before the init container runs. Then we can guarantee - // that we're checking for updates across the full lifecycle of the main container. - // This is safe only because we are blocking the init pod until we copy files onto it. - // See the ExitCodeWatcher comments for more info. 
- exitCodeFuture = new CompletableFuture<>(); - podInformer = fabricClient.pods() - .inNamespace(namespace) - .withName(pod.getMetadata().getName()) - .inform(); - podInformer.addEventHandler(new ExitCodeWatcher( - pod.getMetadata().getName(), - namespace, - exitCodeFuture::complete, - () -> { - LOGGER.info(prependPodInfo( - String.format( - "Exit code watcher failed to retrieve the exit code. Defaulting to %s. This is expected if the job was cancelled.", - KILLED_EXIT_CODE), - namespace, - podName)); - - exitCodeFuture.complete(KILLED_EXIT_CODE); - })); - - waitForInitPodToRun(fabricClient, podDefinition); - - LOGGER.info("Copying files..."); - copyFilesToKubeConfigVolume(fabricClient, podDefinition, files); - - LOGGER.info("Waiting until pod is ready..."); - // If a pod gets into a non-terminal error state it should be automatically killed by our - // heartbeating mechanism. - // This also handles the case where a very short pod already completes before this check completes - // the first time. - // This doesn't manage things like pods that are blocked from running for some cluster reason or if - // the init - // container got stuck somehow. 
- fabricClient.resource(podDefinition).waitUntilCondition(p -> { - final boolean isReady = Objects.nonNull(p) && Readiness.getInstance().isReady(p); - return isReady || KubePodResourceHelper.isTerminal(p); - }, 20, TimeUnit.MINUTES); - MetricClientFactory.getMetricClient().distribution(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, - System.currentTimeMillis() - start); - - // allow writing stdin to pod - LOGGER.info("Reading pod IP..."); - final var podIp = getPodIP(fabricClient, podName, namespace); - LOGGER.info("Pod IP: {}", podIp); - - if (usesStdin) { - LOGGER.info("Creating stdin socket..."); - final var socketToDestStdIo = new Socket(podIp, STDIN_REMOTE_PORT); - this.stdin = socketToDestStdIo.getOutputStream(); - } else { - LOGGER.info("Using null stdin output stream..."); - this.stdin = NullOutputStream.NULL_OUTPUT_STREAM; - } - } - - private void setupStdOutAndStdErrListeners() { - final var context = MDC.getCopyOfContextMap(); - executorService.submit(() -> { - MDC.setContextMap(context); - try { - LOGGER.info("Creating stdout socket server..."); - final var socket = stdoutServerSocket.accept(); // blocks until connected - // cat /proc/sys/net/ipv4/tcp_keepalive_time - // 300 - // cat /proc/sys/net/ipv4/tcp_keepalive_probes - // 5 - // cat /proc/sys/net/ipv4/tcp_keepalive_intvl - // 60 - socket.setKeepAlive(true); - LOGGER.info("Setting stdout..."); - this.stdout = socket.getInputStream(); - } catch (final IOException e) { - e.printStackTrace(); // todo: propagate exception / join at the end of constructor - } - }); - executorService.submit(() -> { - MDC.setContextMap(context); - try { - LOGGER.info("Creating stderr socket server..."); - final var socket = stderrServerSocket.accept(); // blocks until connected - socket.setKeepAlive(true); - LOGGER.info("Setting stderr..."); - this.stderr = socket.getInputStream(); - } catch (final IOException e) { - e.printStackTrace(); // todo: propagate exception / join at the end of constructor - } - }); - 
} - - /** - * Waits for the Kube Pod backing this process and returns the exit value after closing resources. - */ - @Override - public int waitFor() throws InterruptedException { - try { - exitCodeFuture.get(); - } catch (final ExecutionException e) { - throw new RuntimeException(e); - } - - return exitValue(); - } - - /** - * Immediately terminates the Kube Pod backing this process and cleans up IO resources. - */ - @Override - public void destroy() { - final String podName = podDefinition.getMetadata().getName(); - final String podNamespace = podDefinition.getMetadata().getNamespace(); - - LOGGER.info(prependPodInfo("Destroying Kube process.", podNamespace, podName)); - try { - fabricClient.resource(podDefinition).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); - exitCodeFuture.complete(KILLED_EXIT_CODE); - } finally { - close(); - LOGGER.info(prependPodInfo("Destroyed Kube process.", podNamespace, podName)); - } - } - - private Container getMainContainerFromPodDefinition() { - final Optional containerOptional = podDefinition.getSpec() - .getContainers() - .stream() - .filter(c -> MAIN_CONTAINER_NAME.equals(c.getName())) - .findFirst(); - if (containerOptional.isEmpty()) { - LOGGER.warn(String.format("Could not find main container definition for pod: %s", podDefinition.getMetadata().getName())); - return null; - } else { - return containerOptional.get(); - } - } - - @Override - public KubePodInfo getInfo() { - final Container mainContainer = getMainContainerFromPodDefinition(); - final KubeContainerInfo mainContainerInfo = new KubeContainerInfo(mainContainer.getImage(), mainContainer.getImagePullPolicy()); - return new KubePodInfo(podDefinition.getMetadata().getNamespace(), - podDefinition.getMetadata().getName(), - mainContainerInfo); - } - - /** - * Close all open resource in the opposite order of resource creation. - *

- * Null checks exist because certain local Kube clusters (e.g. Docker for Desktop) back this - * implementation with OS processes and resources, which are automatically reaped by the OS. - */ - private void close() { - final boolean previouslyClosed = wasClosed.getAndSet(true); - - // short-circuit if close was already called, so we don't re-offer ports multiple times - // since the offer call is non-atomic - if (previouslyClosed) { - return; - } - - if (this.stdin != null) { - Exceptions.swallow(this.stdin::close); - } - - if (this.stdout != null) { - Exceptions.swallow(this.stdout::close); - } - - if (this.stderr != null) { - Exceptions.swallow(this.stderr::close); - } - - Exceptions.swallow(this.stdoutServerSocket::close); - Exceptions.swallow(this.stderrServerSocket::close); - Exceptions.swallow(this.podInformer::close); - Exceptions.swallow(this.executorService::shutdownNow); - - KubePortManagerSingleton.getInstance().offer(stdoutLocalPort); - KubePortManagerSingleton.getInstance().offer(stderrLocalPort); - - LOGGER.info(prependPodInfo("Closed all resources for pod", podDefinition.getMetadata().getNamespace(), podDefinition.getMetadata().getName())); - } - - private int getReturnCode() { - if (exitCodeFuture.isDone()) { - try { - return exitCodeFuture.get(); - } catch (final InterruptedException | ExecutionException e) { - throw new RuntimeException( - prependPodInfo("Cannot find pod %s : %s while trying to retrieve exit code. 
This probably means the pod was not correctly created.", - podDefinition.getMetadata().getNamespace(), - podDefinition.getMetadata().getName()), - e); - } - } else { - throw new IllegalThreadStateException(prependPodInfo("Main container in kube pod has not terminated yet.", - podDefinition.getMetadata().getNamespace(), - podDefinition.getMetadata().getName())); - } - } - - @Override - public int exitValue() { - // getReturnCode throws IllegalThreadException if the Kube pod has not exited; - // close() is only called if the Kube pod has terminated. - final var returnCode = getReturnCode(); - // The OS traditionally handles process resource clean up. Therefore an exit code of 0, also - // indicates that all kernel resources were shut down. - // Because this is a custom implementation, manually close all the resources. - // Further, since the local resources are used to talk to Kubernetes resources, shut local resources - // down after Kubernetes resources are shut down, regardless of Kube termination status. 
- close(); - - return returnCode; - } - - @Override - public Process toProcess() { - return new Process() { - - @Override - public OutputStream getOutputStream() { - return KubePodProcess.this.stdin; - } - - @Override - public InputStream getInputStream() { - return KubePodProcess.this.stdout; - } - - @Override - public InputStream getErrorStream() { - return KubePodProcess.this.stderr; - } - - @Override - public int waitFor() throws InterruptedException { - return KubePodProcess.this.waitFor(); - } - - @Override - public int exitValue() { - return KubePodProcess.this.exitValue(); - } - - @Override - public void destroy() { - KubePodProcess.this.destroy(); - } - - @Override - public Info info() { - return new KubePodProcessInfo(podDefinition.getMetadata().getName()); - } - - }; - } - - public static ResourceRequirementsBuilder getResourceRequirementsBuilder(final ResourceRequirements resourceRequirements) { - if (resourceRequirements != null) { - final Map requestMap = new HashMap<>(); - // if null then use unbounded resource allocation - if (!com.google.common.base.Strings.isNullOrEmpty(resourceRequirements.getCpuRequest())) { - requestMap.put("cpu", Quantity.parse(resourceRequirements.getCpuRequest())); - } - if (!com.google.common.base.Strings.isNullOrEmpty(resourceRequirements.getMemoryRequest())) { - requestMap.put("memory", Quantity.parse(resourceRequirements.getMemoryRequest())); - } - final Map limitMap = new HashMap<>(); - if (!com.google.common.base.Strings.isNullOrEmpty(resourceRequirements.getCpuLimit())) { - limitMap.put("cpu", Quantity.parse(resourceRequirements.getCpuLimit())); - } - if (!com.google.common.base.Strings.isNullOrEmpty(resourceRequirements.getMemoryLimit())) { - limitMap.put("memory", Quantity.parse(resourceRequirements.getMemoryLimit())); - } - return new ResourceRequirementsBuilder() - .withRequests(requestMap) - .withLimits(limitMap); - } - return new ResourceRequirementsBuilder(); - } - - private static String prependPodInfo(final 
String message, final String podNamespace, final String podName) { - return String.format("(pod: %s / %s) - %s", podNamespace, podName, message); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcessInfo.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcessInfo.java deleted file mode 100644 index 183dd4353c7f..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcessInfo.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import java.lang.ProcessHandle.Info; -import java.time.Duration; -import java.time.Instant; -import java.util.Optional; - -/** - * Minimal Process info implementation to assist with debug logging. - * - * Current implement only logs out the Kubernetes pod corresponding to the JVM process. - */ -public class KubePodProcessInfo implements Info { - - private final String podName; - - public KubePodProcessInfo(final String podname) { - this.podName = podname; - } - - @Override - public Optional command() { - return Optional.of(podName); - } - - @Override - public Optional commandLine() { - return Optional.of(podName); - } - - @Override - public Optional arguments() { - return Optional.empty(); - } - - @Override - public Optional startInstant() { - return Optional.empty(); - } - - @Override - public Optional totalCpuDuration() { - return Optional.empty(); - } - - @Override - public Optional user() { - return Optional.empty(); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodResourceHelper.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodResourceHelper.java deleted file mode 100644 index 42aa2c721b28..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodResourceHelper.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 
Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import io.fabric8.kubernetes.api.model.ContainerStatus; -import io.fabric8.kubernetes.api.model.Pod; -import java.util.List; -import java.util.stream.Collectors; - -public class KubePodResourceHelper { - - public static boolean isTerminal(final Pod pod) { - if (pod.getStatus() != null) { - // Check if "main" container has terminated, as that defines whether the parent process has - // terminated. - final List mainContainerStatuses = pod.getStatus() - .getContainerStatuses() - .stream() - .filter(containerStatus -> containerStatus.getName().equals(KubePodProcess.MAIN_CONTAINER_NAME)) - .collect(Collectors.toList()); - - return mainContainerStatuses.size() == 1 && mainContainerStatuses.get(0).getState() != null - && mainContainerStatuses.get(0).getState().getTerminated() != null; - } else { - return false; - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePortManagerSingleton.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePortManagerSingleton.java deleted file mode 100644 index 8eb3f3ebf08e..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePortManagerSingleton.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Sets; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingDeque; -import java.util.concurrent.TimeUnit; -import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Convenience wrapper around a thread-safe BlockingQueue. Keeps track of available ports for Kube - * Pod Processes. 
- * - * Although this data structure can do without the wrapper class, this class allows easier testing - * via the {@link #getNumAvailablePorts()} function. - * - * The singleton pattern clarifies that only one copy of this class is intended to exist per - * scheduler deployment. - */ -public class KubePortManagerSingleton { - - private static final Logger LOGGER = LoggerFactory.getLogger(KubePortManagerSingleton.class); - - private static KubePortManagerSingleton instance; - - private static final int MAX_PORTS_PER_WORKER = 4; // A sync has two workers. Each worker requires 2 ports. - private final BlockingQueue workerPorts; - - private KubePortManagerSingleton(final Set ports) { - workerPorts = new LinkedBlockingDeque<>(ports); - } - - /** - * Make sure init(ports) is called once prior to repeatedly using getInstance(). - */ - public static synchronized KubePortManagerSingleton getInstance() { - if (instance == null) { - throw new RuntimeException("Must initialize with init(ports) before using."); - } - return instance; - } - - /** - * Sets up the port range; make sure init(ports) is called once prior to repeatedly using - * getInstance(). Init won't fail (it will perform a no-op) if re-initializd with the same set of - * ports. 
- */ - public static synchronized void init(final Set ports) { - if (instance == null) { - instance = new KubePortManagerSingleton(ports); - } else if (Sets.intersection(instance.getAllPorts(), ports).size() == ports.size()) { - LOGGER.info("Skipping initializing KubePortManagerSingleton since ports specified are the same."); - } else { - throw new RuntimeException("Cannot initialize twice with different ports!"); - } - } - - public Integer take() throws InterruptedException { - return workerPorts.poll(10, TimeUnit.MINUTES); - } - - @VisibleForTesting - public @Nullable Integer takeImmediately() { - return workerPorts.poll(); - } - - public void offer(final Integer port) { - if (!workerPorts.contains(port)) { - workerPorts.add(port); - } - } - - protected Set getAllPorts() { - return new HashSet<>(workerPorts); - } - - public int getNumAvailablePorts() { - return workerPorts.size(); - } - - public int getSupportedWorkers() { - return workerPorts.size() / MAX_PORTS_PER_WORKER; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java deleted file mode 100644 index 1c76cfc77d3a..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.process; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.config.AllowedHosts; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.exception.WorkerException; -import io.fabric8.kubernetes.client.KubernetesClient; -import java.net.InetAddress; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class KubeProcessFactory implements ProcessFactory { - - @VisibleForTesting - public static final int KUBE_NAME_LEN_LIMIT = 63; - - private static final Logger LOGGER = LoggerFactory.getLogger(KubeProcessFactory.class); - - private final WorkerConfigs workerConfigs; - private final String namespace; - private final KubernetesClient fabricClient; - private final String kubeHeartbeatUrl; - private final String processRunnerHost; - private final boolean isOrchestrator; - - /** - * Sets up a process factory with the default processRunnerHost. - */ - public KubeProcessFactory(final WorkerConfigs workerConfigs, - final String namespace, - final KubernetesClient fabricClient, - final String kubeHeartbeatUrl, - final boolean isOrchestrator) { - this( - workerConfigs, - namespace, - fabricClient, - kubeHeartbeatUrl, - Exceptions.toRuntime(() -> InetAddress.getLocalHost().getHostAddress()), - isOrchestrator); - } - - /** - * @param namespace kubernetes namespace where spawned pods will live - * @param fabricClient fabric8 kubernetes client - * @param kubeHeartbeatUrl a url where if the response is not 200 the spawned process will fail - * itself - * @param processRunnerHost is the local host or ip of the machine running the process factory. - * injectable for testing. 
- * @param isOrchestrator determines if this should run as airbyte-admin - */ - @VisibleForTesting - public KubeProcessFactory(final WorkerConfigs workerConfigs, - final String namespace, - final KubernetesClient fabricClient, - final String kubeHeartbeatUrl, - final String processRunnerHost, - final boolean isOrchestrator) { - this.workerConfigs = workerConfigs; - this.namespace = namespace; - this.fabricClient = fabricClient; - this.kubeHeartbeatUrl = kubeHeartbeatUrl; - this.processRunnerHost = processRunnerHost; - this.isOrchestrator = isOrchestrator; - } - - @Override - public Process create( - final String jobType, - final String jobId, - final int attempt, - final Path jobRoot, - final String imageName, - final boolean isCustomConnector, - final boolean usesStdin, - final Map files, - final String entrypoint, - final ResourceRequirements resourceRequirements, - final AllowedHosts allowedHosts, - final Map customLabels, - final Map jobMetadata, - final Map internalToExternalPorts, - final String... args) - throws WorkerException { - try { - // used to differentiate source and destination processes with the same id and attempt - final String podName = ProcessFactory.createProcessName(imageName, jobType, jobId, attempt, KUBE_NAME_LEN_LIMIT); - LOGGER.info("Attempting to start pod = {} for {} with resources {} and allowedHosts {}", podName, imageName, resourceRequirements, - allowedHosts); - - final int stdoutLocalPort = KubePortManagerSingleton.getInstance().take(); - LOGGER.info("{} stdoutLocalPort = {}", podName, stdoutLocalPort); - - final int stderrLocalPort = KubePortManagerSingleton.getInstance().take(); - LOGGER.info("{} stderrLocalPort = {}", podName, stderrLocalPort); - - final var allLabels = getLabels(jobId, attempt, customLabels); - - // If using isolated pool, check workerConfigs has isolated pool set. If not set, fall back to use - // regular node pool. - final var nodeSelectors = - isCustomConnector ? 
workerConfigs.getWorkerIsolatedKubeNodeSelectors().orElse(workerConfigs.getworkerKubeNodeSelectors()) - : workerConfigs.getworkerKubeNodeSelectors(); - - return new KubePodProcess( - isOrchestrator, - processRunnerHost, - fabricClient, - podName, - namespace, - imageName, - workerConfigs.getJobImagePullPolicy(), - workerConfigs.getSidecarImagePullPolicy(), - stdoutLocalPort, - stderrLocalPort, - kubeHeartbeatUrl, - usesStdin, - files, - entrypoint, - resourceRequirements, - workerConfigs.getJobImagePullSecrets(), - workerConfigs.getWorkerKubeTolerations(), - nodeSelectors, - allLabels, - workerConfigs.getWorkerKubeAnnotations(), - workerConfigs.getJobSocatImage(), - workerConfigs.getJobBusyboxImage(), - workerConfigs.getJobCurlImage(), - MoreMaps.merge(jobMetadata, workerConfigs.getEnvMap()), - internalToExternalPorts, - args).toProcess(); - } catch (final Exception e) { - throw new WorkerException(e.getMessage(), e); - } - } - - /** - * Returns general labels to be applied to all Kubernetes pods. All general labels should be added - * here. - */ - public static Map getLabels(final String jobId, final int attemptId, final Map customLabels) { - final var allLabels = new HashMap<>(customLabels); - - final var generalKubeLabels = Map.of( - Metadata.JOB_LABEL_KEY, jobId, - Metadata.ATTEMPT_LABEL_KEY, String.valueOf(attemptId), - Metadata.WORKER_POD_LABEL_KEY, Metadata.WORKER_POD_LABEL_VALUE); - - allLabels.putAll(generalKubeLabels); - - return allLabels; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/DockerComposeDocumentStoreClient.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/DockerComposeDocumentStoreClient.java deleted file mode 100644 index ae580329869e..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/DockerComposeDocumentStoreClient.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.storage; - -import io.airbyte.commons.io.IOs; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Optional; - -/** - * Document store for when all we have is an FS. This should only be used in the docker-compose - * case. Leverages the workspace mount as a storage area. - */ -public class DockerComposeDocumentStoreClient implements DocumentStoreClient { - - private static final Path STATE_PATH = Path.of("document_store"); - private final Path workspaceMount; - - public static DockerComposeDocumentStoreClient create(final Path workspaceMount) { - return new DockerComposeDocumentStoreClient(workspaceMount); - } - - public DockerComposeDocumentStoreClient(final Path workspaceMount) { - this.workspaceMount = workspaceMount; - } - - private Path getRoot() { - return workspaceMount.resolve(STATE_PATH); - } - - private Path getPath(final String id) { - return getRoot().resolve(String.format("%s.yaml", id)); - } - - @Override - public void write(final String id, final String document) { - final Path path = getPath(id); - createDirectoryWithParents(path.getParent()); - IOs.writeFile(path, document); - } - - private void createDirectoryWithParents(final Path path) { - try { - Files.createDirectories(path); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public Optional read(final String id) { - final Path path = getPath(id); - if (Files.exists(path)) { - return Optional.ofNullable(IOs.readFile(path)); - } else { - return Optional.empty(); - } - } - - @Override - public boolean delete(final String id) { - final Path path = getPath(id); - try { - return Files.deleteIfExists(path); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/DocumentStoreClient.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/DocumentStoreClient.java 
deleted file mode 100644 index 85ab1c8de540..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/DocumentStoreClient.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.storage; - -import java.util.Optional; - -/** - * Interface for treating cloud storage like a simple document store. - */ -public interface DocumentStoreClient { - - /** - * Writes a document with a given id. If a document already exists at this id it will be - * overwritten. - * - * @param id of the document to write - * @param document to write - */ - void write(String id, String document); - - /** - * Reads document with a given id. - * - * @param id of the document to read. - * @return the document - */ - Optional read(String id); - - /** - * Deletes the document with provided id. - * - * @param id of document to delete - * @return true if deletes something, otherwise false. - */ - boolean delete(String id); - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/GcsDocumentStoreClient.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/GcsDocumentStoreClient.java deleted file mode 100644 index 3cc5d1a15f5c..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/GcsDocumentStoreClient.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.storage; - -import com.google.cloud.storage.Blob; -import com.google.cloud.storage.BlobId; -import com.google.cloud.storage.BlobInfo; -import com.google.cloud.storage.Storage; -import io.airbyte.config.storage.CloudStorageConfigs.GcsConfig; -import io.airbyte.config.storage.DefaultGcsClientFactory; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.util.Optional; - -/** - * Document store on top of the GCS Client (Storage). 
- */ -public class GcsDocumentStoreClient implements DocumentStoreClient { - - private final String bucketName; - private final Path root; - private final Storage gcsClient; - - public static GcsDocumentStoreClient create(final GcsConfig config, final Path root) { - return new GcsDocumentStoreClient( - new DefaultGcsClientFactory(config).get(), - config.getBucketName(), - root); - } - - public GcsDocumentStoreClient(final Storage gcsClient, final String bucketName, final Path root) { - this.gcsClient = gcsClient; - this.bucketName = bucketName; - this.root = root; - } - - String getKey(final String id) { - return root + "/" + id; - } - - BlobId getBlobId(final String id) { - return BlobId.of(bucketName, getKey(id)); - } - - @Override - public void write(final String id, final String document) { - final BlobInfo blobInfo = BlobInfo.newBuilder(getBlobId(id)).build(); - gcsClient.create(blobInfo, document.getBytes(StandardCharsets.UTF_8)); - } - - @Override - public Optional read(final String id) { - final Blob blob = gcsClient.get(getBlobId(id)); - if (blob != null && blob.exists()) { - return Optional.of(new String(gcsClient.readAllBytes(BlobId.of(bucketName, getKey(id))), StandardCharsets.UTF_8)); - } else { - return Optional.empty(); - } - } - - @Override - public boolean delete(final String id) { - return gcsClient.delete(BlobId.of(bucketName, getKey(id))); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/S3DocumentStoreClient.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/S3DocumentStoreClient.java deleted file mode 100644 index aec04f11f67a..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/S3DocumentStoreClient.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.storage; - -import io.airbyte.config.storage.CloudStorageConfigs.MinioConfig; -import io.airbyte.config.storage.CloudStorageConfigs.S3Config; -import io.airbyte.config.storage.DefaultS3ClientFactory; -import io.airbyte.config.storage.MinioS3ClientFactory; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import software.amazon.awssdk.core.ResponseBytes; -import software.amazon.awssdk.core.sync.RequestBody; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.model.DeleteObjectRequest; -import software.amazon.awssdk.services.s3.model.GetObjectRequest; -import software.amazon.awssdk.services.s3.model.GetObjectResponse; -import software.amazon.awssdk.services.s3.model.HeadObjectRequest; -import software.amazon.awssdk.services.s3.model.NoSuchKeyException; -import software.amazon.awssdk.services.s3.model.PutObjectRequest; - -/** - * Document store on top of the S3Client. 
- */ -public class S3DocumentStoreClient implements DocumentStoreClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3DocumentStoreClient.class); - - private final String bucketName; - private final Path root; - private final S3Client s3Client; - - public static S3DocumentStoreClient minio(final MinioConfig config, final Path root) { - return new S3DocumentStoreClient( - new MinioS3ClientFactory(config).get(), - config.getBucketName(), - root); - } - - public static S3DocumentStoreClient s3(final S3Config config, final Path root) { - return new S3DocumentStoreClient( - new DefaultS3ClientFactory(config).get(), - config.getBucketName(), - root); - } - - public S3DocumentStoreClient(final S3Client s3Client, final String bucketName, final Path root) { - this.s3Client = s3Client; - this.bucketName = bucketName; - this.root = root; - } - - String getKey(final String id) { - return root + "/" + id; - } - - @Override - public void write(final String id, final String document) { - final PutObjectRequest request = PutObjectRequest.builder() - .bucket(bucketName) - .key(getKey(id)) - .build(); - - s3Client.putObject(request, RequestBody.fromString(document)); - } - - @Override - public Optional read(final String id) { - try { - final ResponseBytes objectAsBytes = s3Client.getObjectAsBytes(GetObjectRequest.builder() - .bucket(bucketName) - .key(getKey(id)) - .build()); - return Optional.of(objectAsBytes.asString(StandardCharsets.UTF_8)); - } catch (final NoSuchKeyException e) { - LOGGER.debug("Could not find record with id {}", id); - return Optional.empty(); - } - } - - @Override - public boolean delete(final String id) { - boolean keyExists = true; - try { - s3Client.headObject(HeadObjectRequest.builder().bucket(bucketName).key(getKey(id)).build()); - } catch (final NoSuchKeyException e) { - keyExists = false; - } - - s3Client.deleteObject(DeleteObjectRequest.builder().bucket(bucketName).key(getKey(id)).build()); - return keyExists; - } - -} diff 
--git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/StateClients.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/StateClients.java deleted file mode 100644 index 4b43fea96ae3..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/storage/StateClients.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.storage; - -import io.airbyte.config.storage.CloudStorageConfigs; -import java.nio.file.Path; - -public class StateClients { - - public static DocumentStoreClient create(final CloudStorageConfigs cloudStorageConfigs, final Path prefix) { - DocumentStoreClient documentStoreClient = null; - - switch (cloudStorageConfigs.getType()) { - case S3 -> { - documentStoreClient = S3DocumentStoreClient.s3(cloudStorageConfigs.getS3Config(), prefix); - } - case MINIO -> { - documentStoreClient = S3DocumentStoreClient.minio(cloudStorageConfigs.getMinioConfig(), prefix); - } - case GCS -> { - documentStoreClient = GcsDocumentStoreClient.create(cloudStorageConfigs.getGcsConfig(), prefix); - } - } - - return documentStoreClient; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java deleted file mode 100644 index 6f96eb9f49c6..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.sync; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.config.OperatorDbtInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.WorkerConfigs; -import io.temporal.activity.ActivityExecutionContext; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; - -public class DbtLauncherWorker extends LauncherWorker { - - public static final String DBT = "dbt-orchestrator"; - private static final String POD_NAME_PREFIX = "orchestrator-dbt"; - public static final String INIT_FILE_DESTINATION_LAUNCHER_CONFIG = "destinationLauncherConfig.json"; - - public DbtLauncherWorker(final UUID connectionId, - final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig, - final WorkerConfigs workerConfigs, - final ContainerOrchestratorConfig containerOrchestratorConfig, - final Supplier activityContext, - final Integer serverPort, - final TemporalUtils temporalUtils) { - super( - connectionId, - DBT, - POD_NAME_PREFIX, - jobRunConfig, - Map.of( - INIT_FILE_DESTINATION_LAUNCHER_CONFIG, Jsons.serialize(destinationLauncherConfig)), - containerOrchestratorConfig, - workerConfigs.getResourceRequirements(), - Void.class, - activityContext, - serverPort, - temporalUtils, - workerConfigs, - // Custom connector does not use Dbt at this moment, thus this flag for runnning job under - // isolated pool can be set to false. 
- false); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/LauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/LauncherWorker.java deleted file mode 100644 index 63faea23b100..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/LauncherWorker.java +++ /dev/null @@ -1,317 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.sync; - -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.CONNECTION_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.PROCESS_EXIT_VALUE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; -import static io.airbyte.workers.process.Metadata.CONNECTION_ID_LABEL_KEY; -import static io.airbyte.workers.process.Metadata.ORCHESTRATOR_STEP; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; - -import com.google.common.base.Stopwatch; -import datadog.trace.api.Trace; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.commons.temporal.sync.OrchestratorConstants; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.Worker; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.process.AsyncKubePodStatus; -import io.airbyte.workers.process.AsyncOrchestratorPodProcess; -import io.airbyte.workers.process.KubeContainerInfo; -import io.airbyte.workers.process.KubePodInfo; -import 
io.airbyte.workers.process.KubePodResourceHelper; -import io.airbyte.workers.process.KubeProcessFactory; -import io.fabric8.kubernetes.api.model.DeletionPropagation; -import io.fabric8.kubernetes.api.model.Pod; -import io.fabric8.kubernetes.client.KubernetesClientException; -import io.temporal.activity.ActivityExecutionContext; -import java.nio.file.Path; -import java.time.Duration; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CancellationException; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - -/** - * Coordinates configuring and managing the state of an async process. This is tied to the (job_id, - * attempt_id) and will attempt to kill off lower attempt ids. - * - * @param a json-serializable input class for the worker - * @param either {@link Void} or a json-serializable output class for the worker - */ -@Slf4j -public class LauncherWorker implements Worker { - - private static final Duration MAX_DELETION_TIMEOUT = Duration.ofSeconds(45); - - private final UUID connectionId; - private final String application; - private final String podNamePrefix; - private final JobRunConfig jobRunConfig; - private final Map additionalFileMap; - private final ContainerOrchestratorConfig containerOrchestratorConfig; - private final ResourceRequirements resourceRequirements; - private final Class outputClass; - private final Supplier activityContext; - private final Integer serverPort; - private final TemporalUtils temporalUtils; - private final WorkerConfigs workerConfigs; - - private final boolean isCustomConnector; - private final AtomicBoolean cancelled = new AtomicBoolean(false); - private AsyncOrchestratorPodProcess process; - - public LauncherWorker(final UUID connectionId, - final String application, - final String 
podNamePrefix, - final JobRunConfig jobRunConfig, - final Map additionalFileMap, - final ContainerOrchestratorConfig containerOrchestratorConfig, - final ResourceRequirements resourceRequirements, - final Class outputClass, - final Supplier activityContext, - final Integer serverPort, - final TemporalUtils temporalUtils, - final WorkerConfigs workerConfigs, - final boolean isCustomConnector) { - - this.connectionId = connectionId; - this.application = application; - this.podNamePrefix = podNamePrefix; - this.jobRunConfig = jobRunConfig; - this.additionalFileMap = additionalFileMap; - this.containerOrchestratorConfig = containerOrchestratorConfig; - this.resourceRequirements = resourceRequirements; - this.outputClass = outputClass; - this.activityContext = activityContext; - this.serverPort = serverPort; - this.temporalUtils = temporalUtils; - this.workerConfigs = workerConfigs; - this.isCustomConnector = isCustomConnector; - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public OUTPUT run(final INPUT input, final Path jobRoot) throws WorkerException { - final AtomicBoolean isCanceled = new AtomicBoolean(false); - final AtomicReference cancellationCallback = new AtomicReference<>(null); - return temporalUtils.withBackgroundHeartbeat(cancellationCallback, () -> { - try { - // Assemble configuration. 
- final Map envMap = System.getenv().entrySet().stream() - .filter(entry -> OrchestratorConstants.ENV_VARS_TO_TRANSFER.contains(entry.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - - // Manually add the worker environment to the env var map - envMap.put(WorkerConstants.WORKER_ENVIRONMENT, containerOrchestratorConfig.workerEnvironment().name()); - - final Map fileMap = new HashMap<>(additionalFileMap); - fileMap.putAll(Map.of( - OrchestratorConstants.INIT_FILE_APPLICATION, application, - OrchestratorConstants.INIT_FILE_JOB_RUN_CONFIG, Jsons.serialize(jobRunConfig), - OrchestratorConstants.INIT_FILE_INPUT, Jsons.serialize(input), - OrchestratorConstants.INIT_FILE_ENV_MAP, Jsons.serialize(envMap))); - - final Map portMap = Map.of( - serverPort, serverPort, - OrchestratorConstants.PORT1, OrchestratorConstants.PORT1, - OrchestratorConstants.PORT2, OrchestratorConstants.PORT2, - OrchestratorConstants.PORT3, OrchestratorConstants.PORT3, - OrchestratorConstants.PORT4, OrchestratorConstants.PORT4); - - final var allLabels = KubeProcessFactory.getLabels( - jobRunConfig.getJobId(), - Math.toIntExact(jobRunConfig.getAttemptId()), - Map.of(CONNECTION_ID_LABEL_KEY, connectionId.toString(), SYNC_STEP_KEY, ORCHESTRATOR_STEP)); - - final var podNameAndJobPrefix = podNamePrefix + "-job-" + jobRunConfig.getJobId() + "-attempt-"; - final var podName = podNameAndJobPrefix + jobRunConfig.getAttemptId(); - final var mainContainerInfo = new KubeContainerInfo(containerOrchestratorConfig.containerOrchestratorImage(), - containerOrchestratorConfig.containerOrchestratorImagePullPolicy()); - final var kubePodInfo = new KubePodInfo(containerOrchestratorConfig.namespace(), - podName, - mainContainerInfo); - - ApmTraceUtils.addTagsToTrace(Map.of(CONNECTION_ID_KEY, connectionId, JOB_ID_KEY, jobRunConfig.getJobId(), JOB_ROOT_KEY, jobRoot)); - - // Use the configuration to create the process. 
- process = new AsyncOrchestratorPodProcess( - kubePodInfo, - containerOrchestratorConfig.documentStoreClient(), - containerOrchestratorConfig.kubernetesClient(), - containerOrchestratorConfig.secretName(), - containerOrchestratorConfig.secretMountPath(), - containerOrchestratorConfig.dataPlaneCredsSecretName(), - containerOrchestratorConfig.dataPlaneCredsSecretMountPath(), - containerOrchestratorConfig.googleApplicationCredentials(), - containerOrchestratorConfig.environmentVariables(), - serverPort); - - // Define what to do on cancellation. - cancellationCallback.set(() -> { - // When cancelled, try to set to true. - // Only proceed if value was previously false, so we only have one cancellation going. at a time - if (!isCanceled.getAndSet(true)) { - log.info("Trying to cancel async pod process."); - process.destroy(); - } - }); - - // only kill running pods and create process if it is not already running. - if (process.getDocStoreStatus().equals(AsyncKubePodStatus.NOT_STARTED)) { - log.info("Creating " + podName + " for attempt number: " + jobRunConfig.getAttemptId()); - killRunningPodsForConnection(); - - // custom connectors run in an isolated node pool from airbyte-supported connectors - // to reduce the blast radius of any problems with custom connector code. - final var nodeSelectors = - isCustomConnector ? 
workerConfigs.getWorkerIsolatedKubeNodeSelectors().orElse(workerConfigs.getworkerKubeNodeSelectors()) - : workerConfigs.getworkerKubeNodeSelectors(); - - try { - process.create( - allLabels, - resourceRequirements, - fileMap, - portMap, - nodeSelectors); - } catch (final KubernetesClientException e) { - ApmTraceUtils.addExceptionToTrace(e); - throw new WorkerException( - "Failed to create pod " + podName + ", pre-existing pod exists which didn't advance out of the NOT_STARTED state.", e); - } - } - - // this waitFor can resume if the activity is re-run - process.waitFor(); - - if (cancelled.get()) { - final CancellationException e = new CancellationException(); - ApmTraceUtils.addExceptionToTrace(e); - throw e; - } - - final int asyncProcessExitValue = process.exitValue(); - if (asyncProcessExitValue != 0) { - final WorkerException e = new WorkerException("Orchestrator process exited with non-zero exit code: " + asyncProcessExitValue); - ApmTraceUtils.addTagsToTrace(Map.of(PROCESS_EXIT_VALUE_KEY, asyncProcessExitValue)); - ApmTraceUtils.addExceptionToTrace(e); - throw e; - } - - final var output = process.getOutput(); - - return output.map(s -> Jsons.deserialize(s, outputClass)).orElse(null); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - if (cancelled.get()) { - try { - log.info("Destroying process due to cancellation."); - process.destroy(); - } catch (final Exception e2) { - log.error("Failed to destroy process on cancellation.", e2); - } - throw new WorkerException("Launcher " + application + " was cancelled.", e); - } else { - throw new WorkerException("Running the launcher " + application + " failed", e); - } - } - }, activityContext); - } - - /** - * It is imperative that we do not run multiple replications, normalizations, syncs, etc. at the - * same time. Our best bet is to kill everything that is labelled with the connection id and wait - * until no more pods exist with that connection id. 
- */ - private void killRunningPodsForConnection() { - final var client = containerOrchestratorConfig.kubernetesClient(); - - // delete all pods with the connection id label - List runningPods = getNonTerminalPodsWithLabels(); - final Stopwatch stopwatch = Stopwatch.createStarted(); - - while (!runningPods.isEmpty() && stopwatch.elapsed().compareTo(MAX_DELETION_TIMEOUT) < 0) { - log.warn("There are currently running pods for the connection: {}. Killing these pods to enforce one execution at a time.", - getPodNames(runningPods).toString()); - - log.info("Attempting to delete pods: {}", getPodNames(runningPods).toString()); - runningPods.stream() - .parallel() - .forEach(kubePod -> client.resource(kubePod).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); - - log.info("Waiting for deletion..."); - Exceptions.toRuntime(() -> Thread.sleep(1000)); - - runningPods = getNonTerminalPodsWithLabels(); - } - - if (runningPods.isEmpty()) { - log.info("Successfully deleted all running pods for the connection!"); - } else { - final RuntimeException e = new RuntimeException("Unable to delete pods: " + getPodNames(runningPods).toString()); - ApmTraceUtils.addExceptionToTrace(e); - throw e; - } - } - - private List getPodNames(final List pods) { - return pods.stream().map(pod -> pod.getMetadata().getName()).collect(Collectors.toList()); - } - - private List getNonTerminalPodsWithLabels() { - return containerOrchestratorConfig.kubernetesClient().pods() - .inNamespace(containerOrchestratorConfig.namespace()) - .withLabels(Map.of(CONNECTION_ID_LABEL_KEY, connectionId.toString())) - .list() - .getItems() - .stream() - .filter(kubePod -> !KubePodResourceHelper.isTerminal(kubePod)) - .collect(Collectors.toList()); - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void cancel() { - cancelled.set(true); - - if (process == null) { - return; - } - - log.debug("Closing sync runner process"); - killRunningPodsForConnection(); - - if (process.hasExited()) 
{ - log.info("Successfully cancelled process."); - } else { - // try again - killRunningPodsForConnection(); - - if (process.hasExited()) { - log.info("Successfully cancelled process."); - } else { - log.error("Unable to cancel process"); - } - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java deleted file mode 100644 index bcef1f671194..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.sync; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.WorkerConfigs; -import io.temporal.activity.ActivityExecutionContext; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; - -public class NormalizationLauncherWorker extends LauncherWorker { - - public static final String NORMALIZATION = "normalization-orchestrator"; - private static final String POD_NAME_PREFIX = "orchestrator-norm"; - public static final String INIT_FILE_DESTINATION_LAUNCHER_CONFIG = "destinationLauncherConfig.json"; - - public NormalizationLauncherWorker(final UUID connectionId, - final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig, - final WorkerConfigs workerConfigs, - final ContainerOrchestratorConfig containerOrchestratorConfig, - final Supplier activityContext, - final Integer serverPort, - final TemporalUtils temporalUtils) { - super( - 
connectionId, - NORMALIZATION, - POD_NAME_PREFIX, - jobRunConfig, - Map.of( - INIT_FILE_DESTINATION_LAUNCHER_CONFIG, Jsons.serialize(destinationLauncherConfig)), - containerOrchestratorConfig, - workerConfigs.getResourceRequirements(), - NormalizationSummary.class, - activityContext, - serverPort, - temporalUtils, - workerConfigs, - // Normalization process will happen only on a fixed set of connectors, - // thus they are not going to be run under custom connectors. Setting this to false. - false); - - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/ReplicationLauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/ReplicationLauncherWorker.java deleted file mode 100644 index 5540562a8d20..000000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/ReplicationLauncherWorker.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.sync; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.config.ReplicationOutput; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.WorkerConfigs; -import io.temporal.activity.ActivityExecutionContext; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; - -/** - * Launches a container-orchestrator container/pod to manage the message passing for the replication - * step. This step configs onto the container-orchestrator and retrieves logs and the output from - * the container-orchestrator. 
- */ -public class ReplicationLauncherWorker extends LauncherWorker { - - public static final String REPLICATION = "replication-orchestrator"; - private static final String POD_NAME_PREFIX = "orchestrator-repl"; - public static final String INIT_FILE_SOURCE_LAUNCHER_CONFIG = "sourceLauncherConfig.json"; - public static final String INIT_FILE_DESTINATION_LAUNCHER_CONFIG = "destinationLauncherConfig.json"; - - public ReplicationLauncherWorker(final UUID connectionId, - final ContainerOrchestratorConfig containerOrchestratorConfig, - final IntegrationLauncherConfig sourceLauncherConfig, - final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig, - final ResourceRequirements resourceRequirements, - final Supplier activityContext, - final Integer serverPort, - final TemporalUtils temporalUtils, - final WorkerConfigs workerConfigs) { - super( - connectionId, - REPLICATION, - POD_NAME_PREFIX, - jobRunConfig, - Map.of( - INIT_FILE_SOURCE_LAUNCHER_CONFIG, Jsons.serialize(sourceLauncherConfig), - INIT_FILE_DESTINATION_LAUNCHER_CONFIG, Jsons.serialize(destinationLauncherConfig)), - containerOrchestratorConfig, - resourceRequirements, - ReplicationOutput.class, - activityContext, - serverPort, - temporalUtils, - workerConfigs, - sourceLauncherConfig.getIsCustomConnector() || destinationLauncherConfig.getIsCustomConnector()); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java deleted file mode 100644 index 9058809b9a53..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.featureflag.TestClient; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.workers.exception.RecordSchemaValidationException; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import io.airbyte.workers.test_utils.TestConfigHelpers; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class RecordSchemaValidatorTest { - - private StandardSyncInput syncInput; - private static final String STREAM_NAME = "user_preferences"; - private static final String FIELD_NAME = "favorite_color"; - private static final AirbyteMessage VALID_RECORD = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "blue"); - private static final AirbyteMessage INVALID_RECORD = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, 3); - - @BeforeEach - void setup() throws Exception { - final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(); - syncInput = syncPair.getValue(); - } - - @Test - void testValidateValidSchema() throws Exception { - final var featureFlagClient = new TestClient(); - final var recordSchemaValidator = new RecordSchemaValidator(featureFlagClient, syncInput.getWorkspaceId(), - WorkerUtils.mapStreamNamesToSchemas(syncInput)); - recordSchemaValidator.validateSchema(VALID_RECORD.getRecord(), AirbyteStreamNameNamespacePair.fromRecordMessage(VALID_RECORD.getRecord())); - } - - @Test - void testValidateInvalidSchema() throws Exception { - final var featureFlagClient = new TestClient(); - final RecordSchemaValidator recordSchemaValidator = new RecordSchemaValidator(featureFlagClient, syncInput.getWorkspaceId(), - 
WorkerUtils.mapStreamNamesToSchemas(syncInput)); - assertThrows(RecordSchemaValidationException.class, () -> recordSchemaValidator.validateSchema(INVALID_RECORD.getRecord(), - AirbyteStreamNameNamespacePair.fromRecordMessage(INVALID_RECORD.getRecord()))); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultNormalizationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultNormalizationWorkerTest.java deleted file mode 100644 index 90ae6cac14e6..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultNormalizationWorkerTest.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.normalization.NormalizationRunner; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import io.airbyte.workers.test_utils.TestConfigHelpers; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.stream.Stream; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.junit.jupiter.api.BeforeEach; 
-import org.junit.jupiter.api.Test; - -class DefaultNormalizationWorkerTest { - - private static final String JOB_ID = "0"; - private static final int JOB_ATTEMPT = 0; - private static final Path WORKSPACE_ROOT = Path.of("workspaces/10"); - private static final AirbyteTraceMessage ERROR_TRACE_MESSAGE = - AirbyteMessageUtils.createErrorTraceMessage("a normalization error occurred", 123.0); - - private WorkerConfigs workerConfigs; - private Path jobRoot; - private Path normalizationRoot; - private NormalizationInput normalizationInput; - private NormalizationRunner normalizationRunner; - - @BeforeEach - void setup() throws Exception { - workerConfigs = new WorkerConfigs(new EnvConfigs()); - jobRoot = Files.createDirectories(Files.createTempDirectory("test").resolve(WORKSPACE_ROOT)); - normalizationRoot = jobRoot.resolve("normalize"); - - final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(); - normalizationInput = new NormalizationInput() - .withDestinationConfiguration(syncPair.getValue().getDestinationConfiguration()) - .withCatalog(syncPair.getValue().getCatalog()) - .withResourceRequirements(workerConfigs.getResourceRequirements()); - - normalizationRunner = mock(NormalizationRunner.class); - - when(normalizationRunner.normalize( - JOB_ID, - JOB_ATTEMPT, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), workerConfigs.getResourceRequirements())) - .thenReturn(true); - } - - @Test - void test() throws Exception { - final DefaultNormalizationWorker normalizationWorker = - new DefaultNormalizationWorker(JOB_ID, JOB_ATTEMPT, normalizationRunner, WorkerEnvironment.DOCKER); - - final NormalizationSummary normalizationOutput = normalizationWorker.run(normalizationInput, jobRoot); - - verify(normalizationRunner).start(); - verify(normalizationRunner).normalize( - JOB_ID, - JOB_ATTEMPT, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), 
workerConfigs.getResourceRequirements()); - verify(normalizationRunner).close(); - assertNotNull(normalizationOutput.getStartTime()); - assertNotNull(normalizationOutput.getEndTime()); - } - - // This test verifies the expected behaviour prior to adding TRACE message handling - // if no TRACE messages are emitted we should throw a WorkerException as before - @Test - void testFailure() throws Exception { - when(normalizationRunner.normalize(JOB_ID, - JOB_ATTEMPT, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), workerConfigs.getResourceRequirements())) - .thenReturn(false); - - final DefaultNormalizationWorker normalizationWorker = - new DefaultNormalizationWorker(JOB_ID, JOB_ATTEMPT, normalizationRunner, WorkerEnvironment.DOCKER); - - assertThrows(WorkerException.class, () -> normalizationWorker.run(normalizationInput, jobRoot)); - - verify(normalizationRunner).start(); - } - - // This test verifies failure behaviour when we have TRACE messages emitted from normalization - // instead of throwing an exception, we should return the summary with a non-empty FailureReasons - // array - @Test - void testFailureWithTraceMessage() throws Exception { - when(normalizationRunner.normalize(JOB_ID, - JOB_ATTEMPT, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), workerConfigs.getResourceRequirements())) - .thenReturn(false); - - when(normalizationRunner.getTraceMessages()).thenReturn(Stream.of(ERROR_TRACE_MESSAGE)); - - final DefaultNormalizationWorker normalizationWorker = - new DefaultNormalizationWorker(JOB_ID, JOB_ATTEMPT, normalizationRunner, WorkerEnvironment.DOCKER); - - final NormalizationSummary normalizationOutput = normalizationWorker.run(normalizationInput, jobRoot); - - verify(normalizationRunner).start(); - verify(normalizationRunner).normalize( - JOB_ID, - JOB_ATTEMPT, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - 
normalizationInput.getCatalog(), workerConfigs.getResourceRequirements()); - verify(normalizationRunner).close(); - assertNotNull(normalizationOutput.getStartTime()); - assertNotNull(normalizationOutput.getEndTime()); - assertFalse(normalizationOutput.getFailures().isEmpty()); - assertTrue(normalizationOutput.getFailures().stream() - .anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.NORMALIZATION) - && f.getExternalMessage().contains(ERROR_TRACE_MESSAGE.getError().getMessage()))); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java deleted file mode 100644 index b263a494e02c..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java +++ /dev/null @@ -1,883 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import static java.lang.Thread.sleep; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.atLeastOnce; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import io.airbyte.config.ConfigSchema; 
-import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.FailureReason; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.ReplicationAttemptSummary; -import io.airbyte.config.ReplicationOutput; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.config.StandardSyncSummary.ReplicationStatus; -import io.airbyte.config.State; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import io.airbyte.config.WorkerDestinationConfig; -import io.airbyte.config.WorkerSourceConfig; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.featureflag.TestClient; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.protocol.models.AirbyteLogMessage.Level; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.protocol.models.Config; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.workers.RecordSchemaValidator; -import io.airbyte.workers.WorkerMetricReporter; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.helper.ConnectorConfigUpdater; -import io.airbyte.workers.helper.FailureHelper; -import io.airbyte.workers.internal.AirbyteDestination; -import io.airbyte.workers.internal.AirbyteSource; -import io.airbyte.workers.internal.NamespacingMapper; -import io.airbyte.workers.internal.book_keeping.AirbyteMessageTracker; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import io.airbyte.workers.test_utils.TestConfigHelpers; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.Collections; -import java.util.List; 
-import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.function.Executable; -import org.mockito.Mockito; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -class DefaultReplicationWorkerTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultReplicationWorkerTest.class); - - private static final String JOB_ID = "0"; - private static final int JOB_ATTEMPT = 0; - private static final Path WORKSPACE_ROOT = Path.of("workspaces/10"); - private static final String STREAM_NAME = "user_preferences"; - private static final String FIELD_NAME = "favorite_color"; - private static final AirbyteMessage RECORD_MESSAGE1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "blue"); - private static final AirbyteMessage RECORD_MESSAGE2 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "yellow"); - private static final AirbyteMessage RECORD_MESSAGE3 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, 3); - private static final AirbyteMessage STATE_MESSAGE = AirbyteMessageUtils.createStateMessage("checkpoint", "1"); - private static final AirbyteTraceMessage ERROR_TRACE_MESSAGE = - AirbyteMessageUtils.createErrorTraceMessage("a connector error occurred", Double.valueOf(123)); - final Config CONNECTOR_CONFIG = new Config().withAdditionalProperty("my_key", "my_new_value"); - final AirbyteMessage CONFIG_MESSAGE = AirbyteMessageUtils.createConfigControlMessage(CONNECTOR_CONFIG, 1D); - private static final String STREAM1 = "stream1"; - - private static final String NAMESPACE = "namespace"; - private static final String INDUCED_EXCEPTION = "induced exception"; - - private Path 
jobRoot; - private AirbyteSource source; - private NamespacingMapper mapper; - private AirbyteDestination destination; - private StandardSyncInput syncInput; - private WorkerSourceConfig sourceConfig; - private WorkerDestinationConfig destinationConfig; - private AirbyteMessageTracker messageTracker; - private RecordSchemaValidator recordSchemaValidator; - private MetricClient metricClient; - private WorkerMetricReporter workerMetricReporter; - private ConnectorConfigUpdater connectorConfigUpdater; - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() throws Exception { - MDC.clear(); - - jobRoot = Files.createDirectories(Files.createTempDirectory("test").resolve(WORKSPACE_ROOT)); - - final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(); - syncInput = syncPair.getValue(); - - sourceConfig = WorkerUtils.syncToWorkerSourceConfig(syncInput); - destinationConfig = WorkerUtils.syncToWorkerDestinationConfig(syncInput); - - source = mock(AirbyteSource.class); - mapper = mock(NamespacingMapper.class); - destination = mock(AirbyteDestination.class); - messageTracker = mock(AirbyteMessageTracker.class); - recordSchemaValidator = mock(RecordSchemaValidator.class); - connectorConfigUpdater = mock(ConnectorConfigUpdater.class); - metricClient = MetricClientFactory.getMetricClient(); - workerMetricReporter = new WorkerMetricReporter(metricClient, "docker_image:v1.0.0"); - - when(source.isFinished()).thenReturn(false, false, false, true); - when(destination.isFinished()).thenReturn(false, false, false, true); - when(source.attemptRead()).thenReturn(Optional.of(RECORD_MESSAGE1), Optional.empty(), Optional.of(RECORD_MESSAGE2)); - when(destination.attemptRead()).thenReturn(Optional.of(STATE_MESSAGE)); - when(mapper.mapCatalog(destinationConfig.getCatalog())).thenReturn(destinationConfig.getCatalog()); - when(mapper.mapMessage(RECORD_MESSAGE1)).thenReturn(RECORD_MESSAGE1); - when(mapper.mapMessage(RECORD_MESSAGE2)).thenReturn(RECORD_MESSAGE2); - 
when(mapper.mapMessage(RECORD_MESSAGE3)).thenReturn(RECORD_MESSAGE3); - when(mapper.mapMessage(CONFIG_MESSAGE)).thenReturn(CONFIG_MESSAGE); - } - - @AfterEach - void tearDown() { - MDC.clear(); - } - - @Test - void test() throws Exception { - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - worker.run(syncInput, jobRoot); - - verify(source).start(sourceConfig, jobRoot); - verify(destination).start(destinationConfig, jobRoot); - verify(destination).accept(RECORD_MESSAGE1); - verify(destination).accept(RECORD_MESSAGE2); - verify(source, atLeastOnce()).close(); - verify(destination).close(); - verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE1.getRecord(), - AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE1.getRecord())); - verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE2.getRecord(), - AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE2.getRecord())); - } - - @Test - void testInvalidSchema() throws Exception { - when(source.attemptRead()).thenReturn(Optional.of(RECORD_MESSAGE1), Optional.of(RECORD_MESSAGE2), Optional.of(RECORD_MESSAGE3)); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, - false); - - worker.run(syncInput, jobRoot); - - verify(source).start(sourceConfig, jobRoot); - verify(destination).start(destinationConfig, jobRoot); - verify(destination).accept(RECORD_MESSAGE1); - verify(destination).accept(RECORD_MESSAGE2); - verify(destination).accept(RECORD_MESSAGE3); - verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE1.getRecord(), - AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE1.getRecord())); - 
verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE2.getRecord(), - AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE2.getRecord())); - verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE3.getRecord(), - AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE3.getRecord())); - verify(source).close(); - verify(destination).close(); - } - - @Test - void testSourceNonZeroExitValue() throws Exception { - when(source.getExitValue()).thenReturn(1); - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); - assertTrue(output.getFailures().stream().anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.SOURCE))); - } - - @Test - void testReplicationRunnableSourceFailure() throws Exception { - final String SOURCE_ERROR_MESSAGE = "the source had a failure"; - - when(source.attemptRead()).thenThrow(new RuntimeException(SOURCE_ERROR_MESSAGE)); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); - assertTrue(output.getFailures().stream() - .anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.SOURCE) && f.getStacktrace().contains(SOURCE_ERROR_MESSAGE))); - } - - @Test - void testReplicationRunnableSourceUpdateConfig() throws Exception { - when(source.attemptRead()).thenReturn(Optional.of(RECORD_MESSAGE1), Optional.of(CONFIG_MESSAGE), Optional.empty()); - - final ReplicationWorker 
worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.COMPLETED, output.getReplicationAttemptSummary().getStatus()); - - verify(connectorConfigUpdater).updateSource(syncInput.getSourceId(), CONNECTOR_CONFIG); - } - - @Test - void testSourceConfigPersistError() throws Exception { - when(source.attemptRead()).thenReturn(Optional.of(CONFIG_MESSAGE)); - when(source.isFinished()).thenReturn(false, true); - - final String PERSIST_ERROR_MESSAGE = "there was a problem persisting the new config"; - doThrow(new RuntimeException(PERSIST_ERROR_MESSAGE)).when(connectorConfigUpdater).updateSource(Mockito.any(), Mockito.any()); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.COMPLETED, output.getReplicationAttemptSummary().getStatus()); - - verify(connectorConfigUpdater).updateSource(syncInput.getSourceId(), CONNECTOR_CONFIG); - } - - @Test - void testReplicationRunnableDestinationUpdateConfig() throws Exception { - when(destination.attemptRead()).thenReturn(Optional.of(STATE_MESSAGE), Optional.of(CONFIG_MESSAGE)); - when(destination.isFinished()).thenReturn(false, false, true); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.COMPLETED, 
output.getReplicationAttemptSummary().getStatus()); - - verify(connectorConfigUpdater).updateDestination(syncInput.getDestinationId(), CONNECTOR_CONFIG); - } - - @Test - void testDestinationConfigPersistError() throws Exception { - when(destination.attemptRead()).thenReturn(Optional.of(CONFIG_MESSAGE)); - when(destination.isFinished()).thenReturn(false, true); - - final String PERSIST_ERROR_MESSAGE = "there was a problem persisting the new config"; - doThrow(new RuntimeException(PERSIST_ERROR_MESSAGE)).when(connectorConfigUpdater).updateDestination(Mockito.any(), Mockito.any()); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.COMPLETED, output.getReplicationAttemptSummary().getStatus()); - - verify(connectorConfigUpdater).updateDestination(syncInput.getDestinationId(), CONNECTOR_CONFIG); - } - - @Test - void testReplicationRunnableDestinationFailure() throws Exception { - final String DESTINATION_ERROR_MESSAGE = "the destination had a failure"; - - doThrow(new RuntimeException(DESTINATION_ERROR_MESSAGE)).when(destination).accept(Mockito.any()); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); - assertTrue(output.getFailures().stream() - .anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.DESTINATION) && f.getStacktrace().contains(DESTINATION_ERROR_MESSAGE))); - } - - @Test - void testReplicationRunnableDestinationFailureViaTraceMessage() throws 
Exception { - final FailureReason failureReason = FailureHelper.destinationFailure(ERROR_TRACE_MESSAGE, Long.valueOf(JOB_ID), JOB_ATTEMPT); - when(messageTracker.errorTraceMessageFailure(Long.valueOf(JOB_ID), JOB_ATTEMPT)).thenReturn(failureReason); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertTrue(output.getFailures().stream() - .anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.DESTINATION) - && f.getExternalMessage().contains(ERROR_TRACE_MESSAGE.getError().getMessage()))); - } - - @Test - void testReplicationRunnableWorkerFailure() throws Exception { - final String WORKER_ERROR_MESSAGE = "the worker had a failure"; - - doThrow(new RuntimeException(WORKER_ERROR_MESSAGE)).when(messageTracker).acceptFromSource(Mockito.any()); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); - assertTrue(output.getFailures().stream() - .anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.REPLICATION) && f.getStacktrace().contains(WORKER_ERROR_MESSAGE))); - } - - @Test - void testOnlyStateAndRecordMessagesDeliveredToDestination() throws Exception { - final AirbyteMessage LOG_MESSAGE = AirbyteMessageUtils.createLogMessage(Level.INFO, "a log message"); - final AirbyteMessage TRACE_MESSAGE = AirbyteMessageUtils.createErrorMessage("a trace message", 123456.0); - when(mapper.mapMessage(LOG_MESSAGE)).thenReturn(LOG_MESSAGE); - 
when(mapper.mapMessage(TRACE_MESSAGE)).thenReturn(TRACE_MESSAGE); - when(source.isFinished()).thenReturn(false, false, false, false, true); - when(source.attemptRead()).thenReturn(Optional.of(RECORD_MESSAGE1), Optional.of(LOG_MESSAGE), Optional.of(TRACE_MESSAGE), - Optional.of(RECORD_MESSAGE2)); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - worker.run(syncInput, jobRoot); - - verify(source).start(sourceConfig, jobRoot); - verify(destination).start(destinationConfig, jobRoot); - verify(destination).accept(RECORD_MESSAGE1); - verify(destination).accept(RECORD_MESSAGE2); - verify(destination, never()).accept(LOG_MESSAGE); - verify(destination, never()).accept(TRACE_MESSAGE); - } - - @Test - void testOnlySelectedFieldsDeliveredToDestinationWithFieldSelectionEnabled() throws Exception { - // Generate a record with an extra field. - final AirbyteMessage recordWithExtraFields = Jsons.clone(RECORD_MESSAGE1); - ((ObjectNode) recordWithExtraFields.getRecord().getData()).put("AnUnexpectedField", "SomeValue"); - when(mapper.mapMessage(recordWithExtraFields)).thenReturn(recordWithExtraFields); - when(source.attemptRead()).thenReturn(Optional.of(recordWithExtraFields)); - when(source.isFinished()).thenReturn(false, true); - // Use a real schema validator to make sure validation doesn't affect this. 
- final String streamName = sourceConfig.getCatalog().getStreams().get(0).getStream().getName(); - final String streamNamespace = sourceConfig.getCatalog().getStreams().get(0).getStream().getNamespace(); - recordSchemaValidator = new RecordSchemaValidator(new TestClient(), syncInput.getWorkspaceId(), - Map.of(new AirbyteStreamNameNamespacePair(streamName, streamNamespace), - sourceConfig.getCatalog().getStreams().get(0).getStream().getJsonSchema())); - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, true); - - worker.run(syncInput, jobRoot); - - // Despite reading recordWithExtraFields from the source, we write the original RECORD_MESSAGE1 to - // the destination because the new field has been filtered out. - verify(destination).accept(RECORD_MESSAGE1); - } - - @Test - void testAllFieldsDeliveredWithFieldSelectionDisabled() throws Exception { - // Generate a record with an extra field. - final AirbyteMessage recordWithExtraFields = Jsons.clone(RECORD_MESSAGE1); - ((ObjectNode) recordWithExtraFields.getRecord().getData()).put("AnUnexpectedField", "SomeValue"); - when(mapper.mapMessage(recordWithExtraFields)).thenReturn(recordWithExtraFields); - when(source.attemptRead()).thenReturn(Optional.of(recordWithExtraFields)); - when(source.isFinished()).thenReturn(false, true); - // Use a real schema validator to make sure validation doesn't affect this. 
- final String streamName = sourceConfig.getCatalog().getStreams().get(0).getStream().getName(); - final String streamNamespace = sourceConfig.getCatalog().getStreams().get(0).getStream().getNamespace(); - recordSchemaValidator = new RecordSchemaValidator(new TestClient(), syncInput.getWorkspaceId(), - Map.of(new AirbyteStreamNameNamespacePair(streamName, streamNamespace), - sourceConfig.getCatalog().getStreams().get(0).getStream().getJsonSchema())); - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - worker.run(syncInput, jobRoot); - - // Despite the field not being in the catalog, we write the extra field anyway because field - // selection is disabled. - verify(destination).accept(recordWithExtraFields); - } - - @Test - void testDestinationNonZeroExitValue() throws Exception { - when(destination.getExitValue()).thenReturn(1); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); - assertTrue(output.getFailures().stream().anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.DESTINATION))); - } - - @Test - void testDestinationRunnableDestinationFailure() throws Exception { - final String DESTINATION_ERROR_MESSAGE = "the destination had a failure"; - - doThrow(new RuntimeException(DESTINATION_ERROR_MESSAGE)).when(destination).notifyEndOfInput(); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - 
connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); - assertTrue(output.getFailures().stream() - .anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.DESTINATION) && f.getStacktrace().contains(DESTINATION_ERROR_MESSAGE))); - } - - @Test - void testDestinationRunnableWorkerFailure() throws Exception { - final String WORKER_ERROR_MESSAGE = "the worker had a failure"; - - doThrow(new RuntimeException(WORKER_ERROR_MESSAGE)).when(messageTracker).acceptFromDestination(Mockito.any()); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput output = worker.run(syncInput, jobRoot); - assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); - assertTrue(output.getFailures().stream() - .anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.REPLICATION) && f.getStacktrace().contains(WORKER_ERROR_MESSAGE))); - } - - @Test - void testLoggingInThreads() throws IOException, WorkerException { - // set up the mdc so that actually log to a file, so that we can verify that file logging captures - // threads. - final Path jobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); - LogClientSingleton.getInstance().setJobMdc(WorkerEnvironment.DOCKER, LogConfigs.EMPTY, jobRoot); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - worker.run(syncInput, jobRoot); - - final Path logPath = jobRoot.resolve(LogClientSingleton.LOG_FILENAME); - final String logs = IOs.readFile(logPath); - - // make sure we get logs from the threads. 
- assertTrue(logs.contains("Replication thread started.")); - assertTrue(logs.contains("Destination output thread started.")); - } - - @Test - void testLogMaskRegex() throws IOException { - final Path jobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); - MDC.put(LogClientSingleton.WORKSPACE_MDC_KEY, jobRoot.toString()); - - LOGGER.info( - "500 Server Error: Internal Server Error for url: https://api.hubapi.com/crm/v3/objects/contact?limit=100&archived=false&hapikey=secret-key_1&after=5315621"); - - final Path logPath = jobRoot.resolve("logs.log"); - final String logs = IOs.readFile(logPath); - assertTrue(logs.contains("apikey")); - assertFalse(logs.contains("secret-key_1")); - } - - @SuppressWarnings({"BusyWait"}) - @Test - void testCancellation() throws InterruptedException { - final AtomicReference output = new AtomicReference<>(); - when(source.isFinished()).thenReturn(false); - when(messageTracker.getDestinationOutputState()).thenReturn(Optional.of(new State().withState(STATE_MESSAGE.getState().getData()))); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final Thread workerThread = new Thread(() -> { - try { - output.set(worker.run(syncInput, jobRoot)); - } catch (final WorkerException e) { - throw new RuntimeException(e); - } - }); - - workerThread.start(); - - // verify the worker is actually running before we kill it. 
- while (Mockito.mockingDetails(messageTracker).getInvocations().size() < 5) { - LOGGER.info("waiting for worker to start running"); - sleep(100); - } - - worker.cancel(); - Assertions.assertTimeout(Duration.ofSeconds(5), (Executable) workerThread::join); - assertNotNull(output.get()); - assertEquals(output.get().getState().getState(), STATE_MESSAGE.getState().getData()); - } - - @Test - void testPopulatesOutputOnSuccess() throws WorkerException { - final JsonNode expectedState = Jsons.jsonNode(ImmutableMap.of("updated_at", 10L)); - when(messageTracker.getDestinationOutputState()).thenReturn(Optional.of(new State().withState(expectedState))); - when(messageTracker.getTotalRecordsEmitted()).thenReturn(12L); - when(messageTracker.getTotalBytesEmitted()).thenReturn(100L); - when(messageTracker.getTotalSourceStateMessagesEmitted()).thenReturn(3L); - when(messageTracker.getTotalDestinationStateMessagesEmitted()).thenReturn(1L); - when(messageTracker.getStreamToEmittedBytes()).thenReturn(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 100L)); - when(messageTracker.getStreamToEmittedRecords()) - .thenReturn(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 12L)); - when(messageTracker.getMaxSecondsToReceiveSourceStateMessage()).thenReturn(5L); - when(messageTracker.getMeanSecondsToReceiveSourceStateMessage()).thenReturn(4L); - when(messageTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted()).thenReturn(Optional.of(6L)); - when(messageTracker.getMeanSecondsBetweenStateMessageEmittedAndCommitted()).thenReturn(Optional.of(3L)); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput actual = worker.run(syncInput, jobRoot); - final ReplicationOutput replicationOutput = new ReplicationOutput() - 
.withReplicationAttemptSummary(new ReplicationAttemptSummary() - .withRecordsSynced(12L) - .withBytesSynced(100L) - .withStatus(ReplicationStatus.COMPLETED) - .withTotalStats(new SyncStats() - .withRecordsEmitted(12L) - .withBytesEmitted(100L) - .withSourceStateMessagesEmitted(3L) - .withDestinationStateMessagesEmitted(1L) - .withMaxSecondsBeforeSourceStateMessageEmitted(5L) - .withMeanSecondsBeforeSourceStateMessageEmitted(4L) - .withMaxSecondsBetweenStateMessageEmittedandCommitted(6L) - .withMeanSecondsBetweenStateMessageEmittedandCommitted(3L) - .withRecordsCommitted(12L)) // since success, should use emitted count - .withStreamStats(Collections.singletonList( - new StreamSyncStats() - .withStreamName(STREAM1) - .withStreamNamespace(NAMESPACE) - .withStats(new SyncStats() - .withBytesEmitted(100L) - .withRecordsEmitted(12L) - .withRecordsCommitted(12L) // since success, should use emitted count - .withSourceStateMessagesEmitted(null) - .withDestinationStateMessagesEmitted(null) - .withMaxSecondsBeforeSourceStateMessageEmitted(null) - .withMeanSecondsBeforeSourceStateMessageEmitted(null) - .withMaxSecondsBetweenStateMessageEmittedandCommitted(null) - .withMeanSecondsBetweenStateMessageEmittedandCommitted(null))))) - .withOutputCatalog(syncInput.getCatalog()) - .withState(new State().withState(expectedState)); - - // good enough to verify that times are present. - assertNotNull(actual.getReplicationAttemptSummary().getStartTime()); - assertNotNull(actual.getReplicationAttemptSummary().getEndTime()); - - // verify output object matches declared json schema spec. - final Set validate = new JsonSchemaValidator() - .validate(Jsons.jsonNode(Jsons.jsonNode(JsonSchemaValidator.getSchema(ConfigSchema.REPLICATION_OUTPUT.getConfigSchemaFile()))), - Jsons.jsonNode(actual)); - assertTrue(validate.isEmpty(), "Validation errors: " + Strings.join(validate, ",")); - - // remove times, so we can do the rest of the object <> object comparison. 
- actual.getReplicationAttemptSummary().withStartTime(null).withEndTime(null).getTotalStats().withReplicationStartTime(null) - .withReplicationEndTime(null) - .withSourceReadStartTime(null).withSourceReadEndTime(null) - .withDestinationWriteStartTime(null).withDestinationWriteEndTime(null); - - assertEquals(replicationOutput, actual); - } - - @Test - void testPopulatesStateOnFailureIfAvailable() throws Exception { - doThrow(new IllegalStateException(INDUCED_EXCEPTION)).when(source).close(); - when(messageTracker.getDestinationOutputState()).thenReturn(Optional.of(new State().withState(STATE_MESSAGE.getState().getData()))); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput actual = worker.run(syncInput, jobRoot); - assertNotNull(actual); - assertEquals(STATE_MESSAGE.getState().getData(), actual.getState().getState()); - } - - @Test - void testRetainsStateOnFailureIfNewStateNotAvailable() throws Exception { - doThrow(new IllegalStateException(INDUCED_EXCEPTION)).when(source).close(); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput actual = worker.run(syncInput, jobRoot); - - assertNotNull(actual); - assertEquals(syncInput.getState().getState(), actual.getState().getState()); - } - - @Test - void testPopulatesStatsOnFailureIfAvailable() throws Exception { - doThrow(new IllegalStateException(INDUCED_EXCEPTION)).when(source).close(); - when(messageTracker.getTotalRecordsEmitted()).thenReturn(12L); - when(messageTracker.getTotalBytesEmitted()).thenReturn(100L); - when(messageTracker.getTotalRecordsCommitted()).thenReturn(Optional.of(6L)); - 
when(messageTracker.getTotalSourceStateMessagesEmitted()).thenReturn(3L); - when(messageTracker.getTotalDestinationStateMessagesEmitted()).thenReturn(2L); - when(messageTracker.getStreamToEmittedBytes()).thenReturn(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 100L)); - when(messageTracker.getStreamToEmittedRecords()) - .thenReturn(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 12L)); - when(messageTracker.getStreamToCommittedRecords()) - .thenReturn(Optional.of(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 6L))); - when(messageTracker.getMaxSecondsToReceiveSourceStateMessage()).thenReturn(10L); - when(messageTracker.getMeanSecondsToReceiveSourceStateMessage()).thenReturn(8L); - when(messageTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted()).thenReturn(Optional.of(12L)); - when(messageTracker.getMeanSecondsBetweenStateMessageEmittedAndCommitted()).thenReturn(Optional.of(11L)); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput actual = worker.run(syncInput, jobRoot); - final SyncStats expectedTotalStats = new SyncStats() - .withRecordsEmitted(12L) - .withBytesEmitted(100L) - .withSourceStateMessagesEmitted(3L) - .withDestinationStateMessagesEmitted(2L) - .withMaxSecondsBeforeSourceStateMessageEmitted(10L) - .withMeanSecondsBeforeSourceStateMessageEmitted(8L) - .withMaxSecondsBetweenStateMessageEmittedandCommitted(12L) - .withMeanSecondsBetweenStateMessageEmittedandCommitted(11L) - .withRecordsCommitted(6L); - final List expectedStreamStats = Collections.singletonList( - new StreamSyncStats() - .withStreamName(STREAM1) - .withStreamNamespace(NAMESPACE) - .withStats(new SyncStats() - .withBytesEmitted(100L) - .withRecordsEmitted(12L) - 
.withRecordsCommitted(6L) - .withSourceStateMessagesEmitted(null) - .withDestinationStateMessagesEmitted(null))); - - assertNotNull(actual); - // null out timing stats for assertion matching - assertEquals(expectedTotalStats, actual.getReplicationAttemptSummary().getTotalStats().withReplicationStartTime(null).withReplicationEndTime(null) - .withSourceReadStartTime(null).withSourceReadEndTime(null).withDestinationWriteStartTime(null).withDestinationWriteEndTime(null)); - assertEquals(expectedStreamStats, actual.getReplicationAttemptSummary().getStreamStats()); - } - - @Test - void testDoesNotPopulatesStateOnFailureIfNotAvailable() throws Exception { - final StandardSyncInput syncInputWithoutState = Jsons.clone(syncInput); - syncInputWithoutState.setState(null); - - doThrow(new IllegalStateException(INDUCED_EXCEPTION)).when(source).close(); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - - final ReplicationOutput actual = worker.run(syncInputWithoutState, jobRoot); - - assertNotNull(actual); - assertNull(actual.getState()); - } - - @Test - void testDoesNotPopulateOnIrrecoverableFailure() { - doThrow(new IllegalStateException(INDUCED_EXCEPTION)).when(messageTracker).getTotalRecordsEmitted(); - - final ReplicationWorker worker = new DefaultReplicationWorker( - JOB_ID, - JOB_ATTEMPT, - source, - mapper, - destination, - messageTracker, - recordSchemaValidator, - workerMetricReporter, - connectorConfigUpdater, false); - assertThrows(WorkerException.class, () -> worker.run(syncInput, jobRoot)); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/EmptyAirbyteDestination.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/EmptyAirbyteDestination.java deleted file mode 100644 index 96653c2e699d..000000000000 --- 
a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/EmptyAirbyteDestination.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.config.WorkerDestinationConfig; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.workers.internal.AirbyteDestination; -import java.nio.file.Path; -import java.util.Optional; - -/** - * Empty Airbyte Destination. Does nothing with messages. Intended for performance testing. - */ -public class EmptyAirbyteDestination implements AirbyteDestination { - - @Override - public void start(WorkerDestinationConfig destinationConfig, Path jobRoot) throws Exception { - - } - - @Override - public void accept(AirbyteMessage message) throws Exception { - - } - - @Override - public void notifyEndOfInput() throws Exception { - - } - - @Override - public boolean isFinished() { - return true; - } - - @Override - public int getExitValue() { - return 0; - } - - @Override - public Optional attemptRead() { - return Optional.empty(); - } - - @Override - public void close() throws Exception {} - - @Override - public void cancel() throws Exception { - - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/LimitedIntegrationLauncher.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/LimitedIntegrationLauncher.java deleted file mode 100644 index b5e2f3e50809..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/LimitedIntegrationLauncher.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.process.IntegrationLauncher; -import java.nio.file.Path; - -/** - * Test-only launcher to launch {@link LimitedSourceProcess}. Intended as a convenient test harness - * for testing. 
- */ -public class LimitedIntegrationLauncher implements IntegrationLauncher { - - @Override - public Process spec(Path jobRoot) throws WorkerException { - return null; - } - - @Override - public Process check(Path jobRoot, String configFilename, String configContents) throws WorkerException { - return null; - } - - @Override - public Process discover(Path jobRoot, String configFilename, String configContents) throws WorkerException { - return null; - } - - @Override - public Process read(Path jobRoot, - String configFilename, - String configContents, - String catalogFilename, - String catalogContents, - String stateFilename, - String stateContents) - throws WorkerException { - return new LimitedSourceProcess(); - } - - @Override - public Process write(Path jobRoot, - String configFilename, - String configContents, - String catalogFilename, - String catalogContents) - throws WorkerException { - return null; - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/LimitedSourceProcess.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/LimitedSourceProcess.java deleted file mode 100644 index 081b05bfb8a0..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/LimitedSourceProcess.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; -import java.lang.ProcessHandle.Info; -import java.nio.charset.Charset; -import java.time.Duration; -import java.time.Instant; -import java.util.Optional; -import java.util.concurrent.Executors; - -/** - * Basic Airbyte Source that emits {@link LimitedSourceProcess#TOTAL_RECORDS} before finishing. 
- * Intended for performance testing. - */ -public class LimitedSourceProcess extends Process { - - private static final int TOTAL_RECORDS = 2_000_000; - private static final ObjectMapper MAPPER = new ObjectMapper(); - - private int currRecs = 0; - private final PipedInputStream is = new PipedInputStream(); - - @Override - public OutputStream getOutputStream() { - return null; - } - - @Override - public InputStream getInputStream() { - final OutputStream os; - // start writing to the input stream - try { - os = new PipedOutputStream(is); - } catch (IOException e) { - throw new RuntimeException(e); - } - - Executors.newSingleThreadExecutor().submit( - () -> { - try { - while (currRecs != TOTAL_RECORDS) { - var msg = AirbyteMessageUtils.createRecordMessage("s1", "data", - "This is a fairly long sentence to provide some bytes here. More bytes is better as it helps us measure performance." - + "Random append to prevent dead code generation :"); - os.write(MAPPER.writeValueAsString(msg).getBytes(Charset.defaultCharset())); - os.write(System.getProperty("line.separator").getBytes(Charset.defaultCharset())); - currRecs++; - } - os.flush(); - os.close(); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); - - return is; - } - - @Override - public InputStream getErrorStream() { - return new PipedInputStream(); - } - - @Override - public int waitFor() throws InterruptedException { - while (exitValue() != 0) { - Thread.sleep(1000 * 10); - } - return exitValue(); - } - - @Override - public int exitValue() { - if (currRecs == TOTAL_RECORDS) { - try { - is.close(); - } catch (IOException e) { - throw new RuntimeException(e); - } - return 0; - } - - throw new IllegalThreadStateException("process hasn't exited"); - - } - - @Override - public void destroy() { - currRecs = TOTAL_RECORDS; - - try { - is.close(); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public Info info() { - return new TestProcessInfo(); - } - - static 
class TestProcessInfo implements Info { - - @Override - public Optional command() { - return Optional.of("test process"); - } - - @Override - public Optional commandLine() { - return Optional.of("test process"); - } - - @Override - public Optional arguments() { - return Optional.empty(); - } - - @Override - public Optional startInstant() { - return Optional.empty(); - } - - @Override - public Optional totalCpuDuration() { - return Optional.empty(); - } - - @Override - public Optional user() { - return Optional.empty(); - } - - } - - public static void main(String[] args) {} - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerPerformanceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerPerformanceTest.java deleted file mode 100644 index 641023cc88b2..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerPerformanceTest.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.general; - -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.protocol.AirbyteMessageMigrator; -import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider; -import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory; -import io.airbyte.commons.protocol.ConfiguredAirbyteCatalogMigrator; -import io.airbyte.commons.protocol.migrations.v1.AirbyteMessageMigrationV1; -import io.airbyte.commons.protocol.migrations.v1.ConfiguredAirbyteCatalogMigrationV1; -import io.airbyte.commons.protocol.serde.AirbyteMessageV0Deserializer; -import io.airbyte.commons.protocol.serde.AirbyteMessageV0Serializer; -import io.airbyte.commons.version.Version; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.ReplicationOutput; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.featureflag.TestClient; -import io.airbyte.metrics.lib.NotImplementedMetricClient; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.workers.RecordSchemaValidator; -import io.airbyte.workers.WorkerMetricReporter; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.helper.ConnectorConfigUpdater; -import io.airbyte.workers.internal.DefaultAirbyteSource; -import io.airbyte.workers.internal.NamespacingMapper; -import io.airbyte.workers.internal.VersionedAirbyteStreamFactory; -import io.airbyte.workers.internal.book_keeping.AirbyteMessageTracker; -import io.airbyte.workers.process.IntegrationLauncher; -import java.io.IOException; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicReference; -import lombok.extern.slf4j.Slf4j; -import org.mockito.Mockito; -import 
org.openjdk.jmh.annotations.Benchmark; -import org.openjdk.jmh.annotations.BenchmarkMode; -import org.openjdk.jmh.annotations.Fork; -import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Mode; -import org.openjdk.jmh.annotations.Warmup; - -@Slf4j -public class ReplicationWorkerPerformanceTest { - - /** - * Hook up the DefaultReplicationWorker to a test harness with an insanely quick Source - * {@link LimitedSourceProcess} via the {@link LimitedIntegrationLauncher} and Destination - * {@link EmptyAirbyteDestination}. - *

- * Harness uses Java Micro Benchmark to run the E2E sync a configured number of times. It then - * reports a time distribution for the time taken to run the E2E sync. - *

- * Because the reported time does not explicitly include throughput numbers, throughput logging has - * been added. This class is intended to help devs understand the impact of changes on throughput. - *

- * To use this, simply run the main method, make yourself a cup of coffee for 5 mins, then look the - * logs. - */ - @Benchmark - // SampleTime = the time taken to run the benchmarked method. Use this because we only care about - // the time taken to sync the entire dataset. - @BenchmarkMode(Mode.SampleTime) - // Warming up the JVM stabilises results however takes longer. Skip this for now since we don't need - // that fine a result. - @Warmup(iterations = 0) - // How many runs to do. - @Fork(1) - // Within each run, how many iterations to do. - @Measurement(iterations = 2) - public void executeOneSync() throws InterruptedException { - final var perDestination = new EmptyAirbyteDestination(); - final var messageTracker = new AirbyteMessageTracker(new EnvVariableFeatureFlags()); - final var connectorConfigUpdater = Mockito.mock(ConnectorConfigUpdater.class); - final var metricReporter = new WorkerMetricReporter(new NotImplementedMetricClient(), "test-image:0.01"); - final var dstNamespaceMapper = new NamespacingMapper(NamespaceDefinitionType.DESTINATION, "", ""); - final var workspaceID = UUID.randomUUID(); - final var validator = new RecordSchemaValidator(new TestClient(), workspaceID, Map.of( - new AirbyteStreamNameNamespacePair("s1", null), - CatalogHelpers.fieldsToJsonSchema(io.airbyte.protocol.models.Field.of("data", JsonSchemaType.STRING)))); - - final IntegrationLauncher integrationLauncher = new LimitedIntegrationLauncher(); - final var serDeProvider = new AirbyteMessageSerDeProvider( - List.of(new AirbyteMessageV0Deserializer()), - List.of(new AirbyteMessageV0Serializer())); - serDeProvider.initialize(); - - final var msgMigrator = new AirbyteMessageMigrator(List.of(new AirbyteMessageMigrationV1())); - msgMigrator.initialize(); - final ConfiguredAirbyteCatalogMigrator catalogMigrator = new ConfiguredAirbyteCatalogMigrator( - List.of(new ConfiguredAirbyteCatalogMigrationV1())); - catalogMigrator.initialize(); - final var migratorFactory = new 
AirbyteProtocolVersionedMigratorFactory(msgMigrator, catalogMigrator); - - final var versionFac = - new VersionedAirbyteStreamFactory(serDeProvider, migratorFactory, new Version("0.2.0"), Optional.empty(), - Optional.of(RuntimeException.class)); - final var versionedAbSource = - new DefaultAirbyteSource(integrationLauncher, versionFac, migratorFactory.getProtocolSerializer(new Version("0.2.0")), - new EnvVariableFeatureFlags()); - - final var worker = new DefaultReplicationWorker("1", 0, - versionedAbSource, - dstNamespaceMapper, - perDestination, - messageTracker, - validator, - metricReporter, - connectorConfigUpdater, - false); - final AtomicReference output = new AtomicReference<>(); - final Thread workerThread = new Thread(() -> { - try { - final var ignoredPath = Path.of("/"); - final StandardSyncInput testInput = new StandardSyncInput().withCatalog( - // The stream fields here are intended to match the records emitted by the LimitedSourceProcess - // class. - CatalogHelpers.createConfiguredAirbyteCatalog("s1", null, Field.of("data", JsonSchemaType.STRING))); - output.set(worker.run(testInput, ignoredPath)); - } catch (final WorkerException e) { - throw new RuntimeException(e); - } - }); - - workerThread.start(); - workerThread.join(); - final var summary = output.get().getReplicationAttemptSummary(); - final var mbRead = summary.getBytesSynced() / 1_000_000; - final var timeTakenSec = (summary.getEndTime() - summary.getStartTime()) / 1000.0; - log.info("MBs read: {}, Time taken sec: {}, MB/s: {}", mbRead, timeTakenSec, mbRead / timeTakenSec); - } - - public static void main(final String[] args) throws IOException, InterruptedException { - // Run this main class to start benchmarking. 
- org.openjdk.jmh.Main.main(args); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/StubAirbyteMapper.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/StubAirbyteMapper.java deleted file mode 100644 index 43bcd775686e..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/StubAirbyteMapper.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.workers.internal.AirbyteMapper; - -/** - * Stub mapper testing what happens without any mapping. - */ -public class StubAirbyteMapper implements AirbyteMapper { - - @Override - public ConfiguredAirbyteCatalog mapCatalog(ConfiguredAirbyteCatalog catalog) { - return null; - } - - @Override - public AirbyteMessage mapMessage(AirbyteMessage message) { - return message; - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/AirbyteProtocolPredicateTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/AirbyteProtocolPredicateTest.java deleted file mode 100644 index 0596f4426403..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/AirbyteProtocolPredicateTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class AirbyteProtocolPredicateTest { - - private static final String STREAM_NAME = "user_preferences"; - private static final String FIELD_NAME = "favorite_color"; - private static final String GREEN = "green"; - - private AirbyteProtocolPredicate predicate; - - @BeforeEach - void setup() { - predicate = new AirbyteProtocolPredicate(); - } - - @Test - void testValid() { - assertTrue(predicate.test(Jsons.jsonNode(AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, GREEN)))); - } - - @Test - void testInValid() { - assertFalse(predicate.test(Jsons.deserialize("{ \"fish\": \"tuna\"}"))); - } - - @Test - void testConcatenatedValid() { - final String concatenated = - Jsons.serialize(AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, GREEN)) - + Jsons.serialize(AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "yellow")); - - assertTrue(predicate.test(Jsons.deserialize(concatenated))); - } - - @Test - void testMissingNewLineAndLineStartsWithValidRecord() { - final String concatenated = - Jsons.serialize(AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, GREEN)) - + "{ \"fish\": \"tuna\"}"; - - assertTrue(predicate.test(Jsons.deserialize(concatenated))); - } - - @Test - void testMissingNewLineAndLineStartsWithInvalidRecord() { - final String concatenated = - "{ \"fish\": \"tuna\"}" - + Jsons.serialize(AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, GREEN)); - - assertFalse(predicate.test(Jsons.deserialize(concatenated))); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/DefaultAirbyteDestinationTest.java 
b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/DefaultAirbyteDestinationTest.java deleted file mode 100644 index 79eb0e3e5867..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/DefaultAirbyteDestinationTest.java +++ /dev/null @@ -1,243 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import static io.airbyte.commons.logging.LoggingHelper.RESET; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.RETURNS_DEEP_STUBS; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.common.collect.Lists; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.LoggingHelper.Color; -import io.airbyte.commons.protocol.DefaultProtocolSerializer; -import io.airbyte.commons.protocol.ProtocolSerializer; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.WorkerDestinationConfig; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.process.IntegrationLauncher; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import io.airbyte.workers.test_utils.TestConfigHelpers; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; 
-import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.List; -import java.util.stream.Stream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class DefaultAirbyteDestinationTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultAirbyteDestinationTest.class); - private static final Path TEST_ROOT = Path.of("/tmp/airbyte_tests"); - private static final String JOB_ROOT_PREFIX = "workspace"; - private static final String STREAM_NAME = "user_preferences"; - private static final String FIELD_NAME = "favorite_color"; - - private static final WorkerDestinationConfig DESTINATION_CONFIG = - WorkerUtils.syncToWorkerDestinationConfig(TestConfigHelpers.createSyncConfig().getValue()); - - private static final List MESSAGES = Lists.newArrayList( - AirbyteMessageUtils.createStateMessage("checkpoint", "1"), - AirbyteMessageUtils.createStateMessage("checkpoint", "2")); - - private static Path logJobRoot; - - static { - try { - logJobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); - LogClientSingleton.getInstance().setJobMdc(WorkerEnvironment.DOCKER, LogConfigs.EMPTY, logJobRoot); - } catch (final IOException e) { - LOGGER.error(e.toString()); - } - } - - private Path jobRoot; - private IntegrationLauncher integrationLauncher; - private Process process; - private AirbyteStreamFactory streamFactory; - private AirbyteMessageBufferedWriterFactory messageWriterFactory; - private final ProtocolSerializer protocolSerializer = new DefaultProtocolSerializer(); - private ByteArrayOutputStream outputStream; - - @BeforeEach - void setup() throws IOException, WorkerException { - jobRoot = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), JOB_ROOT_PREFIX); - - process = 
mock(Process.class); - outputStream = spy(new ByteArrayOutputStream()); - when(process.getOutputStream()).thenReturn(outputStream); - when(process.getInputStream()).thenReturn(new ByteArrayInputStream("input".getBytes(StandardCharsets.UTF_8))); - when(process.getErrorStream()).thenReturn(new ByteArrayInputStream("error".getBytes(StandardCharsets.UTF_8))); - - integrationLauncher = mock(IntegrationLauncher.class, RETURNS_DEEP_STUBS); - final InputStream inputStream = mock(InputStream.class); - when(integrationLauncher.write( - jobRoot, - WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - Jsons.serialize(DESTINATION_CONFIG.getDestinationConnectionConfiguration()), - WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME, - Jsons.serialize(DESTINATION_CONFIG.getCatalog()))) - .thenReturn(process); - - when(process.isAlive()).thenReturn(true); - when(process.getInputStream()).thenReturn(inputStream); - - streamFactory = noop -> MESSAGES.stream(); - messageWriterFactory = new DefaultAirbyteMessageBufferedWriterFactory(); - } - - @AfterEach - void tearDown() throws IOException { - // The log file needs to be present and empty - final Path logFile = logJobRoot.resolve(LogClientSingleton.LOG_FILENAME); - if (Files.exists(logFile)) { - Files.delete(logFile); - } - Files.createFile(logFile); - } - - @SuppressWarnings("BusyWait") - @Test - void testSuccessfulLifecycle() throws Exception { - final AirbyteDestination destination = - new DefaultAirbyteDestination(integrationLauncher, streamFactory, messageWriterFactory, protocolSerializer); - destination.start(DESTINATION_CONFIG, jobRoot); - - final AirbyteMessage recordMessage = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "blue"); - destination.accept(recordMessage); - - final List messages = Lists.newArrayList(); - - assertFalse(destination.isFinished()); - messages.add(destination.attemptRead().get()); - assertFalse(destination.isFinished()); - messages.add(destination.attemptRead().get()); - 
assertFalse(destination.isFinished()); - - when(process.isAlive()).thenReturn(false); - assertTrue(destination.isFinished()); - - verify(outputStream, never()).close(); - - destination.notifyEndOfInput(); - - verify(outputStream).close(); - - destination.close(); - - Assertions.assertEquals(MESSAGES, messages); - - Assertions.assertTimeout(Duration.ofSeconds(5), () -> { - while (process.getErrorStream().available() != 0) { - Thread.sleep(50); - } - }); - - verify(process).exitValue(); - } - - @Test - void testTaggedLogs() throws Exception { - - final AirbyteDestination destination = - new DefaultAirbyteDestination(integrationLauncher, streamFactory, messageWriterFactory, protocolSerializer); - destination.start(DESTINATION_CONFIG, jobRoot); - - final AirbyteMessage recordMessage = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "blue"); - destination.accept(recordMessage); - - final List messages = Lists.newArrayList(); - - messages.add(destination.attemptRead().get()); - messages.add(destination.attemptRead().get()); - - when(process.isAlive()).thenReturn(false); - - destination.notifyEndOfInput(); - - destination.close(); - - final Path logPath = logJobRoot.resolve(LogClientSingleton.LOG_FILENAME); - final Stream logs = IOs.readFile(logPath).lines(); - - logs.forEach(line -> { - org.assertj.core.api.Assertions.assertThat(line) - .startsWith(Color.YELLOW_BACKGROUND.getCode() + "destination" + RESET); - }); - } - - @Test - void testCloseNotifiesLifecycle() throws Exception { - final AirbyteDestination destination = new DefaultAirbyteDestination(integrationLauncher); - destination.start(DESTINATION_CONFIG, jobRoot); - - verify(outputStream, never()).close(); - - when(process.isAlive()).thenReturn(false); - destination.close(); - verify(outputStream).close(); - } - - @Test - void testNonzeroExitCodeThrowsException() throws Exception { - final AirbyteDestination destination = new DefaultAirbyteDestination(integrationLauncher); - 
destination.start(DESTINATION_CONFIG, jobRoot); - - when(process.isAlive()).thenReturn(false); - when(process.exitValue()).thenReturn(1); - Assertions.assertThrows(WorkerException.class, destination::close); - } - - @Test - void testIgnoredExitCodes() throws Exception { - final AirbyteDestination destination = new DefaultAirbyteDestination(integrationLauncher); - destination.start(DESTINATION_CONFIG, jobRoot); - when(process.isAlive()).thenReturn(false); - - DefaultAirbyteDestination.IGNORED_EXIT_CODES.forEach(exitCode -> { - when(process.exitValue()).thenReturn(exitCode); - Assertions.assertDoesNotThrow(destination::close); - }); - } - - @Test - void testGetExitValue() throws Exception { - final AirbyteDestination destination = new DefaultAirbyteDestination(integrationLauncher); - destination.start(DESTINATION_CONFIG, jobRoot); - - when(process.isAlive()).thenReturn(false); - when(process.exitValue()).thenReturn(2); - - assertEquals(2, destination.getExitValue()); - // call a second time to verify that exit value is cached - assertEquals(2, destination.getExitValue()); - verify(process, times(1)).exitValue(); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/EmptyAirbyteSourceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/EmptyAirbyteSourceTest.java deleted file mode 100644 index 827333645cc3..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/EmptyAirbyteSourceTest.java +++ /dev/null @@ -1,505 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ResetSourceConfiguration; -import io.airbyte.config.State; -import io.airbyte.config.WorkerSourceConfig; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStream; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.StreamDescriptor; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class EmptyAirbyteSourceTest { - - private EmptyAirbyteSource emptyAirbyteSource; - private final AirbyteMessage EMPTY_MESSAGE = - new AirbyteMessage().withType(Type.STATE) - .withState(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.emptyObject())); - - private final ConfiguredAirbyteCatalog airbyteCatalog = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("a")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("b")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("c")))); - - @BeforeEach - void init() { - emptyAirbyteSource = new EmptyAirbyteSource(true); - } - - @Test - void testLegacy() throws Exception { - emptyAirbyteSource.start(new WorkerSourceConfig(), null); 
- - legacyStateResult(); - } - - @Test - void testLegacyWithEmptyConfig() throws Exception { - emptyAirbyteSource.start(new WorkerSourceConfig().withSourceConnectionConfiguration(Jsons.emptyObject()), null); - - legacyStateResult(); - } - - @Test - void testLegacyWithWrongConfigFormat() throws Exception { - emptyAirbyteSource.start(new WorkerSourceConfig().withSourceConnectionConfiguration(Jsons.jsonNode( - Map.of("not", "expected"))), null); - - legacyStateResult(); - } - - @Test - void testEmptyListOfStreams() throws Exception { - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(new ArrayList<>()); - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withCatalog(airbyteCatalog); - - emptyAirbyteSource.start(workerSourceConfig, null); - - legacyStateResult(); - } - - @Test - void nonStartedSource() { - final Throwable thrown = Assertions.catchThrowable(() -> emptyAirbyteSource.attemptRead()); - Assertions.assertThat(thrown) - .isInstanceOf(IllegalStateException.class); - } - - @Test - void testGlobal() throws Exception { - final List streamDescriptors = getProtocolStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); - - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); - - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.jsonNode(createGlobalState(streamDescriptors, Jsons.emptyObject())))) - .withCatalog(airbyteCatalog); - - emptyAirbyteSource.start(workerSourceConfig, null); - - final Optional maybeMessage = emptyAirbyteSource.attemptRead(); - Assertions.assertThat(maybeMessage) 
- .isNotEmpty(); - - final AirbyteMessage message = maybeMessage.get(); - Assertions.assertThat(message.getType()).isEqualTo(Type.STATE); - - /* - * The comparison could be what it is below but it makes it hard to see what is the diff. It has - * been break dow into multiples assertions. (same comment in the other tests) - * - * AirbyteStateMessage expectedState = new AirbyteStateMessage() - * .withStateType(AirbyteStateType.GLOBAL) .withGlobal( new AirbyteGlobalState() - * .withSharedState(Jsons.emptyObject()) .withStreamStates( Lists.newArrayList( new - * AirbyteStreamState().withStreamState(null).withStreamDescriptor(new - * StreamDescriptor().withName("a")), new - * AirbyteStreamState().withStreamState(null).withStreamDescriptor(new - * StreamDescriptor().withName("b")), new - * AirbyteStreamState().withStreamState(null).withStreamDescriptor(new - * StreamDescriptor().withName("c")) ) ) ); - * - * Assertions.assertThat(stateMessage).isEqualTo(expectedState); - */ - final AirbyteStateMessage stateMessage = message.getState(); - Assertions.assertThat(stateMessage.getType()).isEqualTo(AirbyteStateType.GLOBAL); - Assertions.assertThat(stateMessage.getGlobal().getSharedState()).isNull(); - Assertions.assertThat(stateMessage.getGlobal().getStreamStates()) - .map(streamState -> streamState.getStreamDescriptor()) - .containsExactlyElementsOf(streamDescriptors); - Assertions.assertThat(stateMessage.getGlobal().getStreamStates()) - .map(streamState -> streamState.getStreamState()) - .containsOnlyNulls(); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - } - - @Test - void testGlobalPartial() throws Exception { - final String NOT_RESET_STREAM_NAME = "c"; - - final List streamDescriptors = getProtocolStreamDescriptorFromName(Lists.newArrayList("a", "b", NOT_RESET_STREAM_NAME)); - - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b")); - - final ResetSourceConfiguration resetSourceConfiguration = new 
ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.jsonNode(createGlobalState(streamDescriptors, Jsons.emptyObject())))) - .withCatalog(airbyteCatalog); - - emptyAirbyteSource.start(workerSourceConfig, null); - - final Optional maybeMessage = emptyAirbyteSource.attemptRead(); - Assertions.assertThat(maybeMessage) - .isNotEmpty(); - - final AirbyteMessage message = maybeMessage.get(); - Assertions.assertThat(message.getType()).isEqualTo(Type.STATE); - - final AirbyteStateMessage stateMessage = message.getState(); - - Assertions.assertThat(stateMessage.getType()).isEqualTo(AirbyteStateType.GLOBAL); - Assertions.assertThat(stateMessage.getGlobal().getSharedState()).isEqualTo(Jsons.emptyObject()); - Assertions.assertThat(stateMessage.getGlobal().getStreamStates()) - .filteredOn(streamState -> streamState.getStreamDescriptor().getName() != NOT_RESET_STREAM_NAME) - .map(AirbyteStreamState::getStreamState) - .containsOnlyNulls(); - Assertions.assertThat(stateMessage.getGlobal().getStreamStates()) - .filteredOn(streamState -> streamState.getStreamDescriptor().getName() == NOT_RESET_STREAM_NAME) - .map(AirbyteStreamState::getStreamState) - .contains(Jsons.emptyObject()); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - - Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); - } - - @Test - void testGlobalNewStream() throws Exception { - final String NEW_STREAM = "c"; - - final List streamDescriptors = getProtocolStreamDescriptorFromName(Lists.newArrayList("a", "b")); - - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", NEW_STREAM)); - - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final WorkerSourceConfig 
workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.jsonNode(createGlobalState(streamDescriptors, Jsons.emptyObject())))) - .withCatalog(airbyteCatalog); - - emptyAirbyteSource.start(workerSourceConfig, null); - - final Optional maybeMessage = emptyAirbyteSource.attemptRead(); - Assertions.assertThat(maybeMessage) - .isNotEmpty(); - - final AirbyteMessage message = maybeMessage.get(); - Assertions.assertThat(message.getType()).isEqualTo(Type.STATE); - - final AirbyteStateMessage stateMessage = message.getState(); - - Assertions.assertThat(stateMessage.getType()).isEqualTo(AirbyteStateType.GLOBAL); - Assertions.assertThat(stateMessage.getGlobal().getSharedState()).isNull(); - Assertions.assertThat(stateMessage.getGlobal().getStreamStates()) - .map(AirbyteStreamState::getStreamState) - .containsOnlyNulls(); - Assertions.assertThat(stateMessage.getGlobal().getStreamStates()) - .filteredOn(streamState -> streamState.getStreamDescriptor().getName() == NEW_STREAM) - .hasSize(1); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - - Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); - } - - @Test - void testPerStream() throws Exception { - final List streamDescriptors = getProtocolStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); - - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); - - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.jsonNode(createPerStreamState(streamDescriptors)))) - .withCatalog(airbyteCatalog); - - emptyAirbyteSource.start(workerSourceConfig, null); - - 
streamToReset.forEach(this::testReceiveNullStreamState); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - - Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); - } - - @Test - void testPerStreamWithExtraState() throws Exception { - // This should never happen but nothing keeps us from processing the reset and not fail - final List streamDescriptors = getProtocolStreamDescriptorFromName(Lists.newArrayList("a", "b", "c", "d")); - - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); - - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.jsonNode(createPerStreamState(streamDescriptors)))) - .withCatalog(airbyteCatalog); - - emptyAirbyteSource.start(workerSourceConfig, null); - - streamToReset.forEach(this::testReceiveNullStreamState); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - - Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); - } - - @Test - void testPerStreamWithMissingState() throws Exception { - final String NEW_STREAM = "c"; - - final List streamDescriptors = getProtocolStreamDescriptorFromName(Lists.newArrayList("a", "b")); - - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", NEW_STREAM)); - - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.jsonNode(createPerStreamState(streamDescriptors)))) - .withCatalog(airbyteCatalog); - - 
emptyAirbyteSource.start(workerSourceConfig, null); - - streamToReset.forEach(this::testReceiveNullStreamState); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - - Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); - } - - // In the LEGACY state, if the list of streams passed in to be reset does not include every stream - // in the Catalog, then something has gone wrong and we should throw an error - @Test - void testLegacyWithMissingCatalogStream() { - - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); - - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final ConfiguredAirbyteCatalog airbyteCatalogWithExtraStream = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("a")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("b")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("c")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("d")))); - - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.emptyObject())) - .withCatalog(airbyteCatalogWithExtraStream); - - Assertions.assertThatThrownBy(() -> emptyAirbyteSource.start(workerSourceConfig, null)) - .isInstanceOf(IllegalStateException.class); - - } - - // If there are extra streams to reset that do not exist in the Catalog, the reset should work - // properly with all streams being reset - @Test - void testLegacyWithResettingExtraStreamNotInCatalog() throws Exception { - final List streamToResetWithExtra = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c", "d")); - - final ResetSourceConfiguration resetSourceConfiguration = new 
ResetSourceConfiguration() - .withStreamsToReset(streamToResetWithExtra); - final ConfiguredAirbyteCatalog airbyteCatalog = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("a")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("b")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("c")))); - - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.emptyObject())) - .withCatalog(airbyteCatalog); - - emptyAirbyteSource.start(workerSourceConfig, null); - - final Optional maybeMessage = emptyAirbyteSource.attemptRead(); - Assertions.assertThat(maybeMessage) - .isNotEmpty(); - - final AirbyteMessage message = maybeMessage.get(); - Assertions.assertThat(message.getType()).isEqualTo(Type.STATE); - - final AirbyteStateMessage stateMessage = message.getState(); - Assertions.assertThat(stateMessage.getType()).isEqualTo(AirbyteStateType.LEGACY); - Assertions.assertThat(stateMessage.getData()).isEqualTo(Jsons.emptyObject()); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - - Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); - - } - - @Test - void testLegacyWithNewConfig() throws Exception { - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); - - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final ConfiguredAirbyteCatalog airbyteCatalog = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("a")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("b")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("c")))); 
- - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withState(new State() - .withState(Jsons.emptyObject())) - .withCatalog(airbyteCatalog); - - emptyAirbyteSource.start(workerSourceConfig, null); - - final Optional maybeMessage = emptyAirbyteSource.attemptRead(); - Assertions.assertThat(maybeMessage) - .isNotEmpty(); - - final AirbyteMessage message = maybeMessage.get(); - Assertions.assertThat(message.getType()).isEqualTo(Type.STATE); - - final AirbyteStateMessage stateMessage = message.getState(); - Assertions.assertThat(stateMessage.getType()).isEqualTo(AirbyteStateType.LEGACY); - Assertions.assertThat(stateMessage.getData()).isEqualTo(Jsons.emptyObject()); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - - Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); - } - - @Test - void testLegacyWithNullState() throws Exception { - final List streamToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); - - final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() - .withStreamsToReset(streamToReset); - final ConfiguredAirbyteCatalog airbyteCatalogWithExtraStream = new ConfiguredAirbyteCatalog() - .withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("a")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("b")), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("c")))); - - final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) - .withCatalog(airbyteCatalogWithExtraStream); - - emptyAirbyteSource.start(workerSourceConfig, null); - - final Optional maybeMessage = emptyAirbyteSource.attemptRead(); - Assertions.assertThat(maybeMessage) - .isNotEmpty(); - - final AirbyteMessage message = 
maybeMessage.get(); - Assertions.assertThat(message.getType()).isEqualTo(Type.STATE); - - final AirbyteStateMessage stateMessage = message.getState(); - Assertions.assertThat(stateMessage.getType()).isEqualTo(AirbyteStateType.LEGACY); - Assertions.assertThat(stateMessage.getData()).isEqualTo(Jsons.emptyObject()); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - - Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); - } - - private void testReceiveNullStreamState(final StreamDescriptor streamDescriptor) { - final Optional maybeMessage = emptyAirbyteSource.attemptRead(); - Assertions.assertThat(maybeMessage) - .isNotEmpty(); - - final AirbyteMessage message = maybeMessage.get(); - Assertions.assertThat(message.getType()).isEqualTo(Type.STATE); - - final AirbyteStateMessage stateMessage = message.getState(); - Assertions.assertThat(stateMessage.getType()).isEqualTo(AirbyteStateType.STREAM); - Assertions.assertThat(stateMessage.getStream().getStreamDescriptor()).isEqualTo(new StreamDescriptor() - .withName(streamDescriptor.getName()) - .withNamespace(streamDescriptor.getNamespace())); - Assertions.assertThat(stateMessage.getStream().getStreamState()).isNull(); - } - - private List getProtocolStreamDescriptorFromName(final List names) { - return names.stream().map( - name -> new StreamDescriptor().withName(name)).toList(); - } - - private List getConfigStreamDescriptorFromName(final List names) { - return names.stream().map( - name -> new StreamDescriptor().withName(name)).toList(); - } - - private void legacyStateResult() { - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isNotEmpty() - .contains(EMPTY_MESSAGE); - - Assertions.assertThat(emptyAirbyteSource.attemptRead()) - .isEmpty(); - } - - private List createPerStreamState(final List streamDescriptors) { - return streamDescriptors.stream().map(streamDescriptor -> new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState() 
- .withStreamDescriptor(streamDescriptor) - .withStreamState(Jsons.emptyObject()))) - .toList(); - } - - private List createGlobalState(final List streamDescriptors, final JsonNode sharedState) { - final AirbyteGlobalState globalState = new AirbyteGlobalState() - .withSharedState(sharedState) - .withStreamStates( - streamDescriptors.stream().map(streamDescriptor -> new AirbyteStreamState() - .withStreamDescriptor(streamDescriptor) - .withStreamState(Jsons.emptyObject())) - .toList()); - - return Lists.newArrayList( - new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(globalState)); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/HeartbeatMonitorTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/HeartbeatMonitorTest.java deleted file mode 100644 index ebab913497fa..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/HeartbeatMonitorTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.time.Duration; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class HeartbeatMonitorTest { - - private static final Duration HEART_BEAT_FRESH_DURATION = Duration.of(30, ChronoUnit.SECONDS); - - private static final Instant NOW = Instant.now(); - private static final Instant FIVE_SECONDS_BEFORE = NOW.minus(5, ChronoUnit.SECONDS); - private static final Instant THIRTY_SECONDS_BEFORE = NOW.minus(30, ChronoUnit.SECONDS); - - private Supplier nowSupplier; - private HeartbeatMonitor heartbeatMonitor; - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() { - nowSupplier = mock(Supplier.class); - heartbeatMonitor = new HeartbeatMonitor(HEART_BEAT_FRESH_DURATION, nowSupplier); - } - - @Test - void testNeverBeat() { - assertFalse(heartbeatMonitor.isBeating()); - } - - @Test - void testFreshBeat() { - when(nowSupplier.get()).thenReturn(FIVE_SECONDS_BEFORE).thenReturn(NOW); - heartbeatMonitor.beat(); - assertTrue(heartbeatMonitor.isBeating()); - } - - @Test - void testStaleBeat() { - when(nowSupplier.get()).thenReturn(THIRTY_SECONDS_BEFORE).thenReturn(NOW); - heartbeatMonitor.beat(); - assertFalse(heartbeatMonitor.isBeating()); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java deleted file mode 100644 index 4111de80a805..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights 
reserved. - */ - -package io.airbyte.workers.internal; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class NamespacingMapperTest { - - private static final String INPUT_NAMESPACE = "source_namespace"; - private static final String OUTPUT_PREFIX = "output_"; - private static final String STREAM_NAME = "user_preferences"; - private static final String FIELD_NAME = "favorite_color"; - private static final String BLUE = "blue"; - - private static final ConfiguredAirbyteCatalog CATALOG = CatalogHelpers.createConfiguredAirbyteCatalog( - STREAM_NAME, - INPUT_NAMESPACE, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - private AirbyteMessage RECORD_MESSAGE; - - private static AirbyteMessage createRecordMessage() { - final AirbyteMessage message = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, BLUE); - message.getRecord().withNamespace(INPUT_NAMESPACE); - return message; - } - - @BeforeEach - void setUp() { - RECORD_MESSAGE = createRecordMessage(); - } - - @Test - void testSourceNamespace() { - final NamespacingMapper mapper = new NamespacingMapper(NamespaceDefinitionType.SOURCE, null, OUTPUT_PREFIX); - - final ConfiguredAirbyteCatalog originalCatalog = Jsons.clone(CATALOG); - final ConfiguredAirbyteCatalog expectedCatalog = CatalogHelpers.createConfiguredAirbyteCatalog( - OUTPUT_PREFIX + STREAM_NAME, - INPUT_NAMESPACE, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - final ConfiguredAirbyteCatalog actualCatalog = mapper.mapCatalog(CATALOG); - - 
assertEquals(originalCatalog, CATALOG); - assertEquals(expectedCatalog, actualCatalog); - - final AirbyteMessage originalMessage = Jsons.clone(RECORD_MESSAGE); - assertEquals(originalMessage, RECORD_MESSAGE); - - final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE); - expectedMessage.getRecord().withNamespace(INPUT_NAMESPACE); - - final AirbyteMessage actualMessage = mapper.mapMessage(RECORD_MESSAGE); - assertEquals(expectedMessage, actualMessage); - } - - @Test - void testEmptySourceNamespace() { - final NamespacingMapper mapper = new NamespacingMapper(NamespaceDefinitionType.SOURCE, null, OUTPUT_PREFIX); - - final ConfiguredAirbyteCatalog originalCatalog = Jsons.clone(CATALOG); - assertEquals(originalCatalog, CATALOG); - originalCatalog.getStreams().get(0).getStream().withNamespace(null); - final ConfiguredAirbyteCatalog expectedCatalog = CatalogHelpers.createConfiguredAirbyteCatalog( - OUTPUT_PREFIX + STREAM_NAME, - null, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - final ConfiguredAirbyteCatalog actualCatalog = mapper.mapCatalog(originalCatalog); - - assertEquals(expectedCatalog, actualCatalog); - - final AirbyteMessage originalMessage = Jsons.clone(RECORD_MESSAGE); - assertEquals(originalMessage, RECORD_MESSAGE); - originalMessage.getRecord().withNamespace(null); - - final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE); - expectedMessage.getRecord().withNamespace(null); - final AirbyteMessage actualMessage = mapper.mapMessage(originalMessage); - - assertEquals(expectedMessage, actualMessage); - } - - @Test - void testDestinationNamespace() { - final NamespacingMapper mapper = new NamespacingMapper(NamespaceDefinitionType.DESTINATION, null, OUTPUT_PREFIX); - - final ConfiguredAirbyteCatalog originalCatalog = Jsons.clone(CATALOG); - final ConfiguredAirbyteCatalog expectedCatalog = 
CatalogHelpers.createConfiguredAirbyteCatalog( - OUTPUT_PREFIX + STREAM_NAME, - null, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - final ConfiguredAirbyteCatalog actualCatalog = mapper.mapCatalog(CATALOG); - - assertEquals(originalCatalog, CATALOG); - assertEquals(expectedCatalog, actualCatalog); - - final AirbyteMessage originalMessage = Jsons.clone(RECORD_MESSAGE); - assertEquals(originalMessage, RECORD_MESSAGE); - - final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE); - final AirbyteMessage actualMessage = mapper.mapMessage(RECORD_MESSAGE); - assertEquals(expectedMessage, actualMessage); - } - - @Test - void testCustomFormatWithVariableNamespace() { - final NamespacingMapper mapper = new NamespacingMapper(NamespaceDefinitionType.CUSTOMFORMAT, "${SOURCE_NAMESPACE}_suffix", OUTPUT_PREFIX); - - final String expectedNamespace = INPUT_NAMESPACE + "_suffix"; - final ConfiguredAirbyteCatalog originalCatalog = Jsons.clone(CATALOG); - final ConfiguredAirbyteCatalog expectedCatalog = CatalogHelpers.createConfiguredAirbyteCatalog( - OUTPUT_PREFIX + STREAM_NAME, expectedNamespace, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - final ConfiguredAirbyteCatalog actualCatalog = mapper.mapCatalog(CATALOG); - - assertEquals(originalCatalog, CATALOG); - assertEquals(expectedCatalog, actualCatalog); - - final AirbyteMessage originalMessage = Jsons.clone(RECORD_MESSAGE); - assertEquals(originalMessage, RECORD_MESSAGE); - - final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE); - expectedMessage.getRecord().withNamespace(expectedNamespace); - final AirbyteMessage actualMessage = mapper.mapMessage(RECORD_MESSAGE); - assertEquals(expectedMessage, actualMessage); - } - - @Test - void testCustomFormatWithoutVariableNamespace() { - final NamespacingMapper mapper = new NamespacingMapper(NamespaceDefinitionType.CUSTOMFORMAT, "output", 
OUTPUT_PREFIX); - - final String expectedNamespace = "output"; - final ConfiguredAirbyteCatalog originalCatalog = Jsons.clone(CATALOG); - final ConfiguredAirbyteCatalog expectedCatalog = CatalogHelpers.createConfiguredAirbyteCatalog( - OUTPUT_PREFIX + STREAM_NAME, expectedNamespace, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - final ConfiguredAirbyteCatalog actualCatalog = mapper.mapCatalog(CATALOG); - - assertEquals(originalCatalog, CATALOG); - assertEquals(expectedCatalog, actualCatalog); - - final AirbyteMessage originalMessage = Jsons.clone(RECORD_MESSAGE); - assertEquals(originalMessage, RECORD_MESSAGE); - - final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE); - expectedMessage.getRecord().withNamespace(expectedNamespace); - final AirbyteMessage actualMessage = mapper.mapMessage(RECORD_MESSAGE); - assertEquals(expectedMessage, actualMessage); - } - - @Test - void testEmptyCustomFormatWithVariableNamespace() { - final NamespacingMapper mapper = new NamespacingMapper(NamespaceDefinitionType.CUSTOMFORMAT, "${SOURCE_NAMESPACE}", OUTPUT_PREFIX); - - final ConfiguredAirbyteCatalog originalCatalog = Jsons.clone(CATALOG); - assertEquals(originalCatalog, CATALOG); - originalCatalog.getStreams().get(0).getStream().withNamespace(null); - final ConfiguredAirbyteCatalog expectedCatalog = CatalogHelpers.createConfiguredAirbyteCatalog( - OUTPUT_PREFIX + STREAM_NAME, - null, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - final ConfiguredAirbyteCatalog actualCatalog = mapper.mapCatalog(originalCatalog); - - assertEquals(expectedCatalog, actualCatalog); - - final AirbyteMessage originalMessage = Jsons.clone(RECORD_MESSAGE); - assertEquals(originalMessage, RECORD_MESSAGE); - originalMessage.getRecord().withNamespace(null); - - final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE); - 
expectedMessage.getRecord().withNamespace(null); - final AirbyteMessage actualMessage = mapper.mapMessage(originalMessage); - - assertEquals(expectedMessage, actualMessage); - } - - @Test - void testEmptyPrefix() { - final NamespacingMapper mapper = new NamespacingMapper(NamespaceDefinitionType.SOURCE, null, null); - - final ConfiguredAirbyteCatalog originalCatalog = Jsons.clone(CATALOG); - final ConfiguredAirbyteCatalog expectedCatalog = CatalogHelpers.createConfiguredAirbyteCatalog( - STREAM_NAME, - INPUT_NAMESPACE, - Field.of(FIELD_NAME, JsonSchemaType.STRING)); - final ConfiguredAirbyteCatalog actualCatalog = mapper.mapCatalog(CATALOG); - - assertEquals(originalCatalog, CATALOG); - assertEquals(expectedCatalog, actualCatalog); - - final AirbyteMessage originalMessage = Jsons.clone(RECORD_MESSAGE); - assertEquals(originalMessage, RECORD_MESSAGE); - - final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage( - STREAM_NAME, - FIELD_NAME, BLUE); - expectedMessage.getRecord().withNamespace(INPUT_NAMESPACE); - final AirbyteMessage actualMessage = mapper.mapMessage(RECORD_MESSAGE); - assertEquals(expectedMessage, actualMessage); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java deleted file mode 100644 index 987235c8fe5d..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; - -import io.airbyte.commons.protocol.AirbyteMessageMigrator; -import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider; -import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory; -import io.airbyte.commons.protocol.ConfiguredAirbyteCatalogMigrator; -import io.airbyte.commons.protocol.serde.AirbyteMessageV0Deserializer; -import io.airbyte.commons.protocol.serde.AirbyteMessageV0Serializer; -import io.airbyte.commons.protocol.serde.AirbyteMessageV1Deserializer; -import io.airbyte.commons.protocol.serde.AirbyteMessageV1Serializer; -import io.airbyte.commons.version.Version; -import io.airbyte.protocol.models.AirbyteMessage; -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.io.StringReader; -import java.nio.charset.Charset; -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.platform.commons.util.ClassLoaderUtils; - -class VersionedAirbyteStreamFactoryTest { - - AirbyteMessageSerDeProvider serDeProvider; - AirbyteProtocolVersionedMigratorFactory migratorFactory; - - final static Version defaultVersion = new Version("0.2.0"); - - @BeforeEach - void beforeEach() { - serDeProvider = spy(new AirbyteMessageSerDeProvider( - List.of(new AirbyteMessageV0Deserializer(), new AirbyteMessageV1Deserializer()), - List.of(new AirbyteMessageV0Serializer(), new AirbyteMessageV1Serializer()))); - serDeProvider.initialize(); - final AirbyteMessageMigrator airbyteMessageMigrator = new AirbyteMessageMigrator( - // TODO once data types v1 is re-enabled, this test should contain the migration - List.of(/* new AirbyteMessageMigrationV1() */)); - airbyteMessageMigrator.initialize(); - final ConfiguredAirbyteCatalogMigrator 
configuredAirbyteCatalogMigrator = new ConfiguredAirbyteCatalogMigrator( - // TODO once data types v1 is re-enabled, this test should contain the migration - List.of(/* new ConfiguredAirbyteCatalogMigrationV1() */)); - configuredAirbyteCatalogMigrator.initialize(); - migratorFactory = spy(new AirbyteProtocolVersionedMigratorFactory(airbyteMessageMigrator, configuredAirbyteCatalogMigrator)); - } - - @Test - void testCreate() { - final Version initialVersion = new Version("0.1.2"); - final VersionedAirbyteStreamFactory streamFactory = - new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty()); - - final BufferedReader bufferedReader = new BufferedReader(new StringReader("")); - streamFactory.create(bufferedReader); - - verify(serDeProvider).getDeserializer(initialVersion); - verify(migratorFactory).getAirbyteMessageMigrator(initialVersion); - } - - @Test - void testCreateWithVersionDetection() { - final Version initialVersion = new Version("0.0.0"); - final VersionedAirbyteStreamFactory streamFactory = - new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty()) - .withDetectVersion(true); - - final BufferedReader bufferedReader = - getBuffereredReader("version-detection/logs-with-version.jsonl"); - final Stream stream = streamFactory.create(bufferedReader); - - long messageCount = stream.toList().size(); - verify(serDeProvider).getDeserializer(initialVersion); - verify(serDeProvider).getDeserializer(new Version("0.5.9")); - assertEquals(1, messageCount); - } - - @Test - void testCreateWithVersionDetectionFallback() { - final Version initialVersion = new Version("0.0.6"); - final VersionedAirbyteStreamFactory streamFactory = - new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty()) - .withDetectVersion(true); - - final BufferedReader bufferedReader = - 
getBuffereredReader("version-detection/logs-without-version.jsonl"); - final Stream stream = streamFactory.create(bufferedReader); - - final long messageCount = stream.toList().size(); - verify(serDeProvider).getDeserializer(initialVersion); - verify(serDeProvider).getDeserializer(defaultVersion); - assertEquals(1, messageCount); - } - - @Test - void testCreateWithVersionDetectionWithoutSpecMessage() { - final Version initialVersion = new Version("0.0.1"); - final VersionedAirbyteStreamFactory streamFactory = - new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty()) - .withDetectVersion(true); - - final BufferedReader bufferedReader = - getBuffereredReader("version-detection/logs-without-spec-message.jsonl"); - final Stream stream = streamFactory.create(bufferedReader); - - final long messageCount = stream.toList().size(); - verify(serDeProvider).getDeserializer(initialVersion); - verify(serDeProvider).getDeserializer(defaultVersion); - assertEquals(2, messageCount); - } - - BufferedReader getBuffereredReader(final String resourceFile) { - return new BufferedReader( - new InputStreamReader( - ClassLoaderUtils.getDefaultClassLoader().getResourceAsStream(resourceFile), - Charset.defaultCharset())); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/AirbyteMessageTrackerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/AirbyteMessageTrackerTest.java deleted file mode 100644 index 94dc33a76ad4..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/AirbyteMessageTrackerTest.java +++ /dev/null @@ -1,415 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.internal.book_keeping; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.FailureReason; -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.workers.helper.FailureHelper; -import io.airbyte.workers.internal.book_keeping.StateDeltaTracker.StateDeltaTrackerException; -import io.airbyte.workers.internal.state_aggregator.StateAggregator; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class AirbyteMessageTrackerTest { - - private static final String NAMESPACE_1 = "avengers"; - private static final String STREAM_1 = "iron man"; - private static final String STREAM_2 = "black widow"; - private static final String STREAM_3 = "hulk"; - private static final String INDUCED_EXCEPTION = "induced exception"; - - private AirbyteMessageTracker messageTracker; - - @Mock - private StateDeltaTracker mStateDeltaTracker; - - @Mock - private StateAggregator mStateAggregator; - - @BeforeEach - void setup() { - final StateMetricsTracker stateMetricsTracker = new StateMetricsTracker(10L * 1024L * 1024L); - this.messageTracker = new AirbyteMessageTracker(mStateDeltaTracker, mStateAggregator, stateMetricsTracker, 
new EnvVariableFeatureFlags()); - } - - @Test - void testGetTotalRecordsStatesAndBytesEmitted() { - final AirbyteMessage r1 = AirbyteMessageUtils.createRecordMessage(STREAM_1, 123); - final AirbyteMessage s1 = AirbyteMessageUtils.createStateMessage(1); - final AirbyteMessage s2 = AirbyteMessageUtils.createStateMessage(2); - - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(s1); - messageTracker.acceptFromSource(s2); - - assertEquals(3, messageTracker.getTotalRecordsEmitted()); - assertEquals(3L * Jsons.getEstimatedByteSize(r1.getRecord().getData()), messageTracker.getTotalBytesEmitted()); - assertEquals(2, messageTracker.getTotalSourceStateMessagesEmitted()); - } - - @Test - void testRetainsLatestSourceAndDestinationState() { - final int s1Value = 111; - final int s2Value = 222; - final int s3Value = 333; - final AirbyteMessage s1 = AirbyteMessageUtils.createStateMessage(s1Value); - final AirbyteMessage s2 = AirbyteMessageUtils.createStateMessage(s2Value); - final AirbyteMessage s3 = AirbyteMessageUtils.createStateMessage(s3Value); - - final State expectedState = new State().withState(Jsons.jsonNode(s2Value)); - Mockito.when(mStateAggregator.getAggregated()).thenReturn(expectedState); - - messageTracker.acceptFromSource(s1); - messageTracker.acceptFromSource(s2); - messageTracker.acceptFromSource(s3); - messageTracker.acceptFromDestination(s1); - messageTracker.acceptFromDestination(s2); - - assertTrue(messageTracker.getSourceOutputState().isPresent()); - assertEquals(new State().withState(Jsons.jsonNode(s3Value)), messageTracker.getSourceOutputState().get()); - - assertTrue(messageTracker.getDestinationOutputState().isPresent()); - assertEquals(expectedState, messageTracker.getDestinationOutputState().get()); - } - - @Test - void testReturnEmptyStateIfNoneEverAccepted() { - assertTrue(messageTracker.getSourceOutputState().isEmpty()); - 
assertTrue(messageTracker.getDestinationOutputState().isEmpty()); - } - - @Test - void testEmittedRecordsByStream() { - final AirbyteMessage r1 = AirbyteMessageUtils.createRecordMessage(STREAM_1, 1); - final AirbyteMessage r2 = AirbyteMessageUtils.createRecordMessage(STREAM_2, 2); - final AirbyteMessage r3 = AirbyteMessageUtils.createRecordMessage(STREAM_3, 3); - - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromSource(r3); - messageTracker.acceptFromSource(r3); - messageTracker.acceptFromSource(r3); - - final HashMap expected = new HashMap<>(); - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r1.getRecord()), 1L); - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r2.getRecord()), 2L); - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r3.getRecord()), 3L); - - assertEquals(expected, messageTracker.getStreamToEmittedRecords()); - } - - @Test - void testEmittedBytesByStream() { - final AirbyteMessage r1 = AirbyteMessageUtils.createRecordMessage(STREAM_1, 1); - final AirbyteMessage r2 = AirbyteMessageUtils.createRecordMessage(STREAM_2, 2); - final AirbyteMessage r3 = AirbyteMessageUtils.createRecordMessage(STREAM_3, 3); - - final long r1Bytes = Jsons.getEstimatedByteSize(r1.getRecord().getData()); - final long r2Bytes = Jsons.getEstimatedByteSize(r2.getRecord().getData()); - final long r3Bytes = Jsons.getEstimatedByteSize(r3.getRecord().getData()); - - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromSource(r3); - messageTracker.acceptFromSource(r3); - messageTracker.acceptFromSource(r3); - - final Map expected = new HashMap<>(); - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r1.getRecord()), r1Bytes); - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r2.getRecord()), r2Bytes * 2); - 
expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r3.getRecord()), r3Bytes * 3); - - assertEquals(expected, messageTracker.getStreamToEmittedBytes()); - } - - @Test - void testGetCommittedRecordsByStream() { - final AirbyteMessage r1 = AirbyteMessageUtils.createRecordMessage(STREAM_1, 1); - final AirbyteMessage r2 = AirbyteMessageUtils.createRecordMessage(STREAM_2, 2); - final AirbyteMessage r3 = AirbyteMessageUtils.createRecordMessage(STREAM_3, 3); - final AirbyteMessage s1 = AirbyteMessageUtils.createStateMessage(1); - final AirbyteMessage s2 = AirbyteMessageUtils.createStateMessage(2); - - messageTracker.acceptFromSource(r1); // should make stream 1 index 0 - messageTracker.acceptFromSource(r2); // should make stream 2 index 1 - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromSource(s1); // emit state 1 - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromDestination(s1); // commit state 1 - messageTracker.acceptFromSource(r3); // should make stream 3 index 2 - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(s2); // emit state 2 - - final Map countsByIndex = new HashMap<>(); - final Map expected = new HashMap<>(); - Mockito.when(mStateDeltaTracker.getStreamToCommittedRecords()).thenReturn(countsByIndex); - - countsByIndex.put((short) 0, 1L); - countsByIndex.put((short) 1, 2L); - // result only contains counts up to state 1 - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r1.getRecord()), 1L); - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r2.getRecord()), 2L); - assertEquals(expected, messageTracker.getStreamToCommittedRecords().get()); - - countsByIndex.clear(); - expected.clear(); - messageTracker.acceptFromDestination(s2); // now commit state 2 - countsByIndex.put((short) 0, 3L); - countsByIndex.put((short) 1, 3L); - countsByIndex.put((short) 2, 1L); - // result updated with counts between state 1 and state 2 - 
expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r1.getRecord()), 3L); - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r2.getRecord()), 3L); - expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r3.getRecord()), 1L); - assertEquals(expected, messageTracker.getStreamToCommittedRecords().get()); - } - - @Test - void testGetCommittedRecordsByStream_emptyWhenAddStateThrowsException() throws Exception { - Mockito.doThrow(new StateDeltaTrackerException(INDUCED_EXCEPTION)).when(mStateDeltaTracker).addState(Mockito.anyInt(), Mockito.anyMap()); - - final AirbyteMessage r1 = AirbyteMessageUtils.createRecordMessage(STREAM_1, 1); - final AirbyteMessage s1 = AirbyteMessageUtils.createStateMessage(1); - - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(s1); - messageTracker.acceptFromDestination(s1); - - assertTrue(messageTracker.getStreamToCommittedRecords().isEmpty()); - } - - @Test - void testGetCommittedRecordsByStream_emptyWhenCommitStateHashThrowsException() throws Exception { - Mockito.doThrow(new StateDeltaTrackerException(INDUCED_EXCEPTION)).when(mStateDeltaTracker).commitStateHash(Mockito.anyInt()); - - final AirbyteMessage r1 = AirbyteMessageUtils.createRecordMessage(STREAM_1, 1); - final AirbyteMessage s1 = AirbyteMessageUtils.createStateMessage(1); - - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(s1); - messageTracker.acceptFromDestination(s1); - - assertTrue(messageTracker.getStreamToCommittedRecords().isEmpty()); - } - - @Test - void testTotalRecordsCommitted() { - final AirbyteMessage r1 = AirbyteMessageUtils.createRecordMessage(STREAM_1, 1); - final AirbyteMessage r2 = AirbyteMessageUtils.createRecordMessage(STREAM_2, 2); - final AirbyteMessage r3 = AirbyteMessageUtils.createRecordMessage(STREAM_3, 3); - final AirbyteMessage s1 = AirbyteMessageUtils.createStateMessage(1); - final AirbyteMessage s2 = AirbyteMessageUtils.createStateMessage(2); - - 
messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromSource(s1); // emit state 1 - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(r2); - messageTracker.acceptFromDestination(s1); // commit state 1 - messageTracker.acceptFromSource(r3); - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(s2); // emit state 2 - - final Map countsByIndex = new HashMap<>(); - Mockito.when(mStateDeltaTracker.getStreamToCommittedRecords()).thenReturn(countsByIndex); - - countsByIndex.put((short) 0, 1L); - countsByIndex.put((short) 1, 2L); - // result only contains counts up to state 1 - assertEquals(3L, messageTracker.getTotalRecordsCommitted().get()); - - countsByIndex.clear(); - messageTracker.acceptFromDestination(s2); // now commit state 2 - countsByIndex.put((short) 0, 3L); - countsByIndex.put((short) 1, 3L); - countsByIndex.put((short) 2, 1L); - // result updated with counts between state 1 and state 2 - assertEquals(7L, messageTracker.getTotalRecordsCommitted().get()); - } - - @Test - void testGetTotalRecordsCommitted_emptyWhenAddStateThrowsException() throws Exception { - Mockito.doThrow(new StateDeltaTrackerException(INDUCED_EXCEPTION)).when(mStateDeltaTracker).addState(Mockito.anyInt(), Mockito.anyMap()); - - final AirbyteMessage r1 = AirbyteMessageUtils.createRecordMessage(STREAM_1, 1); - final AirbyteMessage s1 = AirbyteMessageUtils.createStateMessage(1); - - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(s1); - messageTracker.acceptFromDestination(s1); - - assertTrue(messageTracker.getTotalRecordsCommitted().isEmpty()); - } - - @Test - void testGetTotalRecordsCommitted_emptyWhenCommitStateHashThrowsException() throws Exception { - Mockito.doThrow(new StateDeltaTrackerException(INDUCED_EXCEPTION)).when(mStateDeltaTracker).commitStateHash(Mockito.anyInt()); - - final AirbyteMessage r1 = 
AirbyteMessageUtils.createRecordMessage(STREAM_1, 1); - final AirbyteMessage s1 = AirbyteMessageUtils.createStateMessage(1); - - messageTracker.acceptFromSource(r1); - messageTracker.acceptFromSource(s1); - messageTracker.acceptFromDestination(s1); - - assertTrue(messageTracker.getTotalRecordsCommitted().isEmpty()); - } - - @Test - void testGetFirstDestinationAndSourceMessages() { - final AirbyteMessage sourceMessage1 = AirbyteMessageUtils.createErrorMessage("source trace 1", 123.0); - final AirbyteMessage sourceMessage2 = AirbyteMessageUtils.createErrorMessage("source trace 2", 124.0); - final AirbyteMessage destMessage1 = AirbyteMessageUtils.createErrorMessage("dest trace 1", 125.0); - final AirbyteMessage destMessage2 = AirbyteMessageUtils.createErrorMessage("dest trace 2", 126.0); - messageTracker.acceptFromSource(sourceMessage1); - messageTracker.acceptFromSource(sourceMessage2); - messageTracker.acceptFromDestination(destMessage1); - messageTracker.acceptFromDestination(destMessage2); - - assertEquals(messageTracker.getFirstDestinationErrorTraceMessage(), destMessage1.getTrace()); - assertEquals(messageTracker.getFirstSourceErrorTraceMessage(), sourceMessage1.getTrace()); - } - - @Test - void testGetFirstDestinationAndSourceMessagesWithNulls() { - assertNull(messageTracker.getFirstDestinationErrorTraceMessage()); - assertNull(messageTracker.getFirstSourceErrorTraceMessage()); - } - - @Test - void testErrorTraceMessageFailureWithMultipleTraceErrors() { - final AirbyteMessage sourceMessage1 = AirbyteMessageUtils.createErrorMessage("source trace 1", 123.0); - final AirbyteMessage sourceMessage2 = AirbyteMessageUtils.createErrorMessage("source trace 2", 124.0); - final AirbyteMessage destMessage1 = AirbyteMessageUtils.createErrorMessage("dest trace 1", 125.0); - final AirbyteMessage destMessage2 = AirbyteMessageUtils.createErrorMessage("dest trace 2", 126.0); - messageTracker.acceptFromSource(sourceMessage1); - messageTracker.acceptFromSource(sourceMessage2); - 
messageTracker.acceptFromDestination(destMessage1); - messageTracker.acceptFromDestination(destMessage2); - - final FailureReason failureReason = FailureHelper.sourceFailure(sourceMessage1.getTrace(), Long.valueOf(123), 1); - assertEquals(messageTracker.errorTraceMessageFailure(123L, 1), - failureReason); - } - - @Test - void testErrorTraceMessageFailureWithOneTraceError() { - final AirbyteMessage destMessage = AirbyteMessageUtils.createErrorMessage("dest trace 1", 125.0); - messageTracker.acceptFromDestination(destMessage); - - final FailureReason failureReason = FailureHelper.destinationFailure(destMessage.getTrace(), Long.valueOf(123), 1); - assertEquals(messageTracker.errorTraceMessageFailure(123L, 1), failureReason); - } - - @Test - void testErrorTraceMessageFailureWithNoTraceErrors() { - assertEquals(messageTracker.errorTraceMessageFailure(123L, 1), null); - } - - @Nested - class Estimates { - - // receiving an estimate for two streams should save - @Test - @DisplayName("when given stream estimates, should return correct per-stream estimates") - void streamShouldSaveAndReturnIndividualStreamCountsCorrectly() { - final var est1 = AirbyteMessageUtils.createStreamEstimateMessage(STREAM_1, NAMESPACE_1, 100L, 10L); - final var est2 = AirbyteMessageUtils.createStreamEstimateMessage(STREAM_2, NAMESPACE_1, 200L, 10L); - - messageTracker.acceptFromSource(est1); - messageTracker.acceptFromSource(est2); - - final var streamToEstBytes = messageTracker.getStreamToEstimatedBytes(); - final var expStreamToEstBytes = Map.of( - new AirbyteStreamNameNamespacePair(STREAM_1, NAMESPACE_1), 100L, - new AirbyteStreamNameNamespacePair(STREAM_2, NAMESPACE_1), 200L); - assertEquals(expStreamToEstBytes, streamToEstBytes); - - final var streamToEstRecs = messageTracker.getStreamToEstimatedRecords(); - final var expStreamToEstRecs = Map.of( - new AirbyteStreamNameNamespacePair(STREAM_1, NAMESPACE_1), 10L, - new AirbyteStreamNameNamespacePair(STREAM_2, NAMESPACE_1), 10L); - 
assertEquals(expStreamToEstRecs, streamToEstRecs); - } - - @Test - @DisplayName("when given stream estimates, should return correct total estimates") - void streamShouldSaveAndReturnTotalCountsCorrectly() { - final var est1 = AirbyteMessageUtils.createStreamEstimateMessage(STREAM_1, NAMESPACE_1, 100L, 10L); - final var est2 = AirbyteMessageUtils.createStreamEstimateMessage(STREAM_2, NAMESPACE_1, 200L, 10L); - - messageTracker.acceptFromSource(est1); - messageTracker.acceptFromSource(est2); - - final var totalEstBytes = messageTracker.getTotalBytesEstimated(); - assertEquals(300L, totalEstBytes); - - final var totalEstRecs = messageTracker.getTotalRecordsEstimated(); - assertEquals(20L, totalEstRecs); - } - - @Test - @DisplayName("should error when given both Stream and Sync estimates") - void shouldErrorOnBothStreamAndSyncEstimates() { - final var est1 = AirbyteMessageUtils.createStreamEstimateMessage(STREAM_1, NAMESPACE_1, 100L, 10L); - final var est2 = AirbyteMessageUtils.createSyncEstimateMessage(200L, 10L); - - messageTracker.acceptFromSource(est1); - assertThrows(IllegalArgumentException.class, () -> messageTracker.acceptFromSource(est2)); - } - - @Test - @DisplayName("when given sync estimates, should return correct total estimates") - void syncShouldSaveAndReturnTotalCountsCorrectly() { - final var est = AirbyteMessageUtils.createSyncEstimateMessage(200L, 10L); - messageTracker.acceptFromSource(est); - - final var totalEstBytes = messageTracker.getTotalBytesEstimated(); - assertEquals(200L, totalEstBytes); - - final var totalEstRecs = messageTracker.getTotalRecordsEstimated(); - assertEquals(10L, totalEstRecs); - } - - @Test - @DisplayName("when given sync estimates, should not return any per-stream estimates") - void syncShouldNotHaveStreamEstimates() { - final var est = AirbyteMessageUtils.createSyncEstimateMessage(200L, 10L); - messageTracker.acceptFromSource(est); - - final var streamToEstBytes = messageTracker.getStreamToEstimatedBytes(); - 
assertTrue(streamToEstBytes.isEmpty()); - final var streamToEstRecs = messageTracker.getStreamToEstimatedRecords(); - assertTrue(streamToEstRecs.isEmpty()); - } - - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/StateDeltaTrackerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/StateDeltaTrackerTest.java deleted file mode 100644 index 2b7cfd664ab2..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/StateDeltaTrackerTest.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal.book_keeping; - -import io.airbyte.workers.internal.book_keeping.StateDeltaTracker.StateDeltaTrackerException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class StateDeltaTrackerTest { - - private static final int STATE_1_HASH = 1; - private static final int STATE_2_HASH = 2; - private static final int STATE_3_HASH = Integer.MAX_VALUE; - private static final int NEVER_ADDED_STATE_HASH = 20; - - private static final short STREAM_INDEX_1 = (short) 111; - private static final short STREAM_INDEX_2 = (short) 222; - private static final short STREAM_INDEX_3 = (short) 333; - private static final short STREAM_INDEX_4 = Short.MAX_VALUE; - - private static final long STATE_1_STREAM_1_COUNT = 11L; - private static final long STATE_1_STREAM_2_COUNT = 12L; - - private static final long STATE_2_STREAM_1_COUNT = 21L; - private static final long STATE_2_STREAM_3_COUNT = 23L; - - private static final long STATE_3_STREAM_3_COUNT = 33L; - private static final long STATE_3_STREAM_4_COUNT = 34L; - - // enough capacity for above 3 states, which are each 24 bytes (8 byte hash + two 10 byte stream - // counts - private static final long 
INITIAL_DELTA_MEMORY_CAPACITY = 72L; - - private StateDeltaTracker stateDeltaTracker; - - @BeforeEach - void setup() throws Exception { - final Map state1Counts = new HashMap<>(); - state1Counts.put(STREAM_INDEX_1, STATE_1_STREAM_1_COUNT); - state1Counts.put(STREAM_INDEX_2, STATE_1_STREAM_2_COUNT); - - final Map state2Counts = new HashMap<>(); - state2Counts.put(STREAM_INDEX_1, STATE_2_STREAM_1_COUNT); - state2Counts.put(STREAM_INDEX_3, STATE_2_STREAM_3_COUNT); - - final Map state3Counts = new HashMap<>(); - state3Counts.put(STREAM_INDEX_3, STATE_3_STREAM_3_COUNT); - state3Counts.put(STREAM_INDEX_4, STATE_3_STREAM_4_COUNT); - - stateDeltaTracker = new StateDeltaTracker(INITIAL_DELTA_MEMORY_CAPACITY); - stateDeltaTracker.addState(STATE_1_HASH, state1Counts); - stateDeltaTracker.addState(STATE_2_HASH, state2Counts); - stateDeltaTracker.addState(STATE_3_HASH, state3Counts); - } - - @Test - void testAddState_throwsExceptionWhenCapacityExceeded() { - Assertions.assertThrows(StateDeltaTrackerException.class, () -> stateDeltaTracker.addState(4, Collections.singletonMap((short) 444, 44L))); - Assertions.assertTrue(stateDeltaTracker.capacityExceeded); - } - - @Test - void testCommitStateHash_throwsExceptionWhenStateHashConflict() throws Exception { - stateDeltaTracker.commitStateHash(STATE_1_HASH); - stateDeltaTracker.commitStateHash(STATE_2_HASH); - - Assertions.assertThrows(StateDeltaTrackerException.class, () -> stateDeltaTracker.commitStateHash(STATE_1_HASH)); - } - - @Test - void testCommitStateHash_throwsExceptionIfCapacityExceededEarlier() { - stateDeltaTracker.capacityExceeded = true; - Assertions.assertThrows(StateDeltaTrackerException.class, () -> stateDeltaTracker.commitStateHash(STATE_1_HASH)); - } - - @Test - void testCommitStateHash_throwsExceptionIfCommitStateHashCalledBeforeAddingState() { - Assertions.assertThrows(StateDeltaTrackerException.class, () -> stateDeltaTracker.commitStateHash(NEVER_ADDED_STATE_HASH)); - } - - @Test - void 
testGetCommittedRecordsByStream() throws Exception { - // before anything is committed, returned map should be empty and deltas should contain three states - final Map expected = new HashMap<>(); - Assertions.assertEquals(expected, stateDeltaTracker.getStreamToCommittedRecords()); - Assertions.assertEquals(3, stateDeltaTracker.stateDeltas.size()); - - stateDeltaTracker.commitStateHash(STATE_1_HASH); - expected.put(STREAM_INDEX_1, STATE_1_STREAM_1_COUNT); - expected.put(STREAM_INDEX_2, STATE_1_STREAM_2_COUNT); - Assertions.assertEquals(expected, stateDeltaTracker.getStreamToCommittedRecords()); - Assertions.assertEquals(2, stateDeltaTracker.stateDeltas.size()); - expected.clear(); - - stateDeltaTracker.commitStateHash(STATE_2_HASH); - expected.put(STREAM_INDEX_1, STATE_1_STREAM_1_COUNT + STATE_2_STREAM_1_COUNT); - expected.put(STREAM_INDEX_2, STATE_1_STREAM_2_COUNT); - expected.put(STREAM_INDEX_3, STATE_2_STREAM_3_COUNT); - Assertions.assertEquals(expected, stateDeltaTracker.getStreamToCommittedRecords()); - Assertions.assertEquals(1, stateDeltaTracker.stateDeltas.size()); - expected.clear(); - - stateDeltaTracker.commitStateHash(STATE_3_HASH); - expected.put(STREAM_INDEX_1, STATE_1_STREAM_1_COUNT + STATE_2_STREAM_1_COUNT); - expected.put(STREAM_INDEX_2, STATE_1_STREAM_2_COUNT); - expected.put(STREAM_INDEX_3, STATE_2_STREAM_3_COUNT + STATE_3_STREAM_3_COUNT); - expected.put(STREAM_INDEX_4, STATE_3_STREAM_4_COUNT); - Assertions.assertEquals(expected, stateDeltaTracker.getStreamToCommittedRecords()); - - // since all states are committed, capacity should be freed and the delta queue should be empty - Assertions.assertEquals(INITIAL_DELTA_MEMORY_CAPACITY, stateDeltaTracker.remainingCapacity); - Assertions.assertEquals(0, stateDeltaTracker.stateDeltas.size()); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/StateMetricsTrackerTest.java 
b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/StateMetricsTrackerTest.java deleted file mode 100644 index f4a0098f733f..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/book_keeping/StateMetricsTrackerTest.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal.book_keeping; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.workers.internal.book_keeping.StateMetricsTracker.StateMetricsTrackerNoStateMatchException; -import io.airbyte.workers.internal.book_keeping.StateMetricsTracker.StateMetricsTrackerOomException; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import java.time.LocalDateTime; -import java.time.format.DateTimeFormatter; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class StateMetricsTrackerTest { - - private StateMetricsTracker stateMetricsTracker; - private static final String STREAM_1 = "stream1"; - private static final String STREAM_2 = "stream2"; - private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); - private static final String SECOND_ZERO = "2022-01-01 12:00:00"; - private static final String SECOND_ONE = "2022-01-01 12:00:01"; - private static final String SECOND_TWO = "2022-01-01 12:00:02"; - private static final String SECOND_FIVE = "2022-01-01 12:00:05"; - private static final String SECOND_SIX = "2022-01-01 12:00:06"; - - @BeforeEach - void setup() { - this.stateMetricsTracker = new StateMetricsTracker(873813L); - } - - @Test - void testCalculateMean() throws Exception { - // Mean for 3 state messages is 5, 4th state message is 9, new mean should be 6 - assertEquals(6L, 
stateMetricsTracker.calculateMean(5L, 4L, 9L)); - - // Mean for 5 state messages is 10, 4th state message is 12, new mean is 10.33 rounded down to 10 - assertEquals(10L, stateMetricsTracker.calculateMean(10L, 6L, 12L)); - } - - @Test - void testStreamMaxandMeanSecondsBetweenStateMessageEmittedandCommitted() - throws StateMetricsTrackerOomException, StateMetricsTrackerNoStateMatchException { - final AirbyteStateMessage s1s1 = AirbyteMessageUtils.createStreamStateMessage(STREAM_1, 1); - final AirbyteStateMessage s1s2 = AirbyteMessageUtils.createStreamStateMessage(STREAM_1, 2); - final AirbyteStateMessage s1s3 = AirbyteMessageUtils.createStreamStateMessage(STREAM_1, 3); - final AirbyteStateMessage s2s1 = AirbyteMessageUtils.createStreamStateMessage(STREAM_2, 1); - final AirbyteStateMessage s2s2 = AirbyteMessageUtils.createStreamStateMessage(STREAM_2, 2); - - stateMetricsTracker.addState(s1s1, 0, LocalDateTime.parse(SECOND_ZERO, FORMATTER)); // stream 1 state - stateMetricsTracker.addState(s1s2, 1, LocalDateTime.parse(SECOND_ONE, FORMATTER)); // stream 1 state - stateMetricsTracker.addState(s2s1, 0, LocalDateTime.parse(SECOND_TWO, FORMATTER)); // stream 2 state - stateMetricsTracker.addState(s1s3, 2, LocalDateTime.parse("2022-01-01 12:00:03", FORMATTER)); // stream 1 state - - // Committed up to 2nd state message in stream 1 - time to commit is 5 seconds (second 00 to second - // 05) - stateMetricsTracker.incrementTotalDestinationEmittedStateMessages(); - stateMetricsTracker.updateStates(s1s2, 1, LocalDateTime.parse(SECOND_FIVE, FORMATTER)); - - // Committed final state message for stream 1 - time to commit is 7 seconds (second 03 to second 10) - stateMetricsTracker.incrementTotalDestinationEmittedStateMessages(); - stateMetricsTracker.updateStates(s1s3, 2, LocalDateTime.parse("2022-01-01 12:00:10", FORMATTER)); - - stateMetricsTracker.addState(s2s2, 2, LocalDateTime.parse("2022-01-01 12:00:11", FORMATTER)); - - // Commit final state message for stream 2 - time to 
commit is 12 seconds (second 14 - second 02) - stateMetricsTracker.incrementTotalDestinationEmittedStateMessages(); - stateMetricsTracker.updateStates(s2s2, 2, LocalDateTime.parse("2022-01-01 12:00:14", FORMATTER)); - - // max time across both streams was 12, mean time across all streams was (5 + 7 + 12)/3 == 24/3 == 8 - assertEquals(12L, stateMetricsTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted()); - assertEquals(8L, stateMetricsTracker.getMeanSecondsBetweenStateMessageEmittedAndCommitted()); - } - - @Test - void testGlobalMaxandMeanSecondsBetweenStateMessageEmittedandCommitted() - throws StateMetricsTrackerOomException, StateMetricsTrackerNoStateMatchException { - final AirbyteMessage s1 = AirbyteMessageUtils.createGlobalStateMessage(1, STREAM_1); - final AirbyteMessage s2 = AirbyteMessageUtils.createGlobalStateMessage(2, STREAM_1); - final AirbyteMessage s3 = AirbyteMessageUtils.createGlobalStateMessage(3, STREAM_1); - - // 3 global state messages emitted - stateMetricsTracker.addState(s1.getState(), 0, LocalDateTime.parse(SECOND_ZERO, FORMATTER)); - stateMetricsTracker.addState(s2.getState(), 1, LocalDateTime.parse(SECOND_ONE, FORMATTER)); - stateMetricsTracker.addState(s3.getState(), 2, LocalDateTime.parse(SECOND_TWO, FORMATTER)); - - // Committed up to 2nd state message - time to commit is 5 seconds (second 00 to second 05) - stateMetricsTracker.incrementTotalDestinationEmittedStateMessages(); - stateMetricsTracker.updateStates(s2.getState(), 1, LocalDateTime.parse(SECOND_FIVE, FORMATTER)); - - // Committed final state message - time to commit is 7 seconds (second 02 to second 09) - stateMetricsTracker.incrementTotalDestinationEmittedStateMessages(); - stateMetricsTracker.updateStates(s3.getState(), 2, LocalDateTime.parse("2022-01-01 12:00:09", FORMATTER)); - - assertEquals(7L, stateMetricsTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted()); - assertEquals(6L, 
stateMetricsTracker.getMeanSecondsBetweenStateMessageEmittedAndCommitted()); - } - - @Test - void testStateMetricsTrackerOomExceptionThrown() throws StateMetricsTrackerOomException { - final StateMetricsTracker stateMetricsTrackerOom = new StateMetricsTracker(2L); - - final AirbyteMessage s1 = AirbyteMessageUtils.createGlobalStateMessage(1, STREAM_1); - final AirbyteMessage s2 = AirbyteMessageUtils.createGlobalStateMessage(2, STREAM_1); - final AirbyteMessage s3 = AirbyteMessageUtils.createGlobalStateMessage(3, STREAM_1); - - // 3 global state messages emitted - stateMetricsTrackerOom.addState(s1.getState(), 0, LocalDateTime.parse(SECOND_ZERO, FORMATTER)); - stateMetricsTrackerOom.addState(s2.getState(), 1, LocalDateTime.parse(SECOND_ONE, FORMATTER)); - - assertThrows(StateMetricsTrackerOomException.class, - () -> stateMetricsTrackerOom.addState(s3.getState(), 2, LocalDateTime.parse(SECOND_TWO, FORMATTER))); - - } - - @Test - void testStateMetricsTrackerNoStateMatchExceptionThrown() throws StateMetricsTrackerNoStateMatchException, StateMetricsTrackerOomException { - final AirbyteMessage s1 = AirbyteMessageUtils.createGlobalStateMessage(1, STREAM_1); - final AirbyteMessage s2 = AirbyteMessageUtils.createGlobalStateMessage(2, STREAM_1); - final AirbyteMessage s3 = AirbyteMessageUtils.createGlobalStateMessage(3, STREAM_1); - - // destination emits state message hash when there are no source state message hashes stored - stateMetricsTracker.incrementTotalDestinationEmittedStateMessages(); - assertThrows(StateMetricsTrackerNoStateMatchException.class, - () -> stateMetricsTracker.updateStates(s1.getState(), 4, LocalDateTime.parse(SECOND_FIVE, FORMATTER))); - - stateMetricsTracker.addState(s1.getState(), 0, LocalDateTime.parse(SECOND_ZERO, FORMATTER)); - stateMetricsTracker.addState(s2.getState(), 1, LocalDateTime.parse(SECOND_ONE, FORMATTER)); - stateMetricsTracker.addState(s3.getState(), 2, LocalDateTime.parse(SECOND_TWO, FORMATTER)); - - // destination emits a state 
message hash that does not correspond to any source state message - // hashes - assertThrows(StateMetricsTrackerNoStateMatchException.class, - () -> stateMetricsTracker.updateStates(s3.getState(), 4, LocalDateTime.parse(SECOND_FIVE, FORMATTER))); - } - - @Test - void testStreamMaxandMeanSecondsBeforeStateMessageEmitted() { - // first record received at second 0 - stateMetricsTracker.setFirstRecordReceivedAt(LocalDateTime.parse(SECOND_ZERO, FORMATTER)); - - // receive state at second 2 - stateMetricsTracker.incrementTotalSourceEmittedStateMessages(); - stateMetricsTracker.updateMaxAndMeanSecondsToReceiveStateMessage(LocalDateTime.parse(SECOND_TWO, FORMATTER)); - stateMetricsTracker.setLastStateMessageReceivedAt(LocalDateTime.parse(SECOND_TWO, FORMATTER)); - // max and mean seconds to receive state message are both 2 seconds - assertEquals(2L, stateMetricsTracker.getMaxSecondsToReceiveSourceStateMessage()); - assertEquals(2L, stateMetricsTracker.getMeanSecondsToReceiveSourceStateMessage()); - - // another state message received after 4 more seconds - stateMetricsTracker.incrementTotalSourceEmittedStateMessages(); - stateMetricsTracker.updateMaxAndMeanSecondsToReceiveStateMessage(LocalDateTime.parse(SECOND_SIX, FORMATTER)); - stateMetricsTracker.setLastStateMessageReceivedAt(LocalDateTime.parse(SECOND_SIX, FORMATTER)); - - // max and mean seconds to receive state message are both 2 seconds - assertEquals(4L, stateMetricsTracker.getMaxSecondsToReceiveSourceStateMessage()); - assertEquals(3L, stateMetricsTracker.getMeanSecondsToReceiveSourceStateMessage()); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/state_aggregator/StateAggregatorTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/state_aggregator/StateAggregatorTest.java deleted file mode 100644 index 77f51d0955f5..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/state_aggregator/StateAggregatorTest.java +++ 
/dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal.state_aggregator; - -import static io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType.GLOBAL; -import static io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType.LEGACY; -import static io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType.STREAM; - -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.State; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.StreamDescriptor; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Stream; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.EnumSource; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class StateAggregatorTest { - - StateAggregator stateAggregator; - boolean USE_STREAM_CAPABLE_STATE = true; - boolean DONT_USE_STREAM_CAPABLE_STATE = false; - - @BeforeEach - void init() { - stateAggregator = new DefaultStateAggregator(DONT_USE_STREAM_CAPABLE_STATE); - } - - @ParameterizedTest - @EnumSource(AirbyteStateType.class) - void testCantMixType(final AirbyteStateType stateType) { - final Stream allTypes = Arrays.stream(AirbyteStateType.values()); - - stateAggregator.ingest(getEmptyMessage(stateType)); - - final List differentTypes = allTypes.filter(type -> type != stateType).toList(); - differentTypes.forEach(differentType -> Assertions.assertThatThrownBy(() -> stateAggregator.ingest(getEmptyMessage(differentType)))); - } - - @Test - void testCantMixNullType() { - final List 
allIncompatibleTypes = Lists.newArrayList(GLOBAL, STREAM); - - stateAggregator.ingest(getEmptyMessage(null)); - - allIncompatibleTypes.forEach(differentType -> Assertions.assertThatThrownBy(() -> stateAggregator.ingest(getEmptyMessage(differentType)))); - - stateAggregator.ingest(getEmptyMessage(LEGACY)); - } - - @Test - void testNullState() { - final AirbyteStateMessage state1 = getNullMessage(1); - final AirbyteStateMessage state2 = getNullMessage(2); - - stateAggregator.ingest(state1); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(state1.getData())); - - stateAggregator.ingest(state2); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(state2.getData())); - } - - @Test - void testLegacyState() { - final AirbyteStateMessage state1 = getLegacyMessage(1); - final AirbyteStateMessage state2 = getLegacyMessage(2); - - stateAggregator.ingest(state1); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(state1.getData())); - - stateAggregator.ingest(state2); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(state2.getData())); - } - - @Test - void testGlobalState() { - final AirbyteStateMessage state1 = getGlobalMessage(1); - final AirbyteStateMessage state2 = getGlobalMessage(2); - - final AirbyteStateMessage state1NoData = getGlobalMessage(1).withData(null); - final AirbyteStateMessage state2NoData = getGlobalMessage(2).withData(null); - - stateAggregator.ingest(Jsons.object(Jsons.jsonNode(state1), AirbyteStateMessage.class)); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(Jsons.jsonNode(List.of(state1NoData)))); - - stateAggregator.ingest(Jsons.object(Jsons.jsonNode(state2), AirbyteStateMessage.class)); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(Jsons.jsonNode(List.of(state2NoData)))); - } - - @Test - void 
testStreamStateWithFeatureFlagOff() { - final AirbyteStateMessage state1 = getStreamMessage("a", 1); - final AirbyteStateMessage state2 = getStreamMessage("b", 2); - final AirbyteStateMessage state3 = getStreamMessage("b", 3); - - stateAggregator.ingest(state1); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(Jsons.jsonNode(List.of(state1)))); - - stateAggregator.ingest(state2); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(Jsons.jsonNode(List.of(state2)))); - - stateAggregator.ingest(state3); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(Jsons.jsonNode(List.of(state3)))); - } - - @Test - void testStreamStateWithFeatureFlagOn() { - final AirbyteStateMessage state1 = getStreamMessage("a", 1); - final AirbyteStateMessage state2 = getStreamMessage("b", 2); - final AirbyteStateMessage state3 = getStreamMessage("b", 3); - - final AirbyteStateMessage state1NoData = getStreamMessage("a", 1).withData(null); - final AirbyteStateMessage state2NoData = getStreamMessage("b", 2).withData(null); - final AirbyteStateMessage state3NoData = getStreamMessage("b", 3).withData(null); - - stateAggregator = new DefaultStateAggregator(USE_STREAM_CAPABLE_STATE); - - stateAggregator.ingest(Jsons.object(Jsons.jsonNode(state1), AirbyteStateMessage.class)); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(Jsons.jsonNode(List.of(state1NoData)))); - - stateAggregator.ingest(Jsons.object(Jsons.jsonNode(state2), AirbyteStateMessage.class)); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(Jsons.jsonNode(List.of(state2NoData, state1NoData)))); - - stateAggregator.ingest(Jsons.object(Jsons.jsonNode(state3), AirbyteStateMessage.class)); - Assertions.assertThat(stateAggregator.getAggregated()).isEqualTo(new State() - .withState(Jsons.jsonNode(List.of(state3NoData, state1NoData)))); 
- } - - private AirbyteStateMessage getNullMessage(final int stateValue) { - return new AirbyteStateMessage().withData(Jsons.jsonNode(stateValue)); - } - - private AirbyteStateMessage getLegacyMessage(final int stateValue) { - return new AirbyteStateMessage().withType(LEGACY).withData(Jsons.jsonNode(stateValue)); - } - - private AirbyteStateMessage getGlobalMessage(final int stateValue) { - return new AirbyteStateMessage().withType(GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withStreamStates( - List.of( - new AirbyteStreamState() - .withStreamDescriptor( - new StreamDescriptor() - .withName("test")) - .withStreamState(Jsons.jsonNode(stateValue))))) - .withData(Jsons.jsonNode("HelloWorld")); - } - - private AirbyteStateMessage getStreamMessage(final String streamName, final int stateValue) { - return new AirbyteStateMessage().withType(STREAM) - .withStream( - new AirbyteStreamState() - .withStreamDescriptor( - new StreamDescriptor() - .withName(streamName)) - .withStreamState(Jsons.jsonNode(stateValue))) - .withData(Jsons.jsonNode("Hello")); - } - - private AirbyteStateMessage getEmptyMessage(final AirbyteStateType stateType) { - if (stateType == STREAM) { - return new AirbyteStateMessage() - .withType(STREAM) - .withStream( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor())); - } - - return new AirbyteStateMessage().withType(stateType); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java deleted file mode 100644 index 22b8f824f2a3..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.normalization; - -import static io.airbyte.commons.logging.LoggingHelper.RESET; -import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; -import static io.airbyte.workers.process.Metadata.NORMALIZE_STEP; -import static io.airbyte.workers.process.Metadata.SYNC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.LoggingHelper.Color; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.process.ProcessFactory; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Map; -import java.util.stream.Stream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class DefaultNormalizationRunnerTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultNormalizationRunnerTest.class); - - private static final String 
JOB_ID = "0"; - private static final int JOB_ATTEMPT = 0; - - private static final String NORMALIZATION_IMAGE = "airbyte/normalization"; - private static final String NORMALIZATION_TAG = "42.42.42"; - private static final String INTEGRATION_TYPE = "postgres"; - - private static Path logJobRoot; - - static { - try { - logJobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); - LogClientSingleton.getInstance().setJobMdc(WorkerEnvironment.DOCKER, LogConfigs.EMPTY, logJobRoot); - } catch (final IOException e) { - LOGGER.error(e.getMessage()); - } - } - - private WorkerConfigs workerConfigs; - private Path jobRoot; - private ProcessFactory processFactory; - private Process process; - private JsonNode config; - private ConfiguredAirbyteCatalog catalog; - - @BeforeEach - void setup() throws IOException, WorkerException { - workerConfigs = new WorkerConfigs(new EnvConfigs()); - jobRoot = Files.createDirectories(Files.createTempDirectory("test")); - processFactory = mock(ProcessFactory.class); - process = mock(Process.class); - - config = mock(JsonNode.class); - catalog = mock(ConfiguredAirbyteCatalog.class); - - final Map files = ImmutableMap.of( - WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, Jsons.serialize(config), - WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME, Jsons.serialize(catalog)); - - when(processFactory.create(NORMALIZE_STEP, JOB_ID, JOB_ATTEMPT, jobRoot, - getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), false, false, files, null, - workerConfigs.getResourceRequirements(), - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, NORMALIZE_STEP), - Map.of(), - Map.of(), - "run", - "--integration-type", INTEGRATION_TYPE, - "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - "--catalog", WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME)) - .thenReturn(process); - when(process.getInputStream()).thenReturn(new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8))); - when(process.getErrorStream()).thenReturn(new 
ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8))); - } - - @AfterEach - public void tearDown() throws IOException { - // The log file needs to be present and empty - final Path logFile = logJobRoot.resolve(LogClientSingleton.LOG_FILENAME); - if (Files.exists(logFile)) { - Files.delete(logFile); - } - Files.createFile(logFile); - } - - @Test - void test() throws Exception { - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - - when(process.exitValue()).thenReturn(0); - - assertTrue(runner.normalize(JOB_ID, JOB_ATTEMPT, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - } - - @Test - void testLog() throws Exception { - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - - when(process.exitValue()).thenReturn(0); - - assertTrue(runner.normalize(JOB_ID, JOB_ATTEMPT, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - final Path logPath = logJobRoot.resolve(LogClientSingleton.LOG_FILENAME); - final Stream logs = IOs.readFile(logPath).lines(); - - logs - .filter(line -> !line.contains("EnvConfigs(getEnvOrDefault)")) - .forEach(line -> { - org.assertj.core.api.Assertions.assertThat(line) - .startsWith(Color.GREEN_BACKGROUND.getCode() + "normalization" + RESET); - }); - } - - @Test - void testClose() throws Exception { - when(process.isAlive()).thenReturn(true).thenReturn(false); - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - runner.normalize(JOB_ID, JOB_ATTEMPT, jobRoot, config, catalog, workerConfigs.getResourceRequirements()); - runner.close(); - - verify(process).waitFor(); - } - - @Test - void testFailure() throws Exception { - when(process.exitValue()).thenReturn(1); 
- - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - assertFalse(runner.normalize(JOB_ID, JOB_ATTEMPT, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - verify(process).waitFor(); - - assertThrows(WorkerException.class, runner::close); - } - - @Test - void testFailureWithTraceMessage() throws Exception { - when(process.exitValue()).thenReturn(1); - - final String errorTraceString = """ - {"type": "TRACE", "trace": { - "type": "ERROR", "emitted_at": 123.0, "error": { - "message": "Something went wrong in normalization.", "internal_message": "internal msg", - "stack_trace": "abc.xyz", "failure_type": "system_error"}}} - """.replace("\n", ""); - when(process.getInputStream()).thenReturn(new ByteArrayInputStream(errorTraceString.getBytes(StandardCharsets.UTF_8))); - - final NormalizationRunner runner = new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), - INTEGRATION_TYPE); - assertFalse(runner.normalize(JOB_ID, JOB_ATTEMPT, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - assertEquals(1, runner.getTraceMessages().count()); - - verify(process).waitFor(); - - assertThrows(WorkerException.class, runner::close); - } - - @Test - void testFailureWithDbtError() throws Exception { - when(process.exitValue()).thenReturn(1); - - final String dbtErrorString = """ - [info ] [MainThread]: Completed with 1 error and 0 warnings: - [info ] [MainThread]: - [error] [MainThread]: Database Error in model xyz (models/generated/airbyte_incremental/abc/xyz.sql) - [error] [MainThread]: 1292 (22007): Truncated incorrect DOUBLE value: 'ABC' - [error] [MainThread]: compiled SQL at ../build/run/airbyte_utils/models/generated/airbyte_incremental/abc/xyz.sql - [info ] [MainThread]: - [info ] [MainThread]: Done. 
PASS=1 WARN=0 ERROR=1 SKIP=0 TOTAL=2 - """; - when(process.getInputStream()).thenReturn(new ByteArrayInputStream(dbtErrorString.getBytes(StandardCharsets.UTF_8))); - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - assertFalse(runner.normalize(JOB_ID, JOB_ATTEMPT, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - assertEquals(1, runner.getTraceMessages().count()); - - verify(process).waitFor(); - - assertThrows(WorkerException.class, runner::close); - } - - @Test - void testFailureWithDbtErrorJsonFormat() throws Exception { - when(process.exitValue()).thenReturn(1); - - final String dbtErrorString = - """ - {"code": "Q035", "data": {"description": "table model public.start_products", "execution_time": 0.1729569435119629, "index": 1, "status": "error", "total": 2}, "invocation_id": "6ada8ee5-11c1-4239-8bd0-7e45178217c5", "level": "error", "log_version": 1, "msg": "1 of 2 ERROR creating table model public.start_products................................................................. 
[\\u001b[31mERROR\\u001b[0m in 0.17s]", "node_info": {"materialized": "table", "node_finished_at": null, "node_name": "start_products", "node_path": "generated/airbyte_incremental/public/start_products.sql", "node_started_at": "2022-07-18T15:04:27.036328", "node_status": "compiling", "resource_type": "model", "type": "node_status", "unique_id": "model.airbyte_utils.start_products"}, "pid": 14, "thread_name": "Thread-1", "ts": "2022-07-18T15:04:27.215077Z", "type": "log_line"} - """; - when(process.getInputStream()).thenReturn(new ByteArrayInputStream(dbtErrorString.getBytes(StandardCharsets.UTF_8))); - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - assertFalse(runner.normalize(JOB_ID, JOB_ATTEMPT, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - assertEquals(1, runner.getTraceMessages().count()); - - verify(process).waitFor(); - - assertThrows(WorkerException.class, runner::close); - } - - static String getTaggedImageName(final String repository, final String tag) { - return repository + ":" + tag; - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/AirbyteIntegrationLauncherTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/AirbyteIntegrationLauncherTest.java deleted file mode 100644 index da5040edf412..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/AirbyteIntegrationLauncherTest.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.process; - -import static io.airbyte.workers.process.Metadata.CHECK_JOB; -import static io.airbyte.workers.process.Metadata.DISCOVER_JOB; -import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; -import static io.airbyte.workers.process.Metadata.READ_STEP; -import static io.airbyte.workers.process.Metadata.SPEC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; -import static io.airbyte.workers.process.Metadata.WRITE_STEP; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.WorkerEnvConstants; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.exception.WorkerException; -import java.nio.file.Path; -import java.util.Collections; -import java.util.Map; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class AirbyteIntegrationLauncherTest { - - private static final String CONFIG = "config"; - private static final String CATALOG = "catalog"; - private static final String CONFIG_ARG = "--config"; - private static final String JOB_ID = "0"; - private static final int JOB_ATTEMPT = 0; - private static final Path JOB_ROOT = Path.of("abc"); - public static final String FAKE_IMAGE = "fake_image"; - private static final Map CONFIG_FILES = ImmutableMap.of( - CONFIG, "{}"); - private static final Map CONFIG_CATALOG_FILES = ImmutableMap.of( - CONFIG, "{}", - CATALOG, "{}"); - private static final Map CONFIG_CATALOG_STATE_FILES = 
ImmutableMap.of( - CONFIG, "{}", - CATALOG, "{}", - "state", "{}"); - - private static final FeatureFlags FEATURE_FLAGS = new EnvVariableFeatureFlags(); - private static final Configs CONFIGS = new EnvConfigs(); - - private static final Map JOB_METADATA = - Maps.newHashMap( - ImmutableMap.builder() - .put(WorkerEnvConstants.WORKER_CONNECTOR_IMAGE, FAKE_IMAGE) - .put(WorkerEnvConstants.WORKER_JOB_ID, JOB_ID) - .put(WorkerEnvConstants.WORKER_JOB_ATTEMPT, String.valueOf(JOB_ATTEMPT)) - .put(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, String.valueOf(FEATURE_FLAGS.useStreamCapableState())) - .put(EnvVariableFeatureFlags.AUTO_DETECT_SCHEMA, String.valueOf(FEATURE_FLAGS.autoDetectSchema())) - .put(EnvVariableFeatureFlags.APPLY_FIELD_SELECTION, String.valueOf(FEATURE_FLAGS.applyFieldSelection())) - .put(EnvVariableFeatureFlags.FIELD_SELECTION_WORKSPACES, FEATURE_FLAGS.fieldSelectionWorkspaces()) - .put(EnvVariableFeatureFlags.STRICT_COMPARISON_NORMALIZATION_WORKSPACES, FEATURE_FLAGS.strictComparisonNormalizationWorkspaces()) - .put(EnvVariableFeatureFlags.STRICT_COMPARISON_NORMALIZATION_TAG, FEATURE_FLAGS.strictComparisonNormalizationTag()) - .put(EnvConfigs.SOCAT_KUBE_CPU_LIMIT, CONFIGS.getSocatSidecarKubeCpuLimit()) - .put(EnvConfigs.SOCAT_KUBE_CPU_REQUEST, CONFIGS.getSocatSidecarKubeCpuRequest()) - .put(EnvConfigs.LAUNCHDARKLY_KEY, CONFIGS.getLaunchDarklyKey()) - .put(EnvConfigs.FEATURE_FLAG_CLIENT, CONFIGS.getFeatureFlagClient()) - .build()); - - private WorkerConfigs workerConfigs; - @Mock - private ProcessFactory processFactory; - private AirbyteIntegrationLauncher launcher; - - @BeforeEach - void setUp() { - workerConfigs = new WorkerConfigs(new EnvConfigs()); - launcher = new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, FAKE_IMAGE, processFactory, workerConfigs.getResourceRequirements(), null, false, - FEATURE_FLAGS); - } - - @Test - void spec() throws WorkerException { - launcher.spec(JOB_ROOT); - - Mockito.verify(processFactory).create(SPEC_JOB, JOB_ID, 
JOB_ATTEMPT, JOB_ROOT, FAKE_IMAGE, false, false, Collections.emptyMap(), null, - workerConfigs.getResourceRequirements(), null, Map.of(JOB_TYPE_KEY, SPEC_JOB), JOB_METADATA, - Map.of(), - "spec"); - } - - @Test - void check() throws WorkerException { - launcher.check(JOB_ROOT, CONFIG, "{}"); - - Mockito.verify(processFactory).create(CHECK_JOB, JOB_ID, JOB_ATTEMPT, JOB_ROOT, FAKE_IMAGE, false, false, CONFIG_FILES, null, - workerConfigs.getResourceRequirements(), - null, - Map.of(JOB_TYPE_KEY, CHECK_JOB), - JOB_METADATA, - Map.of(), - "check", - CONFIG_ARG, CONFIG); - } - - @Test - void discover() throws WorkerException { - launcher.discover(JOB_ROOT, CONFIG, "{}"); - - Mockito.verify(processFactory).create(DISCOVER_JOB, JOB_ID, JOB_ATTEMPT, JOB_ROOT, FAKE_IMAGE, false, false, CONFIG_FILES, null, - workerConfigs.getResourceRequirements(), - null, - Map.of(JOB_TYPE_KEY, DISCOVER_JOB), - JOB_METADATA, - Map.of(), - "discover", - CONFIG_ARG, CONFIG); - } - - @Test - void read() throws WorkerException { - launcher.read(JOB_ROOT, CONFIG, "{}", CATALOG, "{}", "state", "{}"); - - Mockito.verify(processFactory).create(READ_STEP, JOB_ID, JOB_ATTEMPT, JOB_ROOT, FAKE_IMAGE, false, false, CONFIG_CATALOG_STATE_FILES, null, - workerConfigs.getResourceRequirements(), - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, READ_STEP), - JOB_METADATA, - Map.of(), - Lists.newArrayList( - "read", - CONFIG_ARG, CONFIG, - "--catalog", CATALOG, - "--state", "state").toArray(new String[0])); - } - - @Test - void write() throws WorkerException { - launcher.write(JOB_ROOT, CONFIG, "{}", CATALOG, "{}"); - - Mockito.verify(processFactory).create(WRITE_STEP, JOB_ID, JOB_ATTEMPT, JOB_ROOT, FAKE_IMAGE, false, true, CONFIG_CATALOG_FILES, null, - workerConfigs.getResourceRequirements(), - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, WRITE_STEP), - JOB_METADATA, - Map.of(), - "write", - CONFIG_ARG, CONFIG, - "--catalog", CATALOG); - } - -} diff --git 
a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/DockerProcessFactoryTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/DockerProcessFactoryTest.java deleted file mode 100644 index 91af9841e100..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/DockerProcessFactoryTest.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.when; - -import com.google.common.base.Stopwatch; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.EnvConfigs; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.exception.WorkerException; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Test; - -// todo (cgardens) - these are not truly "unit" tests as they are check resources on the internet. -// we should move them to "integration" tests, when we have facility to do so. -@Slf4j -class DockerProcessFactoryTest { - - private static final Path TEST_ROOT = Path.of("/tmp/airbyte_tests"); - private static final String PROCESS_FACTORY = "process_factory"; - private static final String BUSYBOX = "busybox"; - - /** - * {@link DockerProcessFactoryTest#testImageExists()} will fail if jq is not installed. The logs get - * swallowed when run from gradle. 
This test exists to explicitly fail with a clear error message - * when jq is not installed. - */ - @Test - void testJqExists() throws IOException { - final Process process = new ProcessBuilder("jq", "--version").start(); - final StringBuilder out = new StringBuilder(); - final StringBuilder err = new StringBuilder(); - LineGobbler.gobble(process.getInputStream(), out::append); - LineGobbler.gobble(process.getErrorStream(), err::append); - - WorkerUtils.gentleClose(process, 1, TimeUnit.MINUTES); - - assertEquals(0, process.exitValue(), - String.format("Error while checking for jq. STDOUT: %s STDERR: %s Please make sure jq is installed (used by testImageExists)", out, err)); - } - - /** - * This test will fail if jq is not installed. If run from gradle the log line that mentions the jq - * issue will be swallowed. The exception is visible if run from intellij or with STDERR logging - * turned on in gradle. - */ - @Test - void testImageExists() throws IOException, WorkerException { - final Path workspaceRoot = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), PROCESS_FACTORY); - - final DockerProcessFactory processFactory = new DockerProcessFactory(new WorkerConfigs(new EnvConfigs()), workspaceRoot, null, null, null); - assertTrue(processFactory.checkImageExists(BUSYBOX)); - } - - @Test - void testImageDoesNotExist() throws IOException, WorkerException { - final Path workspaceRoot = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), PROCESS_FACTORY); - - final DockerProcessFactory processFactory = new DockerProcessFactory(new WorkerConfigs(new EnvConfigs()), workspaceRoot, null, null, null); - assertFalse(processFactory.checkImageExists("airbyte/fake:0.1.2")); - } - - @Test - void testFileWriting() throws IOException, WorkerException { - final Path workspaceRoot = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), PROCESS_FACTORY); - final Path jobRoot = workspaceRoot.resolve("job"); - - final DockerProcessFactory processFactory = - 
new DockerProcessFactory(new WorkerConfigs(new EnvConfigs()), workspaceRoot, null, null, null); - processFactory.create("tester", "job_id", 0, jobRoot, BUSYBOX, false, false, ImmutableMap.of("config.json", "{\"data\": 2}"), "echo hi", - new WorkerConfigs(new EnvConfigs()).getResourceRequirements(), null, Map.of(), Map.of(), Map.of()); - - assertEquals( - Jsons.jsonNode(ImmutableMap.of("data", 2)), - Jsons.deserialize(IOs.readFile(jobRoot, "config.json"))); - } - - /** - * Tests that the env var map passed in is accessible within the process. - */ - @Test - void testEnvMapSet() throws IOException, WorkerException, InterruptedException { - final Path workspaceRoot = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), PROCESS_FACTORY); - final Path jobRoot = workspaceRoot.resolve("job"); - - final WorkerConfigs workerConfigs = spy(new WorkerConfigs(new EnvConfigs())); - when(workerConfigs.getEnvMap()).thenReturn(Map.of("ENV_VAR_1", "ENV_VALUE_1")); - - final DockerProcessFactory processFactory = - new DockerProcessFactory( - workerConfigs, - workspaceRoot, - null, - null, - "host"); - - waitForDockerToInitialize(processFactory, jobRoot, workerConfigs); - - final Process process = processFactory.create( - "tester", - "job_id", - 0, - jobRoot, - BUSYBOX, - false, - false, - Map.of(), - "/bin/sh", - workerConfigs.getResourceRequirements(), - null, - Map.of(), - Map.of(), - Map.of(), - "-c", - "echo ENV_VAR_1=$ENV_VAR_1"); - - final StringBuilder out = new StringBuilder(); - final StringBuilder err = new StringBuilder(); - LineGobbler.gobble(process.getInputStream(), out::append); - LineGobbler.gobble(process.getErrorStream(), err::append); - - WorkerUtils.gentleClose(process, 20, TimeUnit.SECONDS); - - assertEquals(0, process.exitValue(), String.format("Process failed with stdout: %s and stderr: %s", out, err)); - assertEquals("ENV_VAR_1=ENV_VALUE_1", out.toString(), String.format("Output did not contain the expected string. 
stdout: %s", out)); - } - - private void waitForDockerToInitialize(final ProcessFactory processFactory, final Path jobRoot, final WorkerConfigs workerConfigs) - throws InterruptedException, WorkerException { - final var stopwatch = Stopwatch.createStarted(); - - while (stopwatch.elapsed().compareTo(Duration.ofSeconds(30)) < 0) { - final Process p = processFactory.create( - "tester", - "job_id_" + RandomStringUtils.randomAlphabetic(4), - 0, - jobRoot, - BUSYBOX, - false, - false, - Map.of(), - "/bin/sh", - workerConfigs.getResourceRequirements(), - null, - Map.of(), - Map.of(), - Map.of(), - "-c", - "echo ENV_VAR_1=$ENV_VAR_1"); - p.waitFor(); - final int exitStatus = p.exitValue(); - - if (exitStatus == 0) { - log.info("Successfully ran test docker command."); - return; - } - } - - throw new RuntimeException("Failed to run test docker command after timeout."); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/KubePodProcessTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/KubePodProcessTest.java deleted file mode 100644 index 011a965a3d20..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/KubePodProcessTest.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.process; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.github.dockerjava.api.DockerClient; -import com.github.dockerjava.api.command.BuildImageResultCallback; -import com.github.dockerjava.httpclient5.ApacheDockerHttpClient; -import com.github.dockerjava.transport.DockerHttpClient; -import io.airbyte.commons.string.Strings; -import io.fabric8.kubernetes.api.model.ContainerBuilder; -import io.fabric8.kubernetes.api.model.Pod; -import io.fabric8.kubernetes.api.model.PodBuilder; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClient; -import java.io.File; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.testcontainers.shaded.com.github.dockerjava.core.DefaultDockerClientConfig; -import org.testcontainers.shaded.com.github.dockerjava.core.DockerClientConfig; -import org.testcontainers.shaded.com.github.dockerjava.core.DockerClientImpl; -import org.testcontainers.shaded.com.google.common.io.Resources; - -// Disabled until we start minikube on the node. 
-@Disabled -class KubePodProcessTest { - - private static final KubernetesClient K8s = new DefaultKubernetesClient(); - - private static final String TEST_IMAGE_WITH_VAR_PATH = "Dockerfile.with_var"; - private static final String TEST_IMAGE_WITH_VAR_NAME = "worker-test:with-var"; - - private static final String TEST_IMAGE_NO_VAR_PATH = "Dockerfile.no_var"; - private static final String TEST_IMAGE_NO_VAR_NAME = "worker-test:no-var"; - - private class DockerUtils { - - private static final DockerClientConfig CONFIG = DefaultDockerClientConfig.createDefaultConfigBuilder().build(); - private static final DockerHttpClient HTTP_CLIENT = new ApacheDockerHttpClient.Builder() - .dockerHost(CONFIG.getDockerHost()) - .sslConfig(CONFIG.getSSLConfig()) - .maxConnections(100) - .build(); - private static final DockerClient DOCKER_CLIENT = DockerClientImpl.getInstance(CONFIG, HTTP_CLIENT); - - public static String buildImage(final String dockerFilePath, final String tag) { - return DOCKER_CLIENT.buildImageCmd() - .withDockerfile(new File(dockerFilePath)) - .withTags(Set.of(tag)) - .exec(new BuildImageResultCallback()) - .awaitImageId(); - } - - } - - @BeforeAll - static void setup() { - final var varDockerfile = Resources.getResource(TEST_IMAGE_WITH_VAR_PATH); - DockerUtils.buildImage(varDockerfile.getPath(), TEST_IMAGE_WITH_VAR_NAME); - - final var noVarDockerfile = Resources.getResource(TEST_IMAGE_NO_VAR_PATH); - DockerUtils.buildImage(noVarDockerfile.getPath(), TEST_IMAGE_NO_VAR_NAME); - } - - @Nested - class GetPodIp { - - @Test - @DisplayName("Should error when the given pod does not exists.") - void testGetPodIpNoPod() { - assertThrows(RuntimeException.class, () -> KubePodProcess.getPodIP(K8s, "pod-does-not-exist", "default")); - } - - @Test - @DisplayName("Should return the correct pod ip.") - void testGetPodIpGoodPod() throws InterruptedException { - final var sleep = new ContainerBuilder() - .withImage("busybox") - .withName("sleep") - .withCommand("sleep", "100000") - 
.build(); - - final var podName = Strings.addRandomSuffix("test-get-pod-good-pod", "-", 5); - final Pod podDef = new PodBuilder() - .withApiVersion("v1") - .withNewMetadata() - .withName(podName) - .endMetadata() - .withNewSpec() - .withRestartPolicy("Never") - .withRestartPolicy("Never") - .withContainers(sleep) - .endSpec() - .build(); - - final String namespace = "default"; - final Pod pod = K8s.pods().inNamespace(namespace).createOrReplace(podDef); - K8s.resource(pod).waitUntilReady(20, TimeUnit.SECONDS); - - final var ip = KubePodProcess.getPodIP(K8s, podName, namespace); - final var exp = K8s.pods().inNamespace(namespace).withName(podName).get().getStatus().getPodIP(); - assertEquals(exp, ip); - K8s.resource(podDef).inNamespace(namespace).delete(); - } - - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/ProcessFactoryTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/ProcessFactoryTest.java deleted file mode 100644 index 4a76d723b7cb..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/ProcessFactoryTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.process; - -import static io.airbyte.workers.process.Metadata.SYNC_JOB; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class ProcessFactoryTest { - - @Test - void getPodNameNormal() { - final var name = ProcessFactory.createProcessName("airbyte/tester:1", SYNC_JOB, "1", 10, - KubeProcessFactory.KUBE_NAME_LEN_LIMIT); - final var withoutRandSuffix = name.substring(0, name.length() - 5); - Assertions.assertEquals("tester-sync-1-10-", withoutRandSuffix); - } - - @Test - void getPodNameTruncated() { - final var name = - ProcessFactory.createProcessName("airbyte/very-very-very-long-name-longer-than-63-chars:2", - SYNC_JOB, "1", 10, KubeProcessFactory.KUBE_NAME_LEN_LIMIT); - final var withoutRandSuffix = name.substring(0, name.length() - 5); - Assertions.assertEquals("very-very-very-long-name-longer-than-63-chars-sync-1-10-", withoutRandSuffix); - } - - @Test - void testHandlingDashAsFirstCharacter() { - final var uuid = "7339ba3b-cb53-4210-9591-c70d4a372330"; - final var name = ProcessFactory.createProcessName("airbyte/source-google-adwordsv2:latest", SYNC_JOB, - uuid, 10, KubeProcessFactory.KUBE_NAME_LEN_LIMIT); - - final var withoutRandSuffix = name.substring(0, name.length() - 5); - Assertions.assertEquals("le-adwordsv2-sync-7339ba3b-cb53-4210-9591-c70d4a372330-10-", withoutRandSuffix); - } - - @Test - void testOnlyDashes() { - final var uuid = "7339ba3b-cb53-4210-9591-c70d4a372330"; - final var name = ProcessFactory.createProcessName("--------", SYNC_JOB, uuid, - 10, KubeProcessFactory.KUBE_NAME_LEN_LIMIT); - - final var withoutRandSuffix = name.substring(0, name.length() - 5); - Assertions.assertEquals("sync-7339ba3b-cb53-4210-9591-c70d4a372330-10-", withoutRandSuffix); - } - - @Test - void testOnlyNumeric() { - final var uuid = "7339ba3b-cb53-4210-9591-c70d4a372330"; - final var name = ProcessFactory.createProcessName("0000000000", SYNC_JOB, uuid, - 10, KubeProcessFactory.KUBE_NAME_LEN_LIMIT); - 
- final var withoutRandSuffix = name.substring(0, name.length() - 5); - Assertions.assertEquals("sync-7339ba3b-cb53-4210-9591-c70d4a372330-10-", withoutRandSuffix); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/DockerComposeDocumentStoreClientTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/DockerComposeDocumentStoreClientTest.java deleted file mode 100644 index 41869c91281d..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/DockerComposeDocumentStoreClientTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.storage; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DockerComposeDocumentStoreClientTest { - - private static final String KEY = "a"; - private static final String DOCUMENT = "hello"; - private static final String DOCUMENT2 = "bye"; - - private DockerComposeDocumentStoreClient client; - - @BeforeEach - void setup() throws IOException { - final Path testRoot = Files.createTempDirectory(Path.of("/tmp"), "document_store"); - client = new DockerComposeDocumentStoreClient(testRoot); - } - - // todo (cgardens) - possible to dedupe this test with S3CloudDocumentStoreClientTest - @Test - void test() { - final Optional emptyResponse = client.read(KEY); - assertFalse(emptyResponse.isPresent()); - - client.write(KEY, DOCUMENT); - final Optional actualDocument = client.read(KEY); - assertTrue(actualDocument.isPresent()); - assertEquals(DOCUMENT, actualDocument.get()); - - client.write(KEY, DOCUMENT2); - final Optional actualDocumentUpdated = 
client.read(KEY); - assertTrue(actualDocumentUpdated.isPresent()); - assertEquals(DOCUMENT2, actualDocumentUpdated.get()); - - assertTrue(client.delete(KEY)); - assertFalse(client.delete(KEY)); - - final Optional emptyResponseAfterDeletion = client.read(KEY); - assertFalse(emptyResponseAfterDeletion.isPresent()); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/GcsDocumentStoreClientTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/GcsDocumentStoreClientTest.java deleted file mode 100644 index 18443118e320..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/GcsDocumentStoreClientTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.storage; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.google.cloud.storage.Storage; -import com.google.cloud.storage.StorageOptions; -import java.nio.file.Path; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Test; - -@Tag("cloud-storage") -class GcsDocumentStoreClientTest { - - private static final String BUCKET_NAME = "airbyte-kube-integration-logging-test"; - private static final String KEY = "a"; - private static final String DOCUMENT = "hello"; - private static final String DOCUMENT2 = "bye"; - - private GcsDocumentStoreClient client; - - @BeforeEach - void setup() { - final Path root = Path.of("state-test" + UUID.randomUUID()); - final Storage gcsClient = StorageOptions.getDefaultInstance().getService(); - client = new GcsDocumentStoreClient(gcsClient, BUCKET_NAME, root); - } - - // todo (cgardens) - possible to dedupe this test with S3CloudDocumentStoreClientTest - @Test - void test() { - final 
Optional emptyResponse = client.read(KEY); - assertFalse(emptyResponse.isPresent()); - - client.write(KEY, DOCUMENT); - final Optional actualDocument = client.read(KEY); - assertTrue(actualDocument.isPresent()); - assertEquals(DOCUMENT, actualDocument.get()); - - client.write(KEY, DOCUMENT2); - final Optional actualDocumentUpdated = client.read(KEY); - assertTrue(actualDocumentUpdated.isPresent()); - assertEquals(DOCUMENT2, actualDocumentUpdated.get()); - - assertTrue(client.delete(KEY)); - assertFalse(client.delete(KEY)); - - final Optional emptyResponseAfterDeletion = client.read(KEY); - assertFalse(emptyResponseAfterDeletion.isPresent()); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/S3DocumentStoreClientTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/S3DocumentStoreClientTest.java deleted file mode 100644 index b67fc83ec68c..000000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/storage/S3DocumentStoreClientTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.storage; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.nio.file.Path; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Test; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.s3.S3Client; - -@Tag("cloud-storage") -class S3DocumentStoreClientTest { - - private static final String BUCKET_NAME = "airbyte-kube-integration-logging-test"; - private static final Region REGION = Region.of("us-west-2"); - private static final String KEY = "a"; - private static final String DOCUMENT = "hello"; - private static final String DOCUMENT2 = "bye"; - - private S3DocumentStoreClient client; - - @BeforeEach - void setup() { - final Path root = Path.of("state-test" + UUID.randomUUID()); - final S3Client s3Client = S3Client.builder().region(REGION).build(); - client = new S3DocumentStoreClient(s3Client, BUCKET_NAME, root); - } - - // todo (cgardens) - possible to dedupe this test with GcsCloudDocumentStoreClientTest - @Test - void test() { - final Optional emptyResponse = client.read(KEY); - assertFalse(emptyResponse.isPresent()); - - client.write(KEY, DOCUMENT); - final Optional actualDocument = client.read(KEY); - assertTrue(actualDocument.isPresent()); - assertEquals(DOCUMENT, actualDocument.get()); - - client.write(KEY, DOCUMENT2); - final Optional actualDocumentUpdated = client.read(KEY); - assertTrue(actualDocumentUpdated.isPresent()); - assertEquals(DOCUMENT2, actualDocumentUpdated.get()); - - assertTrue(client.delete(KEY)); - assertFalse(client.delete(KEY)); - - final Optional emptyResponseAfterDeletion = client.read(KEY); - assertFalse(emptyResponseAfterDeletion.isPresent()); - } - -} diff --git a/airbyte-config/config-persistence/build.gradle 
b/airbyte-config/config-persistence/build.gradle index d43e903ab3c2..39b1a627b85c 100644 --- a/airbyte-config/config-persistence/build.gradle +++ b/airbyte-config/config-persistence/build.gradle @@ -15,7 +15,6 @@ dependencies { implementation project(':airbyte-db:jooq') implementation project(':airbyte-json-validation') implementation libs.airbyte.protocol - implementation project(':airbyte-metrics:metrics-lib') implementation 'commons-io:commons-io:2.7' implementation 'com.google.cloud:google-cloud-secretmanager:2.0.5' diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index dd2600bf05df..09cbb87b09e6 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -56,7 +56,6 @@ import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; import io.airbyte.db.instance.configs.jooq.generated.enums.StatusType; -import io.airbyte.metrics.lib.MetricQueries; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.StreamDescriptor; @@ -1460,14 +1459,6 @@ public int countDestinationsForWorkspace(final UUID workspaceId) throws IOExcept * metrics without exposing the underlying database connection. 
*/ - public List getSrcIdAndDestIdToReleaseStages(final UUID srcId, final UUID dstId) throws IOException { - return database.query(ctx -> MetricQueries.srcIdAndDestIdToReleaseStages(ctx, srcId, dstId)); - } - - public List getJobIdToReleaseStages(final long jobId) throws IOException { - return database.query(ctx -> MetricQueries.jobIdToReleaseStages(ctx, jobId)); - } - private Condition includeTombstones(final Field tombstoneField, final boolean includeTombstones) { if (includeTombstones) { return DSL.trueCondition(); diff --git a/airbyte-config/init/build.gradle b/airbyte-config/init/build.gradle index 1be61a9257c8..d46a5e2c6888 100644 --- a/airbyte-config/init/build.gradle +++ b/airbyte-config/init/build.gradle @@ -9,7 +9,6 @@ dependencies { implementation 'commons-cli:commons-cli:1.4' implementation project(':airbyte-config:config-models') implementation project(':airbyte-config:config-persistence') - implementation project(':airbyte-persistence:job-persistence') implementation libs.airbyte.protocol implementation project(':airbyte-json-validation') implementation libs.lombok diff --git a/airbyte-config/init/src/main/java/io/airbyte/config/init/ApplyDefinitionsHelper.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/ApplyDefinitionsHelper.java deleted file mode 100644 index 3d9795b0ee45..000000000000 --- a/airbyte-config/init/src/main/java/io/airbyte/config/init/ApplyDefinitionsHelper.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.init; - -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import lombok.extern.slf4j.Slf4j; - -/** - * Helper class used to apply actor definitions from a DefinitionsProvider to the database. This is - * here to enable easy reuse of definition application logic in bootloader and cron. - */ -@Singleton -@Requires(bean = ConfigRepository.class) -@Requires(bean = JobPersistence.class) -@Slf4j -public class ApplyDefinitionsHelper { - - private final ConfigRepository configRepository; - private final Optional definitionsProviderOptional; - private final JobPersistence jobPersistence; - - public ApplyDefinitionsHelper(final ConfigRepository configRepository, - final Optional definitionsProviderOptional, - final JobPersistence jobPersistence) { - this.configRepository = configRepository; - this.definitionsProviderOptional = definitionsProviderOptional; - this.jobPersistence = jobPersistence; - } - - public void apply() throws JsonValidationException, IOException { - apply(false); - } - - /** - * Apply the latest definitions from the provider to the repository. 
- * - * @param updateAll - Whether we should overwrite all stored definitions - */ - public void apply(final boolean updateAll) throws JsonValidationException, IOException { - if (definitionsProviderOptional.isPresent()) { - final DefinitionsProvider definitionsProvider = definitionsProviderOptional.get(); - final Optional currentProtocolRange = getCurrentProtocolRange(); - - if (updateAll) { - final List latestSourceDefinitions = definitionsProvider.getSourceDefinitions(); - for (final StandardSourceDefinition def : filterStandardSourceDefinitions(currentProtocolRange, latestSourceDefinitions)) { - configRepository.writeStandardSourceDefinition(def); - } - - final List latestDestinationDefinitions = definitionsProvider.getDestinationDefinitions(); - for (final StandardDestinationDefinition def : filterStandardDestinationDefinitions(currentProtocolRange, latestDestinationDefinitions)) { - configRepository.writeStandardDestinationDefinition(def); - } - } else { - // todo (pedroslopez): Logic to apply definitions should be moved outside of the - // DatabaseConfigPersistence class and behavior standardized - configRepository.seedActorDefinitions( - filterStandardSourceDefinitions(currentProtocolRange, definitionsProvider.getSourceDefinitions()), - filterStandardDestinationDefinitions(currentProtocolRange, definitionsProvider.getDestinationDefinitions())); - } - } else { - log.warn("Skipping application of latest definitions. Definitions provider not configured."); - } - } - - private List filterStandardDestinationDefinitions(final Optional protocolVersionRange, - final List destDefs) { - if (protocolVersionRange.isEmpty()) { - return destDefs; - } - - return destDefs.stream().filter(def -> { - final boolean isSupported = isProtocolVersionSupported(protocolVersionRange.get(), def.getSpec().getProtocolVersion()); - if (!isSupported) { - log.warn("Destination {} {} has an incompatible protocol version ({})... 
ignoring.", - def.getDestinationDefinitionId(), def.getName(), def.getSpec().getProtocolVersion()); - } - return isSupported; - }).toList(); - } - - private List filterStandardSourceDefinitions(final Optional protocolVersionRange, - final List sourceDefs) { - if (protocolVersionRange.isEmpty()) { - return sourceDefs; - } - - return sourceDefs.stream().filter(def -> { - final boolean isSupported = isProtocolVersionSupported(protocolVersionRange.get(), def.getSpec().getProtocolVersion()); - if (!isSupported) { - log.warn("Source {} {} has an incompatible protocol version ({})... ignoring.", - def.getSourceDefinitionId(), def.getName(), def.getSpec().getProtocolVersion()); - } - return isSupported; - }).toList(); - } - - private boolean isProtocolVersionSupported(final AirbyteProtocolVersionRange protocolVersionRange, final String protocolVersion) { - return protocolVersionRange.isSupported(AirbyteProtocolVersion.getWithDefault(protocolVersion)); - } - - private Optional getCurrentProtocolRange() throws IOException { - if (jobPersistence == null) { - // TODO Remove this once cloud has been migrated and job persistence is always defined - return Optional.empty(); - } - - return jobPersistence.getCurrentProtocolVersionRange(); - } - -} diff --git a/airbyte-config/init/src/test/java/io/airbyte/config/init/ApplyDefinitionsHelperTest.java b/airbyte-config/init/src/test/java/io/airbyte/config/init/ApplyDefinitionsHelperTest.java deleted file mode 100644 index 5649caef9bc9..000000000000 --- a/airbyte-config/init/src/test/java/io/airbyte/config/init/ApplyDefinitionsHelperTest.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.init; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -/** - * Test suite for the {@link ApplyDefinitionsHelper} class. 
- */ -class ApplyDefinitionsHelperTest { - - private static final UUID SOURCE_DEF_ID1 = UUID.randomUUID(); - private static final UUID DEST_DEF_ID2 = UUID.randomUUID(); - private static final String CONNECT_NAME1 = "connector1"; - private static final String CONNECT_NAME2 = "connector2"; - private static final String DOCUMENTATION_URL = "https://wwww.example.com"; - private static final String DOCKER_REPOSITORY = "airbyte/connector"; - private static final String DOCKER_TAG = "0.1.0"; - private static final String PROTOCOL_VERSION_1 = "1.0.0"; - private static final String PROTOCOL_VERSION_2 = "2.0.0"; - public static final StandardSourceDefinition SOURCE_DEF1 = new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEF_ID1) - .withDockerRepository(DOCKER_REPOSITORY) - .withDockerImageTag(DOCKER_TAG) - .withName(CONNECT_NAME1) - .withDocumentationUrl(DOCUMENTATION_URL) - .withSpec(new ConnectorSpecification().withProtocolVersion(PROTOCOL_VERSION_1)); - public static final StandardSourceDefinition SOURCE_DEF2 = new StandardSourceDefinition() - .withSourceDefinitionId(SOURCE_DEF_ID1) - .withDockerRepository(DOCKER_REPOSITORY) - .withDockerImageTag(DOCKER_TAG) - .withName(CONNECT_NAME2) - .withDocumentationUrl(DOCUMENTATION_URL) - .withSpec(new ConnectorSpecification().withProtocolVersion(PROTOCOL_VERSION_2)); - - public static final StandardDestinationDefinition DEST_DEF1 = new StandardDestinationDefinition() - .withDestinationDefinitionId(DEST_DEF_ID2) - .withDockerRepository(DOCKER_REPOSITORY) - .withDockerImageTag(DOCKER_TAG) - .withName(CONNECT_NAME1) - .withDocumentationUrl(DOCUMENTATION_URL) - .withSpec(new ConnectorSpecification().withProtocolVersion(PROTOCOL_VERSION_2)); - - public static final StandardDestinationDefinition DEST_DEF2 = new StandardDestinationDefinition() - .withDestinationDefinitionId(DEST_DEF_ID2) - .withDockerRepository(DOCKER_REPOSITORY) - .withDockerImageTag(DOCKER_TAG) - .withName(CONNECT_NAME2) - 
.withDocumentationUrl(DOCUMENTATION_URL) - .withSpec(new ConnectorSpecification().withProtocolVersion(PROTOCOL_VERSION_1)); - - private ConfigRepository configRepository; - private DefinitionsProvider definitionsProvider; - private JobPersistence jobPersistence; - private ApplyDefinitionsHelper applyDefinitionsHelper; - - @BeforeEach - void setup() throws IOException { - configRepository = mock(ConfigRepository.class); - definitionsProvider = mock(DefinitionsProvider.class); - jobPersistence = mock(JobPersistence.class); - - applyDefinitionsHelper = new ApplyDefinitionsHelper(configRepository, Optional.of(definitionsProvider), jobPersistence); - - // default calls to empty. - when(configRepository.listStandardDestinationDefinitions(true)).thenReturn(Collections.emptyList()); - when(configRepository.listStandardSourceDefinitions(true)).thenReturn(Collections.emptyList()); - when(definitionsProvider.getDestinationDefinitions()).thenReturn(Collections.emptyList()); - when(definitionsProvider.getSourceDefinitions()).thenReturn(Collections.emptyList()); - } - - @Test - void testUpdateAllAddRecord() throws JsonValidationException, IOException { - when(definitionsProvider.getSourceDefinitions()).thenReturn(List.of(SOURCE_DEF1)); - when(definitionsProvider.getDestinationDefinitions()).thenReturn(List.of(DEST_DEF1)); - - applyDefinitionsHelper.apply(true); - - verify(configRepository).writeStandardSourceDefinition(SOURCE_DEF1); - verify(configRepository).writeStandardDestinationDefinition(DEST_DEF1); - verify(definitionsProvider).getDestinationDefinitions(); - verify(definitionsProvider).getSourceDefinitions(); - verifyNoMoreInteractions(configRepository); - verifyNoMoreInteractions(definitionsProvider); - } - - @Test - void testUpdateAllMutateRecord() throws JsonValidationException, IOException { - when(configRepository.listStandardSourceDefinitions(true)).thenReturn(List.of(SOURCE_DEF2)); - 
when(configRepository.listStandardDestinationDefinitions(true)).thenReturn(List.of(DEST_DEF2)); - - when(definitionsProvider.getSourceDefinitions()).thenReturn(List.of(SOURCE_DEF1)); - when(definitionsProvider.getDestinationDefinitions()).thenReturn(List.of(DEST_DEF1)); - - applyDefinitionsHelper.apply(true); - - verify(configRepository).writeStandardSourceDefinition(SOURCE_DEF1); - verify(configRepository).writeStandardDestinationDefinition(DEST_DEF1); - verify(definitionsProvider).getDestinationDefinitions(); - verify(definitionsProvider).getSourceDefinitions(); - verifyNoMoreInteractions(configRepository); - verifyNoMoreInteractions(definitionsProvider); - } - - @Test - void testUpdateAllNoDeleteRecord() throws JsonValidationException, IOException { - when(configRepository.listStandardSourceDefinitions(true)).thenReturn(List.of(SOURCE_DEF1)); - when(configRepository.listStandardDestinationDefinitions(true)).thenReturn(List.of(DEST_DEF1)); - - applyDefinitionsHelper.apply(true); - - verify(definitionsProvider).getDestinationDefinitions(); - verify(definitionsProvider).getSourceDefinitions(); - verifyNoMoreInteractions(configRepository); - verifyNoMoreInteractions(definitionsProvider); - } - - @Test - void testApplyOSS() throws JsonValidationException, IOException { - when(definitionsProvider.getSourceDefinitions()).thenReturn(List.of(SOURCE_DEF1)); - when(definitionsProvider.getDestinationDefinitions()).thenReturn(List.of(DEST_DEF1)); - - applyDefinitionsHelper.apply(); - - verify(configRepository).seedActorDefinitions(List.of(SOURCE_DEF1), List.of(DEST_DEF1)); - verify(definitionsProvider).getDestinationDefinitions(); - verify(definitionsProvider).getSourceDefinitions(); - verifyNoMoreInteractions(configRepository); - verifyNoMoreInteractions(definitionsProvider); - } - - @ParameterizedTest - @ValueSource(booleans = {false, true}) - void testDefinitionsFiltering(final boolean updateAll) throws JsonValidationException, IOException { - 
when(jobPersistence.getCurrentProtocolVersionRange()) - .thenReturn(Optional.of(new AirbyteProtocolVersionRange(new Version("2.0.0"), new Version("3.0.0")))); - - when(definitionsProvider.getSourceDefinitions()).thenReturn(List.of(SOURCE_DEF1, SOURCE_DEF2)); - when(definitionsProvider.getDestinationDefinitions()).thenReturn(List.of(DEST_DEF1, DEST_DEF2)); - - applyDefinitionsHelper.apply(updateAll); - - if (updateAll) { - verify(configRepository).writeStandardSourceDefinition(SOURCE_DEF2); - verify(configRepository).writeStandardDestinationDefinition(DEST_DEF1); - verifyNoMoreInteractions(configRepository); - } else { - verify(configRepository).seedActorDefinitions(List.of(SOURCE_DEF2), List.of(DEST_DEF1)); - } - } - - @Test - void testMissingDefinitionsProvider() { - final ApplyDefinitionsHelper helper = new ApplyDefinitionsHelper(configRepository, Optional.empty(), jobPersistence); - assertDoesNotThrow(() -> helper.apply()); - } - -} diff --git a/airbyte-connector-builder-server/.coveragerc b/airbyte-connector-builder-server/.coveragerc deleted file mode 100644 index 034c0c0c28f5..000000000000 --- a/airbyte-connector-builder-server/.coveragerc +++ /dev/null @@ -1,3 +0,0 @@ -[report] -# show lines missing coverage -show_missing = true diff --git a/airbyte-connector-builder-server/.dockerignore b/airbyte-connector-builder-server/.dockerignore deleted file mode 100644 index 5cea6d6cbdd1..000000000000 --- a/airbyte-connector-builder-server/.dockerignore +++ /dev/null @@ -1,4 +0,0 @@ -build -!build/airbyte_api_client -.venv -connector_builder.egg-info diff --git a/airbyte-connector-builder-server/.gitignore b/airbyte-connector-builder-server/.gitignore deleted file mode 100644 index 2456084a5e54..000000000000 --- a/airbyte-connector-builder-server/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -.coverage -.venv -state_*.yaml diff --git a/airbyte-connector-builder-server/.python-version b/airbyte-connector-builder-server/.python-version deleted file mode 100644 index 
a9f8d1be337f..000000000000 --- a/airbyte-connector-builder-server/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.9.11 diff --git a/airbyte-connector-builder-server/CDK_VERSION b/airbyte-connector-builder-server/CDK_VERSION deleted file mode 100644 index 94a5fe438afc..000000000000 --- a/airbyte-connector-builder-server/CDK_VERSION +++ /dev/null @@ -1 +0,0 @@ -0.25.0 \ No newline at end of file diff --git a/airbyte-connector-builder-server/CHANGELOG.md b/airbyte-connector-builder-server/CHANGELOG.md deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-connector-builder-server/Dockerfile b/airbyte-connector-builder-server/Dockerfile deleted file mode 100644 index 7badf775681d..000000000000 --- a/airbyte-connector-builder-server/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM python:3.9-slim as base - -RUN apt-get upgrade \ - && pip install --upgrade pip - -WORKDIR /home/connector-builder-server -COPY . ./ - -RUN pip install --no-cache-dir . - -ENTRYPOINT ["uvicorn", "connector_builder.entrypoint:app", "--host", "0.0.0.0", "--port", "80"] - -LABEL io.airbyte.version=0.40.32 -LABEL io.airbyte.name=airbyte/connector-builder-server diff --git a/airbyte-connector-builder-server/README.md b/airbyte-connector-builder-server/README.md deleted file mode 100644 index eb6d5f83bcec..000000000000 --- a/airbyte-connector-builder-server/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# Connector builder - - -## Getting started - -Set up the virtual environment and install dependencies -```bash -python -m venv .venv -source .venv/bin/activate -pip install . -``` - -Then run the server -```bash -uvicorn connector_builder.entrypoint:app --host 0.0.0.0 --port 8080 -``` - -The server is now reachable on localhost:8080 - -## Changing the used CDK version - -Update the `airbyte-connector-builder-server/CDK_VERSION` file to point to the target version. -This will make sure both this project and the webapp depending on it will stay in sync. 
- -### OpenAPI generation - -Run it via Gradle by running this from the Airbyte project root: -```bash -./gradlew :airbyte-connector-builder-server:generateOpenApiPythonServer -``` diff --git a/airbyte-connector-builder-server/build.gradle b/airbyte-connector-builder-server/build.gradle deleted file mode 100644 index 4f8bb1e08b0d..000000000000 --- a/airbyte-connector-builder-server/build.gradle +++ /dev/null @@ -1,61 +0,0 @@ -import org.openapitools.generator.gradle.plugin.tasks.GenerateTask - -plugins { - id "org.openapi.generator" version "5.3.1" - id 'airbyte-python-docker' - id 'airbyte-docker' -} - -airbytePythonDocker { - moduleDirectory 'connector_builder' -} - -task generateOpenApiPythonServer(type: GenerateTask) { - outputs.upToDateWhen { false } - - def generatedCodeDir = "$buildDir/airbyte_connector_builder_server" - inputSpec = "$rootDir.absolutePath/airbyte-connector-builder-server/src/main/openapi/openapi.yaml" - outputDir = generatedCodeDir - - generatorName = "python-fastapi" - configFile = "$projectDir/openapi/generator_config.yaml" - templateDir = "$projectDir/openapi/templates" - packageName = "connector_builder.generated" - - // After we generate, we're only interested in the API declaration and the generated pydantic models. 
- // So we copy those from the build/ directory - doLast { - def sourceDir = "$generatedCodeDir/src/connector_builder/generated/" - def targetDir = "$projectDir/connector_builder/generated" - mkdir targetDir - copy { - from "$sourceDir/apis" - include "*_interface.py", "__init__.py" - into "$targetDir/apis" - } - copy { - from "$sourceDir/models" - include "*.py" - into "$targetDir/models" - } - } -} - -project.build.dependsOn(generateOpenApiPythonServer) - -// java modules such as airbyte-server can use copyGeneratedTar to copy the files to the docker image -// We cannot do this here because we don't generate a tar file -// Instead, we copy the files into the build directory so they can be copied to the docker container -task prepareBuild(type: Copy) { - from layout.projectDirectory.file(".") - exclude '.*' - exclude 'build' - exclude '**/*.pyc' - - into layout.buildDirectory.dir("docker") -} - -tasks.named("buildDockerImage") { - dependsOn prepareBuild - dependsOn copyDocker -} diff --git a/airbyte-connector-builder-server/connector_builder/__init__.py b/airbyte-connector-builder-server/connector_builder/__init__.py deleted file mode 100644 index c941b3045795..000000000000 --- a/airbyte-connector-builder-server/connector_builder/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# diff --git a/airbyte-connector-builder-server/connector_builder/entrypoint.py b/airbyte-connector-builder-server/connector_builder/entrypoint.py deleted file mode 100644 index 593f6b672f67..000000000000 --- a/airbyte-connector-builder-server/connector_builder/entrypoint.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from connector_builder.generated.apis.default_api_interface import initialize_router -from connector_builder.impl.default_api import DefaultApiImpl -from connector_builder.impl.low_code_cdk_adapter import LowCodeSourceAdapterFactory -from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware - -_MAXIMUM_NUMBER_OF_PAGES_PER_SLICE = 5 -_MAXIMUM_NUMBER_OF_SLICES = 5 -_ADAPTER_FACTORY = LowCodeSourceAdapterFactory(_MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, _MAXIMUM_NUMBER_OF_SLICES) - -app = FastAPI( - title="Connector Builder Server API", - description="Connector Builder Server API ", - version="1.0.0", -) - -app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) - -app.include_router(initialize_router(DefaultApiImpl(_ADAPTER_FACTORY, _MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, _MAXIMUM_NUMBER_OF_SLICES))) diff --git a/airbyte-connector-builder-server/connector_builder/generated/apis/__init__.py b/airbyte-connector-builder-server/connector_builder/generated/apis/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-connector-builder-server/connector_builder/generated/apis/default_api_interface.py b/airbyte-connector-builder-server/connector_builder/generated/apis/default_api_interface.py deleted file mode 100644 index d21d514bdbe9..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/apis/default_api_interface.py +++ /dev/null @@ -1,173 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# -# This file was auto-generated from Airbyte's custom OpenAPI templates. Do not edit it manually. 
-# coding: utf-8 - -import inspect -from abc import ABC, abstractmethod -from typing import Callable, Dict, List # noqa: F401 - -from fastapi import ( # noqa: F401 - APIRouter, - Body, - Cookie, - Depends, - Form, - Header, - Path, - Query, - Response, - Security, - status, -) - -from connector_builder.generated.models.extra_models import TokenModel # noqa: F401 - - -from connector_builder.generated.models.invalid_input_exception_info import InvalidInputExceptionInfo -from connector_builder.generated.models.known_exception_info import KnownExceptionInfo -from connector_builder.generated.models.resolve_manifest import ResolveManifest -from connector_builder.generated.models.resolve_manifest_request_body import ResolveManifestRequestBody -from connector_builder.generated.models.stream_read import StreamRead -from connector_builder.generated.models.stream_read_request_body import StreamReadRequestBody -from connector_builder.generated.models.streams_list_read import StreamsListRead -from connector_builder.generated.models.streams_list_request_body import StreamsListRequestBody - - -class DefaultApi(ABC): - """ - NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - Do not edit the class manually. - """ - - @abstractmethod - async def get_manifest_template( - self, - ) -> str: - """ - Return a connector manifest template to use as the default value for the yaml editor - """ - - @abstractmethod - async def list_streams( - self, - streams_list_request_body: StreamsListRequestBody = Body(None, description=""), - ) -> StreamsListRead: - """ - List all streams present in the connector manifest, along with their specific request URLs - """ - - @abstractmethod - async def read_stream( - self, - stream_read_request_body: StreamReadRequestBody = Body(None, description=""), - ) -> StreamRead: - """ - Reads a specific stream in the source. TODO in a later phase - only read a single slice of data. 
- """ - - @abstractmethod - async def resolve_manifest( - self, - resolve_manifest_request_body: ResolveManifestRequestBody = Body(None, description=""), - ) -> ResolveManifest: - """ - Given a JSON manifest, returns a JSON manifest with all of the $refs and $options resolved and flattened - """ - - -def _assert_signature_is_set(method: Callable) -> None: - """ - APIRouter().add_api_route expects the input method to have a signature. It gets signatures - by running inspect.signature(method) under the hood. - - In the case that an instance method does not declare "self" as an input parameter (due to developer error - for example), then the call to inspect.signature() raises a ValueError and fails. - - Ideally, we'd automatically detect & correct this problem. To do that, we'd need to do - setattr(method, "__signature__", ) but that's not possible because instance - methods (i.e the input to this function) are object subclasses, and you can't use setattr on objects - (https://stackoverflow.com/a/12839070/3237889) - - The workaround this method implements is to raise an exception at runtime if the input method fails - when inspect.signature() is called. This is good enough because the error will be detected - immediately when the developer tries to run the server, so builds should very quickly fail and this - will practically never make it to a production scenario. - """ - try: - inspect.signature(method) - except ValueError as e: - # Based on empirical observation, the call to inspect fails with a ValueError - # with exactly one argument: "invalid method signature" - if e.args and len(e.args) == 1 and e.args[0] == "invalid method signature": - # I couldn't figure out how to setattr on a "method" object to populate the signature. For now just kick - # it back to the developer and tell them to set the "self" variable - raise Exception(f"Method {method.__name__} in class {type(method.__self__).__name__} must declare the variable 'self'. 
") - else: - raise - - -def initialize_router(api: DefaultApi) -> APIRouter: - router = APIRouter() - - _assert_signature_is_set(api.get_manifest_template) - router.add_api_route( - "/v1/manifest_template", - endpoint=api.get_manifest_template, - methods=["GET"], - responses={ - 200: {"model": str, "description": "Successful operation"}, - }, - tags=["default"], - summary="Return a connector manifest template to use as the default value for the yaml editor", - response_model_by_alias=True, - ) - - _assert_signature_is_set(api.list_streams) - router.add_api_route( - "/v1/streams/list", - endpoint=api.list_streams, - methods=["POST"], - responses={ - 200: {"model": StreamsListRead, "description": "Successful operation"}, - 400: {"model": KnownExceptionInfo, "description": "Exception occurred; see message for details."}, - 422: {"model": InvalidInputExceptionInfo, "description": "Input failed validation"}, - }, - tags=["default"], - summary="List all streams present in the connector manifest, along with their specific request URLs", - response_model_by_alias=True, - ) - - _assert_signature_is_set(api.read_stream) - router.add_api_route( - "/v1/stream/read", - endpoint=api.read_stream, - methods=["POST"], - responses={ - 200: {"model": StreamRead, "description": "Successful operation"}, - 400: {"model": KnownExceptionInfo, "description": "Exception occurred; see message for details."}, - 422: {"model": InvalidInputExceptionInfo, "description": "Input failed validation"}, - }, - tags=["default"], - summary="Reads a specific stream in the source. 
TODO in a later phase - only read a single slice of data.", - response_model_by_alias=True, - ) - - _assert_signature_is_set(api.resolve_manifest) - router.add_api_route( - "/v1/manifest/resolve", - endpoint=api.resolve_manifest, - methods=["POST"], - responses={ - 200: {"model": ResolveManifest, "description": "Successful operation"}, - 400: {"model": KnownExceptionInfo, "description": "Exception occurred; see message for details."}, - 422: {"model": InvalidInputExceptionInfo, "description": "Input failed validation"}, - }, - tags=["default"], - summary="Given a JSON manifest, returns a JSON manifest with all of the $refs and $options resolved and flattened", - response_model_by_alias=True, - ) - - - return router diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/__init__.py b/airbyte-connector-builder-server/connector_builder/generated/models/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/add_fields.py b/airbyte-connector-builder-server/connector_builder/generated/models/add_fields.py deleted file mode 100644 index ae3c26318e44..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/add_fields.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.add_fields_all_of import AddFieldsAllOf -from connector_builder.generated.models.added_field_definition import AddedFieldDefinition -from connector_builder.generated.models.parsed_add_field_definition import ParsedAddFieldDefinition - - -class AddFields(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
- - Do not edit the class manually. - - AddFields - a model defined in OpenAPI - - fields: The fields of this AddFields. - parsed_fields: The parsed_fields of this AddFields [Optional]. - """ - - fields: List[AddedFieldDefinition] = Field(alias="fields") - parsed_fields: Optional[List[ParsedAddFieldDefinition]] = Field(alias="_parsed_fields", default=None) - -AddFields.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/add_fields_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/add_fields_all_of.py deleted file mode 100644 index fec6699e211d..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/add_fields_all_of.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.added_field_definition import AddedFieldDefinition -from connector_builder.generated.models.parsed_add_field_definition import ParsedAddFieldDefinition - - -class AddFieldsAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - AddFieldsAllOf - a model defined in OpenAPI - - fields: The fields of this AddFieldsAllOf. - parsed_fields: The parsed_fields of this AddFieldsAllOf [Optional]. 
- """ - - fields: List[AddedFieldDefinition] = Field(alias="fields") - parsed_fields: Optional[List[ParsedAddFieldDefinition]] = Field(alias="_parsed_fields", default=None) - -AddFieldsAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/added_field_definition.py b/airbyte-connector-builder-server/connector_builder/generated/models/added_field_definition.py deleted file mode 100644 index 622683e02bcf..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/added_field_definition.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class AddedFieldDefinition(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - AddedFieldDefinition - a model defined in OpenAPI - - path: The path of this AddedFieldDefinition. - value: The value of this AddedFieldDefinition. 
- """ - - path: List[str] = Field(alias="path") - value: AnyOfInterpolatedStringstring = Field(alias="value") - -AddedFieldDefinition.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/api_key_authenticator.py b/airbyte-connector-builder-server/connector_builder/generated/models/api_key_authenticator.py deleted file mode 100644 index c9b4ed8abd5f..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/api_key_authenticator.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.api_key_authenticator_all_of import ApiKeyAuthenticatorAllOf - - -class ApiKeyAuthenticator(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ApiKeyAuthenticator - a model defined in OpenAPI - - header: The header of this ApiKeyAuthenticator. - api_token: The api_token of this ApiKeyAuthenticator. - config: The config of this ApiKeyAuthenticator. 
- """ - - header: AnyOfInterpolatedStringstring = Field(alias="header") - api_token: AnyOfInterpolatedStringstring = Field(alias="api_token") - config: Dict[str, Any] = Field(alias="config") - -ApiKeyAuthenticator.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/api_key_authenticator_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/api_key_authenticator_all_of.py deleted file mode 100644 index bf7546979880..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/api_key_authenticator_all_of.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class ApiKeyAuthenticatorAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ApiKeyAuthenticatorAllOf - a model defined in OpenAPI - - header: The header of this ApiKeyAuthenticatorAllOf. - api_token: The api_token of this ApiKeyAuthenticatorAllOf. - config: The config of this ApiKeyAuthenticatorAllOf. 
- """ - - header: AnyOfInterpolatedStringstring = Field(alias="header") - api_token: AnyOfInterpolatedStringstring = Field(alias="api_token") - config: Dict[str, Any] = Field(alias="config") - -ApiKeyAuthenticatorAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/basic_http_authenticator.py b/airbyte-connector-builder-server/connector_builder/generated/models/basic_http_authenticator.py deleted file mode 100644 index 05ec03210336..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/basic_http_authenticator.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.basic_http_authenticator_all_of import BasicHttpAuthenticatorAllOf - - -class BasicHttpAuthenticator(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - BasicHttpAuthenticator - a model defined in OpenAPI - - username: The username of this BasicHttpAuthenticator. - config: The config of this BasicHttpAuthenticator. - password: The password of this BasicHttpAuthenticator [Optional]. 
- """ - - username: AnyOfInterpolatedStringstring = Field(alias="username") - config: Dict[str, Any] = Field(alias="config") - password: Optional[AnyOfInterpolatedStringstring] = Field(alias="password", default=None) - -BasicHttpAuthenticator.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/basic_http_authenticator_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/basic_http_authenticator_all_of.py deleted file mode 100644 index 46aec5658cc3..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/basic_http_authenticator_all_of.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class BasicHttpAuthenticatorAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - BasicHttpAuthenticatorAllOf - a model defined in OpenAPI - - username: The username of this BasicHttpAuthenticatorAllOf. - config: The config of this BasicHttpAuthenticatorAllOf. - password: The password of this BasicHttpAuthenticatorAllOf [Optional]. 
- """ - - username: AnyOfInterpolatedStringstring = Field(alias="username") - config: Dict[str, Any] = Field(alias="config") - password: Optional[AnyOfInterpolatedStringstring] = Field(alias="password", default=None) - -BasicHttpAuthenticatorAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/bearer_authenticator.py b/airbyte-connector-builder-server/connector_builder/generated/models/bearer_authenticator.py deleted file mode 100644 index a599282e3a7f..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/bearer_authenticator.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.bearer_authenticator_all_of import BearerAuthenticatorAllOf - - -class BearerAuthenticator(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - BearerAuthenticator - a model defined in OpenAPI - - api_token: The api_token of this BearerAuthenticator. - config: The config of this BearerAuthenticator. 
- """ - - api_token: AnyOfInterpolatedStringstring = Field(alias="api_token") - config: Dict[str, Any] = Field(alias="config") - -BearerAuthenticator.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/bearer_authenticator_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/bearer_authenticator_all_of.py deleted file mode 100644 index 8ed9ce407402..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/bearer_authenticator_all_of.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class BearerAuthenticatorAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - BearerAuthenticatorAllOf - a model defined in OpenAPI - - api_token: The api_token of this BearerAuthenticatorAllOf. - config: The config of this BearerAuthenticatorAllOf. 
- """ - - api_token: AnyOfInterpolatedStringstring = Field(alias="api_token") - config: Dict[str, Any] = Field(alias="config") - -BearerAuthenticatorAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/cartesian_product_stream_slicer.py b/airbyte-connector-builder-server/connector_builder/generated/models/cartesian_product_stream_slicer.py deleted file mode 100644 index b388be6bfb31..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/cartesian_product_stream_slicer.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_cartesian_product_stream_slicer_datetime_stream_slicer_list_stream_slicer_single_slice_substream_slicer import AnyOfCartesianProductStreamSlicerDatetimeStreamSlicerListStreamSlicerSingleSliceSubstreamSlicer -from connector_builder.generated.models.cartesian_product_stream_slicer_all_of import CartesianProductStreamSlicerAllOf -from connector_builder.generated.models.stream_slicer import StreamSlicer - - -class CartesianProductStreamSlicer(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - CartesianProductStreamSlicer - a model defined in OpenAPI - - stream_slicers: The stream_slicers of this CartesianProductStreamSlicer. 
- """ - - stream_slicers: List[AnyOfCartesianProductStreamSlicerDatetimeStreamSlicerListStreamSlicerSingleSliceSubstreamSlicer] = Field(alias="stream_slicers") - -CartesianProductStreamSlicer.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/cartesian_product_stream_slicer_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/cartesian_product_stream_slicer_all_of.py deleted file mode 100644 index 4c2c9c750e3c..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/cartesian_product_stream_slicer_all_of.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_cartesian_product_stream_slicer_datetime_stream_slicer_list_stream_slicer_single_slice_substream_slicer import AnyOfCartesianProductStreamSlicerDatetimeStreamSlicerListStreamSlicerSingleSliceSubstreamSlicer - - -class CartesianProductStreamSlicerAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - CartesianProductStreamSlicerAllOf - a model defined in OpenAPI - - stream_slicers: The stream_slicers of this CartesianProductStreamSlicerAllOf. 
- """ - - stream_slicers: List[AnyOfCartesianProductStreamSlicerDatetimeStreamSlicerListStreamSlicerSingleSliceSubstreamSlicer] = Field(alias="stream_slicers") - -CartesianProductStreamSlicerAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/check_stream.py b/airbyte-connector-builder-server/connector_builder/generated/models/check_stream.py deleted file mode 100644 index aa7cfa6b794b..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/check_stream.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class CheckStream(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - CheckStream - a model defined in OpenAPI - - stream_names: The stream_names of this CheckStream. 
- """ - - stream_names: List[str] = Field(alias="stream_names") - -CheckStream.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/composite_error_handler.py b/airbyte-connector-builder-server/connector_builder/generated/models/composite_error_handler.py deleted file mode 100644 index 159d8d11403f..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/composite_error_handler.py +++ /dev/null @@ -1,26 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_composite_error_handler_default_error_handler import AnyOfCompositeErrorHandlerDefaultErrorHandler -from connector_builder.generated.models.composite_error_handler_all_of import CompositeErrorHandlerAllOf - - -class CompositeErrorHandler(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - CompositeErrorHandler - a model defined in OpenAPI - - error_handlers: The error_handlers of this CompositeErrorHandler. 
- """ - - error_handlers: List[AnyOfCompositeErrorHandlerDefaultErrorHandler] = Field(alias="error_handlers") - -CompositeErrorHandler.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/composite_error_handler_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/composite_error_handler_all_of.py deleted file mode 100644 index a9b8d949b600..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/composite_error_handler_all_of.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_composite_error_handler_default_error_handler import AnyOfCompositeErrorHandlerDefaultErrorHandler - - -class CompositeErrorHandlerAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - CompositeErrorHandlerAllOf - a model defined in OpenAPI - - error_handlers: The error_handlers of this CompositeErrorHandlerAllOf. 
- """ - - error_handlers: List[AnyOfCompositeErrorHandlerDefaultErrorHandler] = Field(alias="error_handlers") - -CompositeErrorHandlerAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/config_component_schema.py b/airbyte-connector-builder-server/connector_builder/generated/models/config_component_schema.py deleted file mode 100644 index 4dd567a444e6..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/config_component_schema.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.check_stream import CheckStream -from connector_builder.generated.models.declarative_stream import DeclarativeStream - - -class ConfigComponentSchema(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ConfigComponentSchema - a model defined in OpenAPI - - version: The version of this ConfigComponentSchema. - check: The check of this ConfigComponentSchema. - streams: The streams of this ConfigComponentSchema. 
- """ - - version: str = Field(alias="version") - check: CheckStream = Field(alias="check") - streams: List[DeclarativeStream] = Field(alias="streams") - -ConfigComponentSchema.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/connector_manifest.py b/airbyte-connector-builder-server/connector_builder/generated/models/connector_manifest.py deleted file mode 100644 index 0ef33db0a1e8..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/connector_manifest.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.check_stream import CheckStream -from connector_builder.generated.models.declarative_stream import DeclarativeStream - - -class ConnectorManifest(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ConnectorManifest - a model defined in OpenAPI - - version: The version of this ConnectorManifest. - check: The check of this ConnectorManifest. - streams: The streams of this ConnectorManifest. 
- """ - - version: str = Field(alias="version") - check: CheckStream = Field(alias="check") - streams: List[DeclarativeStream] = Field(alias="streams") - -ConnectorManifest.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/constant_backoff_strategy.py b/airbyte-connector-builder-server/connector_builder/generated/models/constant_backoff_strategy.py deleted file mode 100644 index fafdbd2c1055..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/constant_backoff_strategy.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringnumberstring import AnyOfInterpolatedStringnumberstring -from connector_builder.generated.models.constant_backoff_strategy_all_of import ConstantBackoffStrategyAllOf - - -class ConstantBackoffStrategy(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ConstantBackoffStrategy - a model defined in OpenAPI - - backoff_time_in_seconds: The backoff_time_in_seconds of this ConstantBackoffStrategy. - config: The config of this ConstantBackoffStrategy. 
- """ - - backoff_time_in_seconds: AnyOfInterpolatedStringnumberstring = Field(alias="backoff_time_in_seconds") - config: Dict[str, Any] = Field(alias="config") - -ConstantBackoffStrategy.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/constant_backoff_strategy_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/constant_backoff_strategy_all_of.py deleted file mode 100644 index 11849c7a2d7f..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/constant_backoff_strategy_all_of.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringnumberstring import AnyOfInterpolatedStringnumberstring - - -class ConstantBackoffStrategyAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ConstantBackoffStrategyAllOf - a model defined in OpenAPI - - backoff_time_in_seconds: The backoff_time_in_seconds of this ConstantBackoffStrategyAllOf. - config: The config of this ConstantBackoffStrategyAllOf. 
- """ - - backoff_time_in_seconds: AnyOfInterpolatedStringnumberstring = Field(alias="backoff_time_in_seconds") - config: Dict[str, Any] = Field(alias="config") - -ConstantBackoffStrategyAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/cursor_pagination_strategy.py b/airbyte-connector-builder-server/connector_builder/generated/models/cursor_pagination_strategy.py deleted file mode 100644 index e5c1b4e1d22d..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/cursor_pagination_strategy.py +++ /dev/null @@ -1,36 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_booleanstring import AnyOfInterpolatedBooleanstring -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.cursor_pagination_strategy_all_of import CursorPaginationStrategyAllOf -from connector_builder.generated.models.json_decoder import JsonDecoder - - -class CursorPaginationStrategy(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - CursorPaginationStrategy - a model defined in OpenAPI - - cursor_value: The cursor_value of this CursorPaginationStrategy. - config: The config of this CursorPaginationStrategy. - page_size: The page_size of this CursorPaginationStrategy [Optional]. - stop_condition: The stop_condition of this CursorPaginationStrategy [Optional]. - decoder: The decoder of this CursorPaginationStrategy [Optional]. 
- """ - - cursor_value: AnyOfInterpolatedStringstring = Field(alias="cursor_value") - config: Dict[str, Any] = Field(alias="config") - page_size: Optional[int] = Field(alias="page_size", default=None) - stop_condition: Optional[AnyOfInterpolatedBooleanstring] = Field(alias="stop_condition", default=None) - decoder: Optional[JsonDecoder] = Field(alias="decoder", default=None) - -CursorPaginationStrategy.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/cursor_pagination_strategy_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/cursor_pagination_strategy_all_of.py deleted file mode 100644 index 10281e69e51a..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/cursor_pagination_strategy_all_of.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_booleanstring import AnyOfInterpolatedBooleanstring -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.json_decoder import JsonDecoder - - -class CursorPaginationStrategyAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - CursorPaginationStrategyAllOf - a model defined in OpenAPI - - cursor_value: The cursor_value of this CursorPaginationStrategyAllOf. - config: The config of this CursorPaginationStrategyAllOf. - page_size: The page_size of this CursorPaginationStrategyAllOf [Optional]. - stop_condition: The stop_condition of this CursorPaginationStrategyAllOf [Optional]. 
- decoder: The decoder of this CursorPaginationStrategyAllOf [Optional]. - """ - - cursor_value: AnyOfInterpolatedStringstring = Field(alias="cursor_value") - config: Dict[str, Any] = Field(alias="config") - page_size: Optional[int] = Field(alias="page_size", default=None) - stop_condition: Optional[AnyOfInterpolatedBooleanstring] = Field(alias="stop_condition", default=None) - decoder: Optional[JsonDecoder] = Field(alias="decoder", default=None) - -CursorPaginationStrategyAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/datetime_stream_slicer.py b/airbyte-connector-builder-server/connector_builder/generated/models/datetime_stream_slicer.py deleted file mode 100644 index 6e752c9c9819..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/datetime_stream_slicer.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_of_min_max_datetimestring import AnyOfMinMaxDatetimestring -from connector_builder.generated.models.datetime_stream_slicer_all_of import DatetimeStreamSlicerAllOf -from connector_builder.generated.models.request_option import RequestOption -from connector_builder.generated.models.stream_slicer import StreamSlicer - - -class DatetimeStreamSlicer(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DatetimeStreamSlicer - a model defined in OpenAPI - - start_datetime: The start_datetime of this DatetimeStreamSlicer. 
- end_datetime: The end_datetime of this DatetimeStreamSlicer. - step: The step of this DatetimeStreamSlicer. - cursor_field: The cursor_field of this DatetimeStreamSlicer. - datetime_format: The datetime_format of this DatetimeStreamSlicer. - config: The config of this DatetimeStreamSlicer. - cursor: The cursor of this DatetimeStreamSlicer [Optional]. - cursor_end: The cursor_end of this DatetimeStreamSlicer [Optional]. - start_time_option: The start_time_option of this DatetimeStreamSlicer [Optional]. - end_time_option: The end_time_option of this DatetimeStreamSlicer [Optional]. - stream_state_field_start: The stream_state_field_start of this DatetimeStreamSlicer [Optional]. - stream_state_field_end: The stream_state_field_end of this DatetimeStreamSlicer [Optional]. - lookback_window: The lookback_window of this DatetimeStreamSlicer [Optional]. - """ - - start_datetime: AnyOfMinMaxDatetimestring = Field(alias="start_datetime") - end_datetime: AnyOfMinMaxDatetimestring = Field(alias="end_datetime") - step: str = Field(alias="step") - cursor_field: AnyOfInterpolatedStringstring = Field(alias="cursor_field") - datetime_format: str = Field(alias="datetime_format") - config: Dict[str, Any] = Field(alias="config") - cursor: Optional[Dict[str, Any]] = Field(alias="_cursor", default=None) - cursor_end: Optional[Dict[str, Any]] = Field(alias="_cursor_end", default=None) - start_time_option: Optional[RequestOption] = Field(alias="start_time_option", default=None) - end_time_option: Optional[RequestOption] = Field(alias="end_time_option", default=None) - stream_state_field_start: Optional[str] = Field(alias="stream_state_field_start", default=None) - stream_state_field_end: Optional[str] = Field(alias="stream_state_field_end", default=None) - lookback_window: Optional[AnyOfInterpolatedStringstring] = Field(alias="lookback_window", default=None) - -DatetimeStreamSlicer.update_forward_refs() \ No newline at end of file diff --git 
a/airbyte-connector-builder-server/connector_builder/generated/models/datetime_stream_slicer_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/datetime_stream_slicer_all_of.py deleted file mode 100644 index dcd4d06d5fd5..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/datetime_stream_slicer_all_of.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_of_min_max_datetimestring import AnyOfMinMaxDatetimestring -from connector_builder.generated.models.request_option import RequestOption - - -class DatetimeStreamSlicerAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DatetimeStreamSlicerAllOf - a model defined in OpenAPI - - start_datetime: The start_datetime of this DatetimeStreamSlicerAllOf. - end_datetime: The end_datetime of this DatetimeStreamSlicerAllOf. - step: The step of this DatetimeStreamSlicerAllOf. - cursor_field: The cursor_field of this DatetimeStreamSlicerAllOf. - datetime_format: The datetime_format of this DatetimeStreamSlicerAllOf. - config: The config of this DatetimeStreamSlicerAllOf. - cursor: The cursor of this DatetimeStreamSlicerAllOf [Optional]. - cursor_end: The cursor_end of this DatetimeStreamSlicerAllOf [Optional]. - start_time_option: The start_time_option of this DatetimeStreamSlicerAllOf [Optional]. - end_time_option: The end_time_option of this DatetimeStreamSlicerAllOf [Optional]. 
- stream_state_field_start: The stream_state_field_start of this DatetimeStreamSlicerAllOf [Optional]. - stream_state_field_end: The stream_state_field_end of this DatetimeStreamSlicerAllOf [Optional]. - lookback_window: The lookback_window of this DatetimeStreamSlicerAllOf [Optional]. - """ - - start_datetime: AnyOfMinMaxDatetimestring = Field(alias="start_datetime") - end_datetime: AnyOfMinMaxDatetimestring = Field(alias="end_datetime") - step: str = Field(alias="step") - cursor_field: AnyOfInterpolatedStringstring = Field(alias="cursor_field") - datetime_format: str = Field(alias="datetime_format") - config: Dict[str, Any] = Field(alias="config") - cursor: Optional[Dict[str, Any]] = Field(alias="_cursor", default=None) - cursor_end: Optional[Dict[str, Any]] = Field(alias="_cursor_end", default=None) - start_time_option: Optional[RequestOption] = Field(alias="start_time_option", default=None) - end_time_option: Optional[RequestOption] = Field(alias="end_time_option", default=None) - stream_state_field_start: Optional[str] = Field(alias="stream_state_field_start", default=None) - stream_state_field_end: Optional[str] = Field(alias="stream_state_field_end", default=None) - lookback_window: Optional[AnyOfInterpolatedStringstring] = Field(alias="lookback_window", default=None) - -DatetimeStreamSlicerAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/declarative_oauth2_authenticator.py b/airbyte-connector-builder-server/connector_builder/generated/models/declarative_oauth2_authenticator.py deleted file mode 100644 index e027ed33a7d4..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/declarative_oauth2_authenticator.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from 
pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.declarative_oauth2_authenticator_all_of import DeclarativeOauth2AuthenticatorAllOf - - -class DeclarativeOauth2Authenticator(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DeclarativeOauth2Authenticator - a model defined in OpenAPI - - token_refresh_endpoint: The token_refresh_endpoint of this DeclarativeOauth2Authenticator. - client_id: The client_id of this DeclarativeOauth2Authenticator. - client_secret: The client_secret of this DeclarativeOauth2Authenticator. - refresh_token: The refresh_token of this DeclarativeOauth2Authenticator. - config: The config of this DeclarativeOauth2Authenticator. - scopes: The scopes of this DeclarativeOauth2Authenticator [Optional]. - token_expiry_date: The token_expiry_date of this DeclarativeOauth2Authenticator [Optional]. - token_expiry_date: The token_expiry_date of this DeclarativeOauth2Authenticator [Optional]. - access_token_name: The access_token_name of this DeclarativeOauth2Authenticator [Optional]. - expires_in_name: The expires_in_name of this DeclarativeOauth2Authenticator [Optional]. - refresh_request_body: The refresh_request_body of this DeclarativeOauth2Authenticator [Optional]. - grant_type: The grant_type of this DeclarativeOauth2Authenticator [Optional]. 
- """ - - token_refresh_endpoint: AnyOfInterpolatedStringstring = Field(alias="token_refresh_endpoint") - client_id: AnyOfInterpolatedStringstring = Field(alias="client_id") - client_secret: AnyOfInterpolatedStringstring = Field(alias="client_secret") - refresh_token: AnyOfInterpolatedStringstring = Field(alias="refresh_token") - config: Dict[str, Any] = Field(alias="config") - scopes: Optional[List[str]] = Field(alias="scopes", default=None) - token_expiry_date: Optional[AnyOfInterpolatedStringstring] = Field(alias="token_expiry_date", default=None) - token_expiry_date: Optional[object] = Field(alias="_token_expiry_date", default=None) - access_token_name: Optional[AnyOfInterpolatedStringstring] = Field(alias="access_token_name", default=None) - expires_in_name: Optional[AnyOfInterpolatedStringstring] = Field(alias="expires_in_name", default=None) - refresh_request_body: Optional[Dict[str, Any]] = Field(alias="refresh_request_body", default=None) - grant_type: Optional[AnyOfInterpolatedStringstring] = Field(alias="grant_type", default=None) - -DeclarativeOauth2Authenticator.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/declarative_oauth2_authenticator_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/declarative_oauth2_authenticator_all_of.py deleted file mode 100644 index 1b8d2820b052..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/declarative_oauth2_authenticator_all_of.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class 
DeclarativeOauth2AuthenticatorAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DeclarativeOauth2AuthenticatorAllOf - a model defined in OpenAPI - - token_refresh_endpoint: The token_refresh_endpoint of this DeclarativeOauth2AuthenticatorAllOf. - client_id: The client_id of this DeclarativeOauth2AuthenticatorAllOf. - client_secret: The client_secret of this DeclarativeOauth2AuthenticatorAllOf. - refresh_token: The refresh_token of this DeclarativeOauth2AuthenticatorAllOf. - config: The config of this DeclarativeOauth2AuthenticatorAllOf. - scopes: The scopes of this DeclarativeOauth2AuthenticatorAllOf [Optional]. - token_expiry_date: The token_expiry_date of this DeclarativeOauth2AuthenticatorAllOf [Optional]. - token_expiry_date: The token_expiry_date of this DeclarativeOauth2AuthenticatorAllOf [Optional]. - access_token_name: The access_token_name of this DeclarativeOauth2AuthenticatorAllOf [Optional]. - expires_in_name: The expires_in_name of this DeclarativeOauth2AuthenticatorAllOf [Optional]. - refresh_request_body: The refresh_request_body of this DeclarativeOauth2AuthenticatorAllOf [Optional]. - grant_type: The grant_type of this DeclarativeOauth2AuthenticatorAllOf [Optional]. 
- """ - - token_refresh_endpoint: AnyOfInterpolatedStringstring = Field(alias="token_refresh_endpoint") - client_id: AnyOfInterpolatedStringstring = Field(alias="client_id") - client_secret: AnyOfInterpolatedStringstring = Field(alias="client_secret") - refresh_token: AnyOfInterpolatedStringstring = Field(alias="refresh_token") - config: Dict[str, Any] = Field(alias="config") - scopes: Optional[List[str]] = Field(alias="scopes", default=None) - token_expiry_date: Optional[AnyOfInterpolatedStringstring] = Field(alias="token_expiry_date", default=None) - token_expiry_date: Optional[object] = Field(alias="_token_expiry_date", default=None) - access_token_name: Optional[AnyOfInterpolatedStringstring] = Field(alias="access_token_name", default=None) - expires_in_name: Optional[AnyOfInterpolatedStringstring] = Field(alias="expires_in_name", default=None) - refresh_request_body: Optional[Dict[str, Any]] = Field(alias="refresh_request_body", default=None) - grant_type: Optional[AnyOfInterpolatedStringstring] = Field(alias="grant_type", default=None) - -DeclarativeOauth2AuthenticatorAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/declarative_stream.py b/airbyte-connector-builder-server/connector_builder/generated/models/declarative_stream.py deleted file mode 100644 index 0ae815717dbe..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/declarative_stream.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_add_fields_remove_fields import AnyOfAddFieldsRemoveFields -from 
connector_builder.generated.models.any_of_json_file_schema_loader_default_schema_loader import AnyOfJsonFileSchemaLoaderDefaultSchemaLoader -from connector_builder.generated.models.any_ofarrayarraystring import AnyOfarrayarraystring -from connector_builder.generated.models.any_ofarraystring import AnyOfarraystring -from connector_builder.generated.models.simple_retriever import SimpleRetriever - - -class DeclarativeStream(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DeclarativeStream - a model defined in OpenAPI - - retriever: The retriever of this DeclarativeStream. - config: The config of this DeclarativeStream. - name: The name of this DeclarativeStream [Optional]. - primary_key: The primary_key of this DeclarativeStream [Optional]. - schema_loader: The schema_loader of this DeclarativeStream [Optional]. - name: The name of this DeclarativeStream [Optional]. - primary_key: The primary_key of this DeclarativeStream [Optional]. - schema_loader: The schema_loader of this DeclarativeStream [Optional]. - stream_cursor_field: The stream_cursor_field of this DeclarativeStream [Optional]. - transformations: The transformations of this DeclarativeStream [Optional]. - checkpoint_interval: The checkpoint_interval of this DeclarativeStream [Optional]. 
- """ - - retriever: SimpleRetriever = Field(alias="retriever") - config: Dict[str, Any] = Field(alias="config") - name: Optional[str] = Field(alias="name", default=None) - primary_key: Optional[AnyOfarrayarraystring] = Field(alias="primary_key", default=None) - schema_loader: Optional[AnyOfJsonFileSchemaLoaderDefaultSchemaLoader] = Field(alias="schema_loader", default=None) - name: Optional[str] = Field(alias="_name", default=None) - primary_key: Optional[str] = Field(alias="_primary_key", default=None) - schema_loader: Optional[AnyOfJsonFileSchemaLoaderDefaultSchemaLoader] = Field(alias="_schema_loader", default=None) - stream_cursor_field: Optional[AnyOfarraystring] = Field(alias="stream_cursor_field", default=None) - transformations: Optional[List[AnyOfAddFieldsRemoveFields]] = Field(alias="transformations", default=None) - checkpoint_interval: Optional[int] = Field(alias="checkpoint_interval", default=None) - -DeclarativeStream.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/default_error_handler.py b/airbyte-connector-builder-server/connector_builder/generated/models/default_error_handler.py deleted file mode 100644 index 374308d1060f..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/default_error_handler.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_constant_backoff_strategy_exponential_backoff_strategy_wait_time_from_header_backoff_strategy_wait_until_time_from_header_backoff_strategy import AnyOfConstantBackoffStrategyExponentialBackoffStrategyWaitTimeFromHeaderBackoffStrategyWaitUntilTimeFromHeaderBackoffStrategy -from 
connector_builder.generated.models.default_error_handler_all_of import DefaultErrorHandlerAllOf -from connector_builder.generated.models.http_response_filter import HttpResponseFilter - - -class DefaultErrorHandler(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DefaultErrorHandler - a model defined in OpenAPI - - config: The config of this DefaultErrorHandler. - response_filters: The response_filters of this DefaultErrorHandler [Optional]. - max_retries: The max_retries of this DefaultErrorHandler [Optional]. - max_retries: The max_retries of this DefaultErrorHandler [Optional]. - backoff_strategies: The backoff_strategies of this DefaultErrorHandler [Optional]. - """ - - config: Dict[str, Any] = Field(alias="config") - response_filters: Optional[List[HttpResponseFilter]] = Field(alias="response_filters", default=None) - max_retries: Optional[int] = Field(alias="max_retries", default=None) - max_retries: Optional[int] = Field(alias="_max_retries", default=None) - backoff_strategies: Optional[List[AnyOfConstantBackoffStrategyExponentialBackoffStrategyWaitTimeFromHeaderBackoffStrategyWaitUntilTimeFromHeaderBackoffStrategy]] = Field(alias="backoff_strategies", default=None) - -DefaultErrorHandler.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/default_error_handler_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/default_error_handler_all_of.py deleted file mode 100644 index 4179e5b1fa66..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/default_error_handler_all_of.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, 
BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_constant_backoff_strategy_exponential_backoff_strategy_wait_time_from_header_backoff_strategy_wait_until_time_from_header_backoff_strategy import AnyOfConstantBackoffStrategyExponentialBackoffStrategyWaitTimeFromHeaderBackoffStrategyWaitUntilTimeFromHeaderBackoffStrategy -from connector_builder.generated.models.http_response_filter import HttpResponseFilter - - -class DefaultErrorHandlerAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DefaultErrorHandlerAllOf - a model defined in OpenAPI - - config: The config of this DefaultErrorHandlerAllOf. - response_filters: The response_filters of this DefaultErrorHandlerAllOf [Optional]. - max_retries: The max_retries of this DefaultErrorHandlerAllOf [Optional]. - max_retries: The max_retries of this DefaultErrorHandlerAllOf [Optional]. - backoff_strategies: The backoff_strategies of this DefaultErrorHandlerAllOf [Optional]. 
- """ - - config: Dict[str, Any] = Field(alias="config") - response_filters: Optional[List[HttpResponseFilter]] = Field(alias="response_filters", default=None) - max_retries: Optional[int] = Field(alias="max_retries", default=None) - max_retries: Optional[int] = Field(alias="_max_retries", default=None) - backoff_strategies: Optional[List[AnyOfConstantBackoffStrategyExponentialBackoffStrategyWaitTimeFromHeaderBackoffStrategyWaitUntilTimeFromHeaderBackoffStrategy]] = Field(alias="backoff_strategies", default=None) - -DefaultErrorHandlerAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/default_paginator.py b/airbyte-connector-builder-server/connector_builder/generated/models/default_paginator.py deleted file mode 100644 index 375e1c572b49..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/default_paginator.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_cursor_pagination_strategy_offset_increment_page_increment import AnyOfCursorPaginationStrategyOffsetIncrementPageIncrement -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.default_paginator_all_of import DefaultPaginatorAllOf -from connector_builder.generated.models.json_decoder import JsonDecoder -from connector_builder.generated.models.paginator import Paginator -from connector_builder.generated.models.request_option import RequestOption - - -class DefaultPaginator(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
- - Do not edit the class manually. - - DefaultPaginator - a model defined in OpenAPI - - pagination_strategy: The pagination_strategy of this DefaultPaginator. - config: The config of this DefaultPaginator. - url_base: The url_base of this DefaultPaginator. - decoder: The decoder of this DefaultPaginator [Optional]. - token: The token of this DefaultPaginator [Optional]. - page_size_option: The page_size_option of this DefaultPaginator [Optional]. - page_token_option: The page_token_option of this DefaultPaginator [Optional]. - """ - - pagination_strategy: AnyOfCursorPaginationStrategyOffsetIncrementPageIncrement = Field(alias="pagination_strategy") - config: Dict[str, Any] = Field(alias="config") - url_base: AnyOfInterpolatedStringstring = Field(alias="url_base") - decoder: Optional[JsonDecoder] = Field(alias="decoder", default=None) - token: Optional[object] = Field(alias="_token", default=None) - page_size_option: Optional[RequestOption] = Field(alias="page_size_option", default=None) - page_token_option: Optional[RequestOption] = Field(alias="page_token_option", default=None) - -DefaultPaginator.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/default_paginator_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/default_paginator_all_of.py deleted file mode 100644 index d683a54cb4b8..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/default_paginator_all_of.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_cursor_pagination_strategy_offset_increment_page_increment import 
AnyOfCursorPaginationStrategyOffsetIncrementPageIncrement -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.json_decoder import JsonDecoder -from connector_builder.generated.models.request_option import RequestOption - - -class DefaultPaginatorAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DefaultPaginatorAllOf - a model defined in OpenAPI - - pagination_strategy: The pagination_strategy of this DefaultPaginatorAllOf. - config: The config of this DefaultPaginatorAllOf. - url_base: The url_base of this DefaultPaginatorAllOf. - decoder: The decoder of this DefaultPaginatorAllOf [Optional]. - token: The token of this DefaultPaginatorAllOf [Optional]. - page_size_option: The page_size_option of this DefaultPaginatorAllOf [Optional]. - page_token_option: The page_token_option of this DefaultPaginatorAllOf [Optional]. 
- """ - - pagination_strategy: AnyOfCursorPaginationStrategyOffsetIncrementPageIncrement = Field(alias="pagination_strategy") - config: Dict[str, Any] = Field(alias="config") - url_base: AnyOfInterpolatedStringstring = Field(alias="url_base") - decoder: Optional[JsonDecoder] = Field(alias="decoder", default=None) - token: Optional[object] = Field(alias="_token", default=None) - page_size_option: Optional[RequestOption] = Field(alias="page_size_option", default=None) - page_token_option: Optional[RequestOption] = Field(alias="page_token_option", default=None) - -DefaultPaginatorAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/default_schema_loader.py b/airbyte-connector-builder-server/connector_builder/generated/models/default_schema_loader.py deleted file mode 100644 index fc431f1eebbe..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/default_schema_loader.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.default_schema_loader_all_of import DefaultSchemaLoaderAllOf - - -class DefaultSchemaLoader(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DefaultSchemaLoader - a model defined in OpenAPI - - config: The config of this DefaultSchemaLoader. 
- """ - - config: Dict[str, Any] = Field(alias="config") - -DefaultSchemaLoader.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/default_schema_loader_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/default_schema_loader_all_of.py deleted file mode 100644 index 8e753d9bcb7c..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/default_schema_loader_all_of.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class DefaultSchemaLoaderAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DefaultSchemaLoaderAllOf - a model defined in OpenAPI - - config: The config of this DefaultSchemaLoaderAllOf. 
- """ - - config: Dict[str, Any] = Field(alias="config") - -DefaultSchemaLoaderAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/dpath_extractor.py b/airbyte-connector-builder-server/connector_builder/generated/models/dpath_extractor.py deleted file mode 100644 index 86bb8f6145fd..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/dpath_extractor.py +++ /dev/null @@ -1,31 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.dpath_extractor_all_of import DpathExtractorAllOf -from connector_builder.generated.models.json_decoder import JsonDecoder - - -class DpathExtractor(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DpathExtractor - a model defined in OpenAPI - - field_pointer: The field_pointer of this DpathExtractor. - config: The config of this DpathExtractor. - decoder: The decoder of this DpathExtractor [Optional]. 
- """ - - field_pointer: List[AnyOfInterpolatedStringstring] = Field(alias="field_pointer") - config: Dict[str, Any] = Field(alias="config") - decoder: Optional[JsonDecoder] = Field(alias="decoder", default=None) - -DpathExtractor.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/dpath_extractor_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/dpath_extractor_all_of.py deleted file mode 100644 index 9e7b152be530..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/dpath_extractor_all_of.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.json_decoder import JsonDecoder - - -class DpathExtractorAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - DpathExtractorAllOf - a model defined in OpenAPI - - field_pointer: The field_pointer of this DpathExtractorAllOf. - config: The config of this DpathExtractorAllOf. - decoder: The decoder of this DpathExtractorAllOf [Optional]. 
- """ - - field_pointer: List[AnyOfInterpolatedStringstring] = Field(alias="field_pointer") - config: Dict[str, Any] = Field(alias="config") - decoder: Optional[JsonDecoder] = Field(alias="decoder", default=None) - -DpathExtractorAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/exponential_backoff_strategy.py b/airbyte-connector-builder-server/connector_builder/generated/models/exponential_backoff_strategy.py deleted file mode 100644 index 551ef4562591..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/exponential_backoff_strategy.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringnumberstring import AnyOfInterpolatedStringnumberstring -from connector_builder.generated.models.exponential_backoff_strategy_all_of import ExponentialBackoffStrategyAllOf - - -class ExponentialBackoffStrategy(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ExponentialBackoffStrategy - a model defined in OpenAPI - - config: The config of this ExponentialBackoffStrategy. - factor: The factor of this ExponentialBackoffStrategy [Optional]. 
- """ - - config: Dict[str, Any] = Field(alias="config") - factor: Optional[AnyOfInterpolatedStringnumberstring] = Field(alias="factor", default=None) - -ExponentialBackoffStrategy.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/exponential_backoff_strategy_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/exponential_backoff_strategy_all_of.py deleted file mode 100644 index d2c26b9321a2..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/exponential_backoff_strategy_all_of.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringnumberstring import AnyOfInterpolatedStringnumberstring - - -class ExponentialBackoffStrategyAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ExponentialBackoffStrategyAllOf - a model defined in OpenAPI - - config: The config of this ExponentialBackoffStrategyAllOf. - factor: The factor of this ExponentialBackoffStrategyAllOf [Optional]. 
- """ - - config: Dict[str, Any] = Field(alias="config") - factor: Optional[AnyOfInterpolatedStringnumberstring] = Field(alias="factor", default=None) - -ExponentialBackoffStrategyAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/extra_models.py b/airbyte-connector-builder-server/connector_builder/generated/models/extra_models.py deleted file mode 100644 index a3a283fb842b..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/extra_models.py +++ /dev/null @@ -1,8 +0,0 @@ -# coding: utf-8 - -from pydantic import BaseModel - -class TokenModel(BaseModel): - """Defines a token model.""" - - sub: str diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/http_request.py b/airbyte-connector-builder-server/connector_builder/generated/models/http_request.py deleted file mode 100644 index 5f29b9d0b7c5..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/http_request.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class HttpRequest(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - HttpRequest - a model defined in OpenAPI - - url: The url of this HttpRequest. - parameters: The parameters of this HttpRequest [Optional]. - body: The body of this HttpRequest [Optional]. - headers: The headers of this HttpRequest [Optional]. - http_method: The http_method of this HttpRequest. 
- """ - - url: str - parameters: Optional[Dict[str, Any]] = None - body: Optional[Dict[str, Any]] = None - headers: Optional[Dict[str, Any]] = None - http_method: str - -HttpRequest.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/http_requester.py b/airbyte-connector-builder-server/connector_builder/generated/models/http_requester.py deleted file mode 100644 index bf4be60005cd..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/http_requester.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_composite_error_handler_default_error_handler import AnyOfCompositeErrorHandlerDefaultErrorHandler -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_of_no_auth_declarative_oauth2_authenticator_api_key_authenticator_bearer_authenticator_basic_http_authenticator import AnyOfNoAuthDeclarativeOauth2AuthenticatorApiKeyAuthenticatorBearerAuthenticatorBasicHttpAuthenticator -from connector_builder.generated.models.any_ofstringstring import AnyOfstringstring -from connector_builder.generated.models.http_requester_all_of import HttpRequesterAllOf -from connector_builder.generated.models.interpolated_request_options_provider import InterpolatedRequestOptionsProvider -from connector_builder.generated.models.requester import Requester - - -class HttpRequester(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - HttpRequester - a model defined in OpenAPI - - name: The name of this HttpRequester. 
- url_base: The url_base of this HttpRequester. - path: The path of this HttpRequester. - config: The config of this HttpRequester. - http_method: The http_method of this HttpRequester [Optional]. - request_options_provider: The request_options_provider of this HttpRequester [Optional]. - authenticator: The authenticator of this HttpRequester [Optional]. - error_handler: The error_handler of this HttpRequester [Optional]. - """ - - name: str = Field(alias="name") - url_base: AnyOfInterpolatedStringstring = Field(alias="url_base") - path: AnyOfInterpolatedStringstring = Field(alias="path") - config: Dict[str, Any] = Field(alias="config") - http_method: Optional[AnyOfstringstring] = Field(alias="http_method", default=None) - request_options_provider: Optional[InterpolatedRequestOptionsProvider] = Field(alias="request_options_provider", default=None) - authenticator: Optional[AnyOfNoAuthDeclarativeOauth2AuthenticatorApiKeyAuthenticatorBearerAuthenticatorBasicHttpAuthenticator] = Field(alias="authenticator", default=None) - error_handler: Optional[AnyOfCompositeErrorHandlerDefaultErrorHandler] = Field(alias="error_handler", default=None) - -HttpRequester.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/http_requester_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/http_requester_all_of.py deleted file mode 100644 index 7f2a42e36b6f..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/http_requester_all_of.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_composite_error_handler_default_error_handler import 
AnyOfCompositeErrorHandlerDefaultErrorHandler -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_of_no_auth_declarative_oauth2_authenticator_api_key_authenticator_bearer_authenticator_basic_http_authenticator import AnyOfNoAuthDeclarativeOauth2AuthenticatorApiKeyAuthenticatorBearerAuthenticatorBasicHttpAuthenticator -from connector_builder.generated.models.any_ofstringstring import AnyOfstringstring -from connector_builder.generated.models.interpolated_request_options_provider import InterpolatedRequestOptionsProvider - - -class HttpRequesterAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - HttpRequesterAllOf - a model defined in OpenAPI - - name: The name of this HttpRequesterAllOf. - url_base: The url_base of this HttpRequesterAllOf. - path: The path of this HttpRequesterAllOf. - config: The config of this HttpRequesterAllOf. - http_method: The http_method of this HttpRequesterAllOf [Optional]. - request_options_provider: The request_options_provider of this HttpRequesterAllOf [Optional]. - authenticator: The authenticator of this HttpRequesterAllOf [Optional]. - error_handler: The error_handler of this HttpRequesterAllOf [Optional]. 
- """ - - name: str = Field(alias="name") - url_base: AnyOfInterpolatedStringstring = Field(alias="url_base") - path: AnyOfInterpolatedStringstring = Field(alias="path") - config: Dict[str, Any] = Field(alias="config") - http_method: Optional[AnyOfstringstring] = Field(alias="http_method", default=None) - request_options_provider: Optional[InterpolatedRequestOptionsProvider] = Field(alias="request_options_provider", default=None) - authenticator: Optional[AnyOfNoAuthDeclarativeOauth2AuthenticatorApiKeyAuthenticatorBearerAuthenticatorBasicHttpAuthenticator] = Field(alias="authenticator", default=None) - error_handler: Optional[AnyOfCompositeErrorHandlerDefaultErrorHandler] = Field(alias="error_handler", default=None) - -HttpRequesterAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/http_response.py b/airbyte-connector-builder-server/connector_builder/generated/models/http_response.py deleted file mode 100644 index 49a81ebeb628..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/http_response.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class HttpResponse(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - HttpResponse - a model defined in OpenAPI - - status: The status of this HttpResponse. - body: The body of this HttpResponse [Optional]. - headers: The headers of this HttpResponse [Optional]. 
- """ - - status: int - body: Optional[Dict[str, Any]] = None - headers: Optional[Dict[str, Any]] = None - -HttpResponse.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/http_response_filter.py b/airbyte-connector-builder-server/connector_builder/generated/models/http_response_filter.py deleted file mode 100644 index 8440949d7c26..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/http_response_filter.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_booleanstring import AnyOfInterpolatedBooleanstring -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_ofstringstring import AnyOfstringstring - - -class HttpResponseFilter(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - HttpResponseFilter - a model defined in OpenAPI - - action: The action of this HttpResponseFilter. - config: The config of this HttpResponseFilter. - http_codes: The http_codes of this HttpResponseFilter [Optional]. - error_message_contains: The error_message_contains of this HttpResponseFilter [Optional]. - predicate: The predicate of this HttpResponseFilter [Optional]. - error_message: The error_message of this HttpResponseFilter [Optional]. 
- """ - - action: AnyOfstringstring = Field(alias="action") - config: Dict[str, Any] = Field(alias="config") - http_codes: Optional[list[int]] = Field(alias="http_codes", default=None) - error_message_contains: Optional[str] = Field(alias="error_message_contains", default=None) - predicate: Optional[AnyOfInterpolatedBooleanstring] = Field(alias="predicate", default=None) - error_message: Optional[AnyOfInterpolatedStringstring] = Field(alias="error_message", default=None) - -HttpResponseFilter.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_boolean.py b/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_boolean.py deleted file mode 100644 index 4c50980fec09..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_boolean.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class InterpolatedBoolean(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - InterpolatedBoolean - a model defined in OpenAPI - - condition: The condition of this InterpolatedBoolean. 
- """ - - condition: str = Field(alias="condition") - -InterpolatedBoolean.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_request_options_provider.py b/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_request_options_provider.py deleted file mode 100644 index 7a8a9afbff0c..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_request_options_provider.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_ofmapstring import AnyOfmapstring -from connector_builder.generated.models.interpolated_request_options_provider_all_of import InterpolatedRequestOptionsProviderAllOf - - -class InterpolatedRequestOptionsProvider(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - InterpolatedRequestOptionsProvider - a model defined in OpenAPI - - config: The config of this InterpolatedRequestOptionsProvider [Optional]. - request_parameters: The request_parameters of this InterpolatedRequestOptionsProvider [Optional]. - request_headers: The request_headers of this InterpolatedRequestOptionsProvider [Optional]. - request_body_data: The request_body_data of this InterpolatedRequestOptionsProvider [Optional]. - request_body_json: The request_body_json of this InterpolatedRequestOptionsProvider [Optional]. 
- """ - - config: Optional[Dict[str, Any]] = Field(alias="config", default=None) - request_parameters: Optional[AnyOfmapstring] = Field(alias="request_parameters", default=None) - request_headers: Optional[AnyOfmapstring] = Field(alias="request_headers", default=None) - request_body_data: Optional[AnyOfmapstring] = Field(alias="request_body_data", default=None) - request_body_json: Optional[AnyOfmapstring] = Field(alias="request_body_json", default=None) - -InterpolatedRequestOptionsProvider.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_request_options_provider_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_request_options_provider_all_of.py deleted file mode 100644 index 3958af7231c5..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_request_options_provider_all_of.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_ofmapstring import AnyOfmapstring - - -class InterpolatedRequestOptionsProviderAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - InterpolatedRequestOptionsProviderAllOf - a model defined in OpenAPI - - config: The config of this InterpolatedRequestOptionsProviderAllOf [Optional]. - request_parameters: The request_parameters of this InterpolatedRequestOptionsProviderAllOf [Optional]. - request_headers: The request_headers of this InterpolatedRequestOptionsProviderAllOf [Optional]. 
- request_body_data: The request_body_data of this InterpolatedRequestOptionsProviderAllOf [Optional]. - request_body_json: The request_body_json of this InterpolatedRequestOptionsProviderAllOf [Optional]. - """ - - config: Optional[Dict[str, Any]] = Field(alias="config", default=None) - request_parameters: Optional[AnyOfmapstring] = Field(alias="request_parameters", default=None) - request_headers: Optional[AnyOfmapstring] = Field(alias="request_headers", default=None) - request_body_data: Optional[AnyOfmapstring] = Field(alias="request_body_data", default=None) - request_body_json: Optional[AnyOfmapstring] = Field(alias="request_body_json", default=None) - -InterpolatedRequestOptionsProviderAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_string.py b/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_string.py deleted file mode 100644 index f202e9f19f22..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/interpolated_string.py +++ /dev/null @@ -1,26 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class InterpolatedString(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - InterpolatedString - a model defined in OpenAPI - - string: The string of this InterpolatedString. - default: The default of this InterpolatedString [Optional]. 
- """ - - string: str = Field(alias="string") - default: Optional[str] = Field(alias="default", default=None) - -InterpolatedString.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_exception_info.py b/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_exception_info.py deleted file mode 100644 index 878690e09825..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_exception_info.py +++ /dev/null @@ -1,31 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 -from connector_builder.generated.models.invalid_input_property import InvalidInputProperty - - -class InvalidInputExceptionInfo(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - InvalidInputExceptionInfo - a model defined in OpenAPI - - message: The message of this InvalidInputExceptionInfo. - exception_class_name: The exception_class_name of this InvalidInputExceptionInfo [Optional]. - exception_stack: The exception_stack of this InvalidInputExceptionInfo [Optional]. - validation_errors: The validation_errors of this InvalidInputExceptionInfo. 
- """ - - message: str - exception_class_name: Optional[str] = None - exception_stack: Optional[List[str]] = None - validation_errors: List[InvalidInputProperty] - -InvalidInputExceptionInfo.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_property.py b/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_property.py deleted file mode 100644 index 35e17c112090..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_property.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class InvalidInputProperty(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - InvalidInputProperty - a model defined in OpenAPI - - property_path: The property_path of this InvalidInputProperty. - invalid_value: The invalid_value of this InvalidInputProperty [Optional]. - message: The message of this InvalidInputProperty [Optional]. 
- """ - - property_path: str - invalid_value: Optional[str] = None - message: Optional[str] = None - -InvalidInputProperty.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/json_decoder.py b/airbyte-connector-builder-server/connector_builder/generated/models/json_decoder.py deleted file mode 100644 index cfb540e19c7c..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/json_decoder.py +++ /dev/null @@ -1,22 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class JsonDecoder(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - JsonDecoder - a model defined in OpenAPI - - """ - - -JsonDecoder.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/json_file_schema_loader.py b/airbyte-connector-builder-server/connector_builder/generated/models/json_file_schema_loader.py deleted file mode 100644 index 2e2dd066caa9..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/json_file_schema_loader.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.json_file_schema_loader_all_of import JsonFileSchemaLoaderAllOf - - -class JsonFileSchemaLoader(BaseModel): - 
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - JsonFileSchemaLoader - a model defined in OpenAPI - - config: The config of this JsonFileSchemaLoader. - file_path: The file_path of this JsonFileSchemaLoader [Optional]. - """ - - config: Dict[str, Any] = Field(alias="config") - file_path: Optional[AnyOfInterpolatedStringstring] = Field(alias="file_path", default=None) - -JsonFileSchemaLoader.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/json_file_schema_loader_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/json_file_schema_loader_all_of.py deleted file mode 100644 index 608fedfe1742..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/json_file_schema_loader_all_of.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class JsonFileSchemaLoaderAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - JsonFileSchemaLoaderAllOf - a model defined in OpenAPI - - config: The config of this JsonFileSchemaLoaderAllOf. - file_path: The file_path of this JsonFileSchemaLoaderAllOf [Optional]. 
- """ - - config: Dict[str, Any] = Field(alias="config") - file_path: Optional[AnyOfInterpolatedStringstring] = Field(alias="file_path", default=None) - -JsonFileSchemaLoaderAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/known_exception_info.py b/airbyte-connector-builder-server/connector_builder/generated/models/known_exception_info.py deleted file mode 100644 index d349fddc9cb5..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/known_exception_info.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class KnownExceptionInfo(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - KnownExceptionInfo - a model defined in OpenAPI - - message: The message of this KnownExceptionInfo. - exception_class_name: The exception_class_name of this KnownExceptionInfo [Optional]. - exception_stack: The exception_stack of this KnownExceptionInfo [Optional]. 
- """ - - message: str - exception_class_name: Optional[str] = None - exception_stack: Optional[List[str]] = None - -KnownExceptionInfo.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/list_stream_slicer.py b/airbyte-connector-builder-server/connector_builder/generated/models/list_stream_slicer.py deleted file mode 100644 index bf8d576aecc8..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/list_stream_slicer.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_ofarraystring import AnyOfarraystring -from connector_builder.generated.models.list_stream_slicer_all_of import ListStreamSlicerAllOf -from connector_builder.generated.models.request_option import RequestOption -from connector_builder.generated.models.stream_slicer import StreamSlicer - - -class ListStreamSlicer(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ListStreamSlicer - a model defined in OpenAPI - - slice_values: The slice_values of this ListStreamSlicer. - cursor_field: The cursor_field of this ListStreamSlicer. - config: The config of this ListStreamSlicer. - request_option: The request_option of this ListStreamSlicer [Optional]. 
- """ - - slice_values: AnyOfarraystring = Field(alias="slice_values") - cursor_field: AnyOfInterpolatedStringstring = Field(alias="cursor_field") - config: Dict[str, Any] = Field(alias="config") - request_option: Optional[RequestOption] = Field(alias="request_option", default=None) - -ListStreamSlicer.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/list_stream_slicer_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/list_stream_slicer_all_of.py deleted file mode 100644 index be3b00d97dd0..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/list_stream_slicer_all_of.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_ofarraystring import AnyOfarraystring -from connector_builder.generated.models.request_option import RequestOption - - -class ListStreamSlicerAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ListStreamSlicerAllOf - a model defined in OpenAPI - - slice_values: The slice_values of this ListStreamSlicerAllOf. - cursor_field: The cursor_field of this ListStreamSlicerAllOf. - config: The config of this ListStreamSlicerAllOf. - request_option: The request_option of this ListStreamSlicerAllOf [Optional]. 
- """ - - slice_values: AnyOfarraystring = Field(alias="slice_values") - cursor_field: AnyOfInterpolatedStringstring = Field(alias="cursor_field") - config: Dict[str, Any] = Field(alias="config") - request_option: Optional[RequestOption] = Field(alias="request_option", default=None) - -ListStreamSlicerAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/min_max_datetime.py b/airbyte-connector-builder-server/connector_builder/generated/models/min_max_datetime.py deleted file mode 100644 index 074e9adb17ff..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/min_max_datetime.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class MinMaxDatetime(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - MinMaxDatetime - a model defined in OpenAPI - - datetime: The datetime of this MinMaxDatetime. - datetime_format: The datetime_format of this MinMaxDatetime [Optional]. - datetime_format: The datetime_format of this MinMaxDatetime [Optional]. - min_datetime: The min_datetime of this MinMaxDatetime [Optional]. - max_datetime: The max_datetime of this MinMaxDatetime [Optional]. 
- """ - - datetime: AnyOfInterpolatedStringstring = Field(alias="datetime") - datetime_format: Optional[str] = Field(alias="datetime_format", default=None) - datetime_format: Optional[str] = Field(alias="_datetime_format", default=None) - min_datetime: Optional[AnyOfInterpolatedStringstring] = Field(alias="min_datetime", default=None) - max_datetime: Optional[AnyOfInterpolatedStringstring] = Field(alias="max_datetime", default=None) - -MinMaxDatetime.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/no_auth.py b/airbyte-connector-builder-server/connector_builder/generated/models/no_auth.py deleted file mode 100644 index 79d9cad7ace4..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/no_auth.py +++ /dev/null @@ -1,22 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class NoAuth(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
- - NoAuth - a model defined in OpenAPI - - """ - - -NoAuth.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/no_pagination.py b/airbyte-connector-builder-server/connector_builder/generated/models/no_pagination.py deleted file mode 100644 index ee4c2303efb9..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/no_pagination.py +++ /dev/null @@ -1,23 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.paginator import Paginator - - -class NoPagination(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - NoPagination - a model defined in OpenAPI - - """ - - -NoPagination.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/offset_increment.py b/airbyte-connector-builder-server/connector_builder/generated/models/offset_increment.py deleted file mode 100644 index 8c8e719fc553..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/offset_increment.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.offset_increment_all_of import OffsetIncrementAllOf - - -class OffsetIncrement(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
- - Do not edit the class manually. - - OffsetIncrement - a model defined in OpenAPI - - page_size: The page_size of this OffsetIncrement. - """ - - page_size: int = Field(alias="page_size") - -OffsetIncrement.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/offset_increment_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/offset_increment_all_of.py deleted file mode 100644 index 3e3e799b73c7..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/offset_increment_all_of.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class OffsetIncrementAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - OffsetIncrementAllOf - a model defined in OpenAPI - - page_size: The page_size of this OffsetIncrementAllOf. 
- """ - - page_size: int = Field(alias="page_size") - -OffsetIncrementAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/page_increment.py b/airbyte-connector-builder-server/connector_builder/generated/models/page_increment.py deleted file mode 100644 index 0b366c690dc6..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/page_increment.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.page_increment_all_of import PageIncrementAllOf - - -class PageIncrement(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - PageIncrement - a model defined in OpenAPI - - page_size: The page_size of this PageIncrement. - start_from_page: The start_from_page of this PageIncrement [Optional]. 
- """ - - page_size: int = Field(alias="page_size") - start_from_page: Optional[int] = Field(alias="start_from_page", default=None) - -PageIncrement.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/page_increment_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/page_increment_all_of.py deleted file mode 100644 index 2276244db68b..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/page_increment_all_of.py +++ /dev/null @@ -1,26 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class PageIncrementAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - PageIncrementAllOf - a model defined in OpenAPI - - page_size: The page_size of this PageIncrementAllOf. - start_from_page: The start_from_page of this PageIncrementAllOf [Optional]. 
- """ - - page_size: int = Field(alias="page_size") - start_from_page: Optional[int] = Field(alias="start_from_page", default=None) - -PageIncrementAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/paginator.py b/airbyte-connector-builder-server/connector_builder/generated/models/paginator.py deleted file mode 100644 index 735e4e849d17..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/paginator.py +++ /dev/null @@ -1,22 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class Paginator(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - Paginator - a model defined in OpenAPI - - """ - - -Paginator.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/parent_stream_config.py b/airbyte-connector-builder-server/connector_builder/generated/models/parent_stream_config.py deleted file mode 100644 index 6daf56b137e4..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/parent_stream_config.py +++ /dev/null @@ -1,31 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.request_option import RequestOption - - -class ParentStreamConfig(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
- - Do not edit the class manually. - - ParentStreamConfig - a model defined in OpenAPI - - stream: The stream of this ParentStreamConfig. - parent_key: The parent_key of this ParentStreamConfig. - stream_slice_field: The stream_slice_field of this ParentStreamConfig. - request_option: The request_option of this ParentStreamConfig [Optional]. - """ - - stream: object = Field(alias="stream") - parent_key: str = Field(alias="parent_key") - stream_slice_field: str = Field(alias="stream_slice_field") - request_option: Optional[RequestOption] = Field(alias="request_option", default=None) - -ParentStreamConfig.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/parsed_add_field_definition.py b/airbyte-connector-builder-server/connector_builder/generated/models/parsed_add_field_definition.py deleted file mode 100644 index 3a03c061ef3a..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/parsed_add_field_definition.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.interpolated_string import InterpolatedString - - -class ParsedAddFieldDefinition(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ParsedAddFieldDefinition - a model defined in OpenAPI - - path: The path of this ParsedAddFieldDefinition. - value: The value of this ParsedAddFieldDefinition. 
- """ - - path: List[str] = Field(alias="path") - value: InterpolatedString = Field(alias="value") - -ParsedAddFieldDefinition.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/record_filter.py b/airbyte-connector-builder-server/connector_builder/generated/models/record_filter.py deleted file mode 100644 index abe54df6b6b7..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/record_filter.py +++ /dev/null @@ -1,26 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class RecordFilter(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - RecordFilter - a model defined in OpenAPI - - config: The config of this RecordFilter. - condition: The condition of this RecordFilter [Optional]. 
- """ - - config: Dict[str, Any] = Field(alias="config") - condition: Optional[str] = Field(alias="condition", default=None) - -RecordFilter.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/record_selector.py b/airbyte-connector-builder-server/connector_builder/generated/models/record_selector.py deleted file mode 100644 index b0f46f9971ec..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/record_selector.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.dpath_extractor import DpathExtractor -from connector_builder.generated.models.record_filter import RecordFilter -from connector_builder.generated.models.record_selector_all_of import RecordSelectorAllOf - - -class RecordSelector(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - RecordSelector - a model defined in OpenAPI - - extractor: The extractor of this RecordSelector. - record_filter: The record_filter of this RecordSelector [Optional]. 
- """ - - extractor: DpathExtractor = Field(alias="extractor") - record_filter: Optional[RecordFilter] = Field(alias="record_filter", default=None) - -RecordSelector.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/record_selector_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/record_selector_all_of.py deleted file mode 100644 index 0a719cc36eb7..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/record_selector_all_of.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.dpath_extractor import DpathExtractor -from connector_builder.generated.models.record_filter import RecordFilter - - -class RecordSelectorAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - RecordSelectorAllOf - a model defined in OpenAPI - - extractor: The extractor of this RecordSelectorAllOf. - record_filter: The record_filter of this RecordSelectorAllOf [Optional]. 
- """ - - extractor: DpathExtractor = Field(alias="extractor") - record_filter: Optional[RecordFilter] = Field(alias="record_filter", default=None) - -RecordSelectorAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/remove_fields.py b/airbyte-connector-builder-server/connector_builder/generated/models/remove_fields.py deleted file mode 100644 index ebdbca73373c..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/remove_fields.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.remove_fields_all_of import RemoveFieldsAllOf - - -class RemoveFields(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - RemoveFields - a model defined in OpenAPI - - field_pointers: The field_pointers of this RemoveFields. 
- """ - - field_pointers: List[List[str]] = Field(alias="field_pointers") - -RemoveFields.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/remove_fields_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/remove_fields_all_of.py deleted file mode 100644 index a410da99f47f..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/remove_fields_all_of.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class RemoveFieldsAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - RemoveFieldsAllOf - a model defined in OpenAPI - - field_pointers: The field_pointers of this RemoveFieldsAllOf. 
- """ - - field_pointers: List[List[str]] = Field(alias="field_pointers") - -RemoveFieldsAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/request_option.py b/airbyte-connector-builder-server/connector_builder/generated/models/request_option.py deleted file mode 100644 index b733d4daf372..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/request_option.py +++ /dev/null @@ -1,26 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class RequestOption(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - RequestOption - a model defined in OpenAPI - - inject_into: The inject_into of this RequestOption. - field_name: The field_name of this RequestOption [Optional]. 
- """ - - inject_into: str = Field(alias="inject_into") - field_name: Optional[str] = Field(alias="field_name", default=None) - -RequestOption.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/requester.py b/airbyte-connector-builder-server/connector_builder/generated/models/requester.py deleted file mode 100644 index ef05b9ae1b70..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/requester.py +++ /dev/null @@ -1,22 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class Requester(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - Requester - a model defined in OpenAPI - - """ - - -Requester.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/resolve_manifest.py b/airbyte-connector-builder-server/connector_builder/generated/models/resolve_manifest.py deleted file mode 100644 index 81bcc339a537..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/resolve_manifest.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class ResolveManifest(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
- - ResolveManifest - a model defined in OpenAPI - - manifest: The manifest of this ResolveManifest. - """ - - manifest: Dict[str, Any] - -ResolveManifest.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/resolve_manifest_request_body.py b/airbyte-connector-builder-server/connector_builder/generated/models/resolve_manifest_request_body.py deleted file mode 100644 index 91aded8ec523..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/resolve_manifest_request_body.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class ResolveManifestRequestBody(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - ResolveManifestRequestBody - a model defined in OpenAPI - - manifest: The manifest of this ResolveManifestRequestBody. 
- """ - - manifest: Dict[str, Any] - -ResolveManifestRequestBody.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/simple_retriever.py b/airbyte-connector-builder-server/connector_builder/generated/models/simple_retriever.py deleted file mode 100644 index f6f5567029ba..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/simple_retriever.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_cartesian_product_stream_slicer_datetime_stream_slicer_list_stream_slicer_single_slice_substream_slicer import AnyOfCartesianProductStreamSlicerDatetimeStreamSlicerListStreamSlicerSingleSliceSubstreamSlicer -from connector_builder.generated.models.any_of_default_paginator_no_pagination import AnyOfDefaultPaginatorNoPagination -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_ofarrayarraystring import AnyOfarrayarraystring -from connector_builder.generated.models.http_requester import HttpRequester -from connector_builder.generated.models.record_selector import RecordSelector -from connector_builder.generated.models.simple_retriever_all_of import SimpleRetrieverAllOf - - -class SimpleRetriever(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - SimpleRetriever - a model defined in OpenAPI - - requester: The requester of this SimpleRetriever. - record_selector: The record_selector of this SimpleRetriever. - config: The config of this SimpleRetriever. - name: The name of this SimpleRetriever [Optional]. 
- name: The name of this SimpleRetriever [Optional]. - primary_key: The primary_key of this SimpleRetriever [Optional]. - primary_key: The primary_key of this SimpleRetriever [Optional]. - paginator: The paginator of this SimpleRetriever [Optional]. - stream_slicer: The stream_slicer of this SimpleRetriever [Optional]. - """ - - requester: HttpRequester = Field(alias="requester") - record_selector: RecordSelector = Field(alias="record_selector") - config: Dict[str, Any] = Field(alias="config") - name: Optional[str] = Field(alias="name", default=None) - name: Optional[AnyOfInterpolatedStringstring] = Field(alias="_name", default=None) - primary_key: Optional[AnyOfarrayarraystring] = Field(alias="primary_key", default=None) - primary_key: Optional[str] = Field(alias="_primary_key", default=None) - paginator: Optional[AnyOfDefaultPaginatorNoPagination] = Field(alias="paginator", default=None) - stream_slicer: Optional[AnyOfCartesianProductStreamSlicerDatetimeStreamSlicerListStreamSlicerSingleSliceSubstreamSlicer] = Field(alias="stream_slicer", default=None) - -SimpleRetriever.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/simple_retriever_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/simple_retriever_all_of.py deleted file mode 100644 index 4f11b427796c..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/simple_retriever_all_of.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_cartesian_product_stream_slicer_datetime_stream_slicer_list_stream_slicer_single_slice_substream_slicer import 
AnyOfCartesianProductStreamSlicerDatetimeStreamSlicerListStreamSlicerSingleSliceSubstreamSlicer -from connector_builder.generated.models.any_of_default_paginator_no_pagination import AnyOfDefaultPaginatorNoPagination -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.any_ofarrayarraystring import AnyOfarrayarraystring -from connector_builder.generated.models.http_requester import HttpRequester -from connector_builder.generated.models.record_selector import RecordSelector - - -class SimpleRetrieverAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - SimpleRetrieverAllOf - a model defined in OpenAPI - - requester: The requester of this SimpleRetrieverAllOf. - record_selector: The record_selector of this SimpleRetrieverAllOf. - config: The config of this SimpleRetrieverAllOf. - name: The name of this SimpleRetrieverAllOf [Optional]. - name: The name of this SimpleRetrieverAllOf [Optional]. - primary_key: The primary_key of this SimpleRetrieverAllOf [Optional]. - primary_key: The primary_key of this SimpleRetrieverAllOf [Optional]. - paginator: The paginator of this SimpleRetrieverAllOf [Optional]. - stream_slicer: The stream_slicer of this SimpleRetrieverAllOf [Optional]. 
- """ - - requester: HttpRequester = Field(alias="requester") - record_selector: RecordSelector = Field(alias="record_selector") - config: Dict[str, Any] = Field(alias="config") - name: Optional[str] = Field(alias="name", default=None) - name: Optional[AnyOfInterpolatedStringstring] = Field(alias="_name", default=None) - primary_key: Optional[AnyOfarrayarraystring] = Field(alias="primary_key", default=None) - primary_key: Optional[str] = Field(alias="_primary_key", default=None) - paginator: Optional[AnyOfDefaultPaginatorNoPagination] = Field(alias="paginator", default=None) - stream_slicer: Optional[AnyOfCartesianProductStreamSlicerDatetimeStreamSlicerListStreamSlicerSingleSliceSubstreamSlicer] = Field(alias="stream_slicer", default=None) - -SimpleRetrieverAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/single_slice.py b/airbyte-connector-builder-server/connector_builder/generated/models/single_slice.py deleted file mode 100644 index 35048024b976..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/single_slice.py +++ /dev/null @@ -1,23 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.stream_slicer import StreamSlicer - - -class SingleSlice(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
- - SingleSlice - a model defined in OpenAPI - - """ - - -SingleSlice.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read.py deleted file mode 100644 index 775b148fa3a7..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read.py +++ /dev/null @@ -1,31 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 -from connector_builder.generated.models.stream_read_slices import StreamReadSlices - - -class StreamRead(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - StreamRead - a model defined in OpenAPI - - logs: The logs of this StreamRead. - slices: The slices of this StreamRead. - test_read_limit_reached: The test_read_limit_reached of this StreamRead. - inferred_schema: The inferred_schema of this StreamRead [Optional]. 
- """ - - logs: List[object] - slices: List[StreamReadSlices] - test_read_limit_reached: bool - inferred_schema: Optional[Dict[str, Any]] = None - -StreamRead.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_pages.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_pages.py deleted file mode 100644 index e79feb8fc382..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_pages.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 -from connector_builder.generated.models.http_request import HttpRequest -from connector_builder.generated.models.http_response import HttpResponse - - -class StreamReadPages(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - StreamReadPages - a model defined in OpenAPI - - records: The records of this StreamReadPages. - request: The request of this StreamReadPages [Optional]. - response: The response of this StreamReadPages [Optional]. 
- """ - - records: List[object] - request: Optional[HttpRequest] = None - response: Optional[HttpResponse] = None - -StreamReadPages.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_request_body.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_request_body.py deleted file mode 100644 index 78bddf225b00..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_request_body.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class StreamReadRequestBody(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - StreamReadRequestBody - a model defined in OpenAPI - - manifest: The manifest of this StreamReadRequestBody. - stream: The stream of this StreamReadRequestBody. - config: The config of this StreamReadRequestBody. - state: The state of this StreamReadRequestBody [Optional]. - record_limit: The record_limit of this StreamReadRequestBody [Optional]. 
- """ - - manifest: Dict[str, Any] - stream: str - config: Dict[str, Any] - state: Optional[Dict[str, Any]] = None - record_limit: Optional[int] = None - - @validator("record_limit") - def record_limit_max(cls, value): - assert value <= 1000 - return value - - @validator("record_limit") - def record_limit_min(cls, value): - assert value >= 1 - return value - -StreamReadRequestBody.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slice_descriptor.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slice_descriptor.py deleted file mode 100644 index d8cbe98f031a..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slice_descriptor.py +++ /dev/null @@ -1,26 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class StreamReadSliceDescriptor(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - StreamReadSliceDescriptor - a model defined in OpenAPI - - start_datetime: The start_datetime of this StreamReadSliceDescriptor [Optional]. - list_item: The list_item of this StreamReadSliceDescriptor [Optional]. 
- """ - - start_datetime: Optional[datetime] = None - list_item: Optional[str] = None - -StreamReadSliceDescriptor.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slices.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slices.py deleted file mode 100644 index 1cfddd2a9efb..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slices.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 -from connector_builder.generated.models.stream_read_pages import StreamReadPages -from connector_builder.generated.models.stream_read_slice_descriptor import StreamReadSliceDescriptor - - -class StreamReadSlices(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - StreamReadSlices - a model defined in OpenAPI - - pages: The pages of this StreamReadSlices. - slice_descriptor: The slice_descriptor of this StreamReadSlices [Optional]. - state: The state of this StreamReadSlices [Optional]. 
- """ - - pages: List[StreamReadPages] - slice_descriptor: Optional[StreamReadSliceDescriptor] = None - state: Optional[Dict[str, Any]] = None - -StreamReadSlices.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_slicer.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_slicer.py deleted file mode 100644 index 56c37db2c82d..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/stream_slicer.py +++ /dev/null @@ -1,22 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 - - -class StreamSlicer(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - StreamSlicer - a model defined in OpenAPI - - """ - - -StreamSlicer.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read.py b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read.py deleted file mode 100644 index 746b0daccb44..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 -from connector_builder.generated.models.streams_list_read_streams import StreamsListReadStreams - - -class StreamsListRead(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
- - Do not edit the class manually. - - StreamsListRead - a model defined in OpenAPI - - streams: The streams of this StreamsListRead. - """ - - streams: List[StreamsListReadStreams] - -StreamsListRead.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read_streams.py b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read_streams.py deleted file mode 100644 index 643de2043e07..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read_streams.py +++ /dev/null @@ -1,26 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class StreamsListReadStreams(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - StreamsListReadStreams - a model defined in OpenAPI - - name: The name of this StreamsListReadStreams. - url: The url of this StreamsListReadStreams. 
- """ - - name: str - url: str - -StreamsListReadStreams.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_request_body.py b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_request_body.py deleted file mode 100644 index 1cee99805f05..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_request_body.py +++ /dev/null @@ -1,26 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 - - -class StreamsListRequestBody(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - StreamsListRequestBody - a model defined in OpenAPI - - manifest: The manifest of this StreamsListRequestBody. - config: The config of this StreamsListRequestBody. 
- """ - - manifest: Dict[str, Any] - config: Dict[str, Any] - -StreamsListRequestBody.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/substream_slicer.py b/airbyte-connector-builder-server/connector_builder/generated/models/substream_slicer.py deleted file mode 100644 index a5850b6fcb13..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/substream_slicer.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.parent_stream_config import ParentStreamConfig -from connector_builder.generated.models.stream_slicer import StreamSlicer -from connector_builder.generated.models.substream_slicer_all_of import SubstreamSlicerAllOf - - -class SubstreamSlicer(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - SubstreamSlicer - a model defined in OpenAPI - - parent_stream_configs: The parent_stream_configs of this SubstreamSlicer. 
- """ - - parent_stream_configs: List[ParentStreamConfig] = Field(alias="parent_stream_configs") - -SubstreamSlicer.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/substream_slicer_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/substream_slicer_all_of.py deleted file mode 100644 index dab0552fa671..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/substream_slicer_all_of.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.parent_stream_config import ParentStreamConfig - - -class SubstreamSlicerAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - SubstreamSlicerAllOf - a model defined in OpenAPI - - parent_stream_configs: The parent_stream_configs of this SubstreamSlicerAllOf. 
- """ - - parent_stream_configs: List[ParentStreamConfig] = Field(alias="parent_stream_configs") - -SubstreamSlicerAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/wait_time_from_header_backoff_strategy.py b/airbyte-connector-builder-server/connector_builder/generated/models/wait_time_from_header_backoff_strategy.py deleted file mode 100644 index cfaf856674c5..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/wait_time_from_header_backoff_strategy.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.wait_time_from_header_backoff_strategy_all_of import WaitTimeFromHeaderBackoffStrategyAllOf - - -class WaitTimeFromHeaderBackoffStrategy(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - WaitTimeFromHeaderBackoffStrategy - a model defined in OpenAPI - - header: The header of this WaitTimeFromHeaderBackoffStrategy. - config: The config of this WaitTimeFromHeaderBackoffStrategy. - regex: The regex of this WaitTimeFromHeaderBackoffStrategy [Optional]. 
- """ - - header: AnyOfInterpolatedStringstring = Field(alias="header") - config: Dict[str, Any] = Field(alias="config") - regex: Optional[str] = Field(alias="regex", default=None) - -WaitTimeFromHeaderBackoffStrategy.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/wait_time_from_header_backoff_strategy_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/wait_time_from_header_backoff_strategy_all_of.py deleted file mode 100644 index 7b2104be2636..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/wait_time_from_header_backoff_strategy_all_of.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class WaitTimeFromHeaderBackoffStrategyAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - WaitTimeFromHeaderBackoffStrategyAllOf - a model defined in OpenAPI - - header: The header of this WaitTimeFromHeaderBackoffStrategyAllOf. - config: The config of this WaitTimeFromHeaderBackoffStrategyAllOf. - regex: The regex of this WaitTimeFromHeaderBackoffStrategyAllOf [Optional]. 
- """ - - header: AnyOfInterpolatedStringstring = Field(alias="header") - config: Dict[str, Any] = Field(alias="config") - regex: Optional[str] = Field(alias="regex", default=None) - -WaitTimeFromHeaderBackoffStrategyAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/wait_until_time_from_header_backoff_strategy.py b/airbyte-connector-builder-server/connector_builder/generated/models/wait_until_time_from_header_backoff_strategy.py deleted file mode 100644 index 2bda5657d237..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/wait_until_time_from_header_backoff_strategy.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringnumberstring import AnyOfInterpolatedStringnumberstring -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring -from connector_builder.generated.models.wait_until_time_from_header_backoff_strategy_all_of import WaitUntilTimeFromHeaderBackoffStrategyAllOf - - -class WaitUntilTimeFromHeaderBackoffStrategy(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - WaitUntilTimeFromHeaderBackoffStrategy - a model defined in OpenAPI - - header: The header of this WaitUntilTimeFromHeaderBackoffStrategy. - config: The config of this WaitUntilTimeFromHeaderBackoffStrategy. - min_wait: The min_wait of this WaitUntilTimeFromHeaderBackoffStrategy [Optional]. - regex: The regex of this WaitUntilTimeFromHeaderBackoffStrategy [Optional]. 
- """ - - header: AnyOfInterpolatedStringstring = Field(alias="header") - config: Dict[str, Any] = Field(alias="config") - min_wait: Optional[AnyOfInterpolatedStringnumberstring] = Field(alias="min_wait", default=None) - regex: Optional[AnyOfInterpolatedStringstring] = Field(alias="regex", default=None) - -WaitUntilTimeFromHeaderBackoffStrategy.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/wait_until_time_from_header_backoff_strategy_all_of.py b/airbyte-connector-builder-server/connector_builder/generated/models/wait_until_time_from_header_backoff_strategy_all_of.py deleted file mode 100644 index 31e6f0965248..000000000000 --- a/airbyte-connector-builder-server/connector_builder/generated/models/wait_until_time_from_header_backoff_strategy_all_of.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding: utf-8 - -from __future__ import annotations -from datetime import date, datetime # noqa: F401 - -import re # noqa: F401 -from typing import Any, Dict, List, Optional # noqa: F401 - -from pydantic import AnyUrl, BaseModel, EmailStr, Field, validator # noqa: F401 -from connector_builder.generated.models.any_of_interpolated_stringnumberstring import AnyOfInterpolatedStringnumberstring -from connector_builder.generated.models.any_of_interpolated_stringstring import AnyOfInterpolatedStringstring - - -class WaitUntilTimeFromHeaderBackoffStrategyAllOf(BaseModel): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - - WaitUntilTimeFromHeaderBackoffStrategyAllOf - a model defined in OpenAPI - - header: The header of this WaitUntilTimeFromHeaderBackoffStrategyAllOf. - config: The config of this WaitUntilTimeFromHeaderBackoffStrategyAllOf. - min_wait: The min_wait of this WaitUntilTimeFromHeaderBackoffStrategyAllOf [Optional]. - regex: The regex of this WaitUntilTimeFromHeaderBackoffStrategyAllOf [Optional]. 
- """ - - header: AnyOfInterpolatedStringstring = Field(alias="header") - config: Dict[str, Any] = Field(alias="config") - min_wait: Optional[AnyOfInterpolatedStringnumberstring] = Field(alias="min_wait", default=None) - regex: Optional[AnyOfInterpolatedStringstring] = Field(alias="regex", default=None) - -WaitUntilTimeFromHeaderBackoffStrategyAllOf.update_forward_refs() \ No newline at end of file diff --git a/airbyte-connector-builder-server/connector_builder/impl/adapter.py b/airbyte-connector-builder-server/connector_builder/impl/adapter.py deleted file mode 100644 index c2a2e739a606..000000000000 --- a/airbyte-connector-builder-server/connector_builder/impl/adapter.py +++ /dev/null @@ -1,42 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from abc import ABC, abstractmethod -from typing import Any, Dict, Iterator, List - -from airbyte_cdk.models import AirbyteMessage -from airbyte_cdk.sources.streams.http import HttpStream - - -class CdkAdapter(ABC): - """ - Abstract base class for the connector builder's CDK adapter. - """ - - @abstractmethod - def get_http_streams(self, config: Dict[str, Any]) -> List[HttpStream]: - """ - Gets a list of HTTP streams. - - :param config: The user-provided configuration as specified by the source's spec. - :return: A list of `HttpStream`s. - """ - - @abstractmethod - def read_stream(self, stream: str, config: Dict[str, Any]) -> Iterator[AirbyteMessage]: - """ - Reads data from the specified stream. - - :param stream: stream - :param config: The user-provided configuration as specified by the source's spec. - :return: An iterator over `AirbyteMessage` objects. 
- """ - - -class CdkAdapterFactory(ABC): - - @abstractmethod - def create(self, manifest: Dict[str, Any]) -> CdkAdapter: - """Return an implementation of CdkAdapter""" - pass diff --git a/airbyte-connector-builder-server/connector_builder/impl/default_api.py b/airbyte-connector-builder-server/connector_builder/impl/default_api.py deleted file mode 100644 index d3b7ea5a8bdf..000000000000 --- a/airbyte-connector-builder-server/connector_builder/impl/default_api.py +++ /dev/null @@ -1,308 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json -import logging -import traceback -from json import JSONDecodeError -from typing import Any, Dict, Iterable, Iterator, Optional, Union -from urllib.parse import parse_qs, urljoin, urlparse - -from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Type -from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource -from airbyte_cdk.utils.schema_inferrer import SchemaInferrer -from connector_builder.generated.apis.default_api_interface import DefaultApi -from connector_builder.generated.models.http_request import HttpRequest -from connector_builder.generated.models.http_response import HttpResponse -from connector_builder.generated.models.resolve_manifest import ResolveManifest -from connector_builder.generated.models.resolve_manifest_request_body import ResolveManifestRequestBody -from connector_builder.generated.models.stream_read import StreamRead -from connector_builder.generated.models.stream_read_pages import StreamReadPages -from connector_builder.generated.models.stream_read_request_body import StreamReadRequestBody -from connector_builder.generated.models.stream_read_slices import StreamReadSlices -from connector_builder.generated.models.streams_list_read import StreamsListRead -from connector_builder.generated.models.streams_list_read_streams import StreamsListReadStreams -from connector_builder.generated.models.streams_list_request_body import 
StreamsListRequestBody -from connector_builder.impl.adapter import CdkAdapter, CdkAdapterFactory -from fastapi import Body, HTTPException -from jsonschema import ValidationError - - -class DefaultApiImpl(DefaultApi): - - logger = logging.getLogger("airbyte.connector-builder") - - def __init__(self, adapter_factory: CdkAdapterFactory, max_pages_per_slice, max_slices, max_record_limit: int = 1000): - self.adapter_factory = adapter_factory - self._max_pages_per_slice = max_pages_per_slice - self._max_slices = max_slices - self.max_record_limit = max_record_limit - - super().__init__() - - async def get_manifest_template(self) -> str: - return """version: "0.1.0" -definitions: - selector: - extractor: - field_pointer: [] - requester: - url_base: "https://example.com" - http_method: "GET" - authenticator: - type: BearerAuthenticator - api_token: "{{ config['api_key'] }}" - retriever: - record_selector: - $ref: "*ref(definitions.selector)" - paginator: - type: NoPagination - requester: - $ref: "*ref(definitions.requester)" - base_stream: - retriever: - $ref: "*ref(definitions.retriever)" - customers_stream: - $ref: "*ref(definitions.base_stream)" - $options: - name: "customers" - primary_key: "id" - path: "/example" - -streams: - - "*ref(definitions.customers_stream)" - -check: - stream_names: - - "customers" - -spec: - documentation_url: https://docsurl.com - connection_specification: - title: Source Name Spec # 'TODO: Replace this with the name of your source.' - type: object - required: - - api_key - additionalProperties: true - properties: - # 'TODO: This schema defines the configuration required for the source. 
This usually involves metadata such as database and/or authentication information.': - api_key: - type: string - description: API Key -""" - - async def list_streams(self, streams_list_request_body: StreamsListRequestBody = Body(None, description="")) -> StreamsListRead: - """ - Takes in a low code manifest and a config to resolve the list of streams that are available for testing - :param streams_list_request_body: Input parameters to retrieve the list of available streams - :return: Stream objects made up of a stream name and the HTTP URL it will send requests to - """ - adapter = self._create_low_code_adapter(manifest=streams_list_request_body.manifest) - - stream_list_read = [] - try: - for http_stream in adapter.get_http_streams(streams_list_request_body.config): - stream_list_read.append( - StreamsListReadStreams( - name=http_stream.name, - url=urljoin(http_stream.url_base, http_stream.path()), - ) - ) - except Exception as error: - self.logger.error( - f"Could not list streams with with error: {error.args[0]} - {DefaultApiImpl._get_stacktrace_as_string(error)}" - ) - raise HTTPException(status_code=400, detail=f"Could not list streams with with error: {error.args[0]}") - return StreamsListRead(streams=stream_list_read) - - async def read_stream(self, stream_read_request_body: StreamReadRequestBody = Body(None, description="")) -> StreamRead: - """ - Using the provided manifest and config, invokes a sync for the specified stream and returns groups of Airbyte messages - that are produced during the read operation - :param stream_read_request_body: Input parameters to trigger the read operation for a stream - :param limit: The maximum number of records requested by the client (must be within the range [1, self.max_record_limit]) - :return: Airbyte record messages produced by the sync grouped by slice and page - """ - adapter = self._create_low_code_adapter(manifest=stream_read_request_body.manifest) - schema_inferrer = SchemaInferrer() - - if 
stream_read_request_body.record_limit is None: - record_limit = self.max_record_limit - else: - record_limit = min(stream_read_request_body.record_limit, self.max_record_limit) - - slices = [] - log_messages = [] - try: - for message_group in self._get_message_groups( - adapter.read_stream(stream_read_request_body.stream, stream_read_request_body.config), - schema_inferrer, - record_limit, - ): - if isinstance(message_group, AirbyteLogMessage): - log_messages.append({"message": message_group.message}) - else: - slices.append(message_group) - except Exception as error: - # TODO: We're temporarily using FastAPI's default exception model. Ideally we should use exceptions defined in the OpenAPI spec - self.logger.error(f"Could not perform read with with error: {error.args[0]} - {self._get_stacktrace_as_string(error)}") - raise HTTPException( - status_code=400, - detail=f"Could not perform read with with error: {error.args[0]}", - ) - - return StreamRead( - logs=log_messages, - slices=slices, - test_read_limit_reached=self._has_reached_limit(slices), - inferred_schema=schema_inferrer.get_stream_schema(stream_read_request_body.stream) - ) - - def _has_reached_limit(self, slices): - if len(slices) >= self._max_slices: - return True - - for slice in slices: - if len(slice.pages) >= self._max_pages_per_slice: - return True - return False - - async def resolve_manifest( - self, resolve_manifest_request_body: ResolveManifestRequestBody = Body(None, description="") - ) -> ResolveManifest: - """ - Using the provided manifest, resolves $refs and $options and returns the resulting manifest to the client. 
- :param manifest_resolve_request_body: Input manifest whose $refs and $options will be resolved - :return: Airbyte record messages produced by the sync grouped by slice and page - """ - try: - return ResolveManifest( - manifest=ManifestDeclarativeSource( - resolve_manifest_request_body.manifest, construct_using_pydantic_models=True - ).resolved_manifest - ) - except Exception as error: - self.logger.error(f"Could not resolve manifest with error: {error.args[0]} - {self._get_stacktrace_as_string(error)}") - raise HTTPException( - status_code=400, - detail=f"Could not resolve manifest with error: {error.args[0]}", - ) - - def _get_message_groups( - self, messages: Iterator[AirbyteMessage], schema_inferrer: SchemaInferrer, limit: int - ) -> Iterable[Union[StreamReadPages, AirbyteLogMessage]]: - """ - Message groups are partitioned according to when request log messages are received. Subsequent response log messages - and record messages belong to the prior request log message and when we encounter another request, append the latest - message group, until records have been read. - - Messages received from the CDK read operation will always arrive in the following order: - {type: LOG, log: {message: "request: ..."}} - {type: LOG, log: {message: "response: ..."}} - ... 
0 or more record messages - {type: RECORD, record: {data: ...}} - {type: RECORD, record: {data: ...}} - Repeats for each request/response made - - Note: The exception is that normal log messages can be received at any time which are not incorporated into grouping - """ - records_count = 0 - at_least_one_page_in_group = False - current_page_records = [] - current_slice_pages = [] - current_page_request: Optional[HttpRequest] = None - current_page_response: Optional[HttpResponse] = None - - while records_count < limit and (message := next(messages, None)): - if self._need_to_close_page(at_least_one_page_in_group, message): - self._close_page(current_page_request, current_page_response, current_slice_pages, current_page_records) - current_page_request = None - current_page_response = None - - if at_least_one_page_in_group and message.type == Type.LOG and message.log.message.startswith("slice:"): - yield StreamReadSlices(pages=current_slice_pages) - current_slice_pages = [] - at_least_one_page_in_group = False - elif message.type == Type.LOG and message.log.message.startswith("request:"): - if not at_least_one_page_in_group: - at_least_one_page_in_group = True - current_page_request = self._create_request_from_log_message(message.log) - elif message.type == Type.LOG and message.log.message.startswith("response:"): - current_page_response = self._create_response_from_log_message(message.log) - elif message.type == Type.LOG: - yield message.log - elif message.type == Type.RECORD: - current_page_records.append(message.record.data) - records_count += 1 - schema_inferrer.accumulate(message.record) - else: - self._close_page(current_page_request, current_page_response, current_slice_pages, current_page_records) - yield StreamReadSlices(pages=current_slice_pages) - - @staticmethod - def _need_to_close_page(at_least_one_page_in_group, message): - return ( - at_least_one_page_in_group - and message.type == Type.LOG - and (message.log.message.startswith("request:") or 
message.log.message.startswith("slice:")) - ) - - @staticmethod - def _close_page(current_page_request, current_page_response, current_slice_pages, current_page_records): - if not current_page_request or not current_page_response: - raise ValueError("Every message grouping should have at least one request and response") - - current_slice_pages.append( - StreamReadPages(request=current_page_request, response=current_page_response, records=current_page_records) - ) - current_page_records.clear() - - def _create_request_from_log_message(self, log_message: AirbyteLogMessage) -> Optional[HttpRequest]: - # TODO: As a temporary stopgap, the CDK emits request data as a log message string. Ideally this should come in the - # form of a custom message object defined in the Airbyte protocol, but this unblocks us in the immediate while the - # protocol change is worked on. - raw_request = log_message.message.partition("request:")[2] - try: - request = json.loads(raw_request) - url = urlparse(request.get("url", "")) - full_path = f"{url.scheme}://{url.hostname}{url.path}" if url else "" - parameters = parse_qs(url.query) or None - return HttpRequest( - url=full_path, - http_method=request.get("http_method", ""), - headers=request.get("headers"), - parameters=parameters, - body=request.get("body"), - ) - except JSONDecodeError as error: - self.logger.warning(f"Failed to parse log message into request object with error: {error}") - return None - - def _create_response_from_log_message(self, log_message: AirbyteLogMessage) -> Optional[HttpResponse]: - # TODO: As a temporary stopgap, the CDK emits response data as a log message string. Ideally this should come in the - # form of a custom message object defined in the Airbyte protocol, but this unblocks us in the immediate while the - # protocol change is worked on. 
- raw_response = log_message.message.partition("response:")[2] - try: - response = json.loads(raw_response) - body = json.loads(response.get("body", "{}")) - return HttpResponse(status=response.get("status_code"), body=body, headers=response.get("headers")) - except JSONDecodeError as error: - self.logger.warning(f"Failed to parse log message into response object with error: {error}") - return None - - def _create_low_code_adapter(self, manifest: Dict[str, Any]) -> CdkAdapter: - try: - return self.adapter_factory.create(manifest) - except ValidationError as error: - # TODO: We're temporarily using FastAPI's default exception model. Ideally we should use exceptions defined in the OpenAPI spec - self.logger.error(f"Invalid connector manifest with error: {error.message} - {DefaultApiImpl._get_stacktrace_as_string(error)}") - raise HTTPException( - status_code=400, - detail=f"Invalid connector manifest with error: {error.message}", - ) - - @staticmethod - def _get_stacktrace_as_string(error) -> str: - return "".join(traceback.TracebackException.from_exception(error).format()) diff --git a/airbyte-connector-builder-server/connector_builder/impl/low_code_cdk_adapter.py b/airbyte-connector-builder-server/connector_builder/impl/low_code_cdk_adapter.py deleted file mode 100644 index 6542a0c194f3..000000000000 --- a/airbyte-connector-builder-server/connector_builder/impl/low_code_cdk_adapter.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from typing import Any, Dict, Iterator, List - -from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, ConfiguredAirbyteCatalog, Level -from airbyte_cdk.models import Type as MessageType -from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream -from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import ModelToComponentFactory -from airbyte_cdk.sources.declarative.yaml_declarative_source import ManifestDeclarativeSource -from airbyte_cdk.sources.streams.http import HttpStream -from connector_builder.impl.adapter import CdkAdapter, CdkAdapterFactory - - -class LowCodeSourceAdapter(CdkAdapter): - def __init__(self, manifest: Dict[str, Any], limit_page_fetched_per_slice, limit_slices_fetched): - # Request and response messages are only emitted for a sources that have debug turned on - self._source = ManifestDeclarativeSource( - manifest, - debug=True, - component_factory=ModelToComponentFactory(limit_page_fetched_per_slice, limit_slices_fetched) - ) - - def get_http_streams(self, config: Dict[str, Any]) -> List[HttpStream]: - http_streams = [] - for stream in self._source.streams(config=config): - if isinstance(stream, DeclarativeStream): - if isinstance(stream.retriever, HttpStream): - http_streams.append(stream.retriever) - else: - raise TypeError( - f"A declarative stream should only have a retriever of type HttpStream, but received: {stream.retriever.__class__}" - ) - else: - raise TypeError( - f"A declarative source should only contain streams of type DeclarativeStream, but received: {stream.__class__}" - ) - return http_streams - - def read_stream(self, stream: str, config: Dict[str, Any]) -> Iterator[AirbyteMessage]: - configured_catalog = ConfiguredAirbyteCatalog.parse_obj( - { - "streams": [ - { - "stream": { - "name": stream, - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite", - } - ] - } - ) - 
generator = self._source.read(logger=self._source.logger, config=config, catalog=configured_catalog) - - # the generator can raise an exception - # iterate over the generated messages. if next raise an exception, catch it and yield it as an AirbyteLogMessage - try: - yield from generator - except Exception as e: - yield AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=Level.ERROR, message=str(e))) - return - - -class LowCodeSourceAdapterFactory(CdkAdapterFactory): - - def __init__(self, max_pages_per_slice, max_slices): - self._max_pages_per_slice = max_pages_per_slice - self._max_slices = max_slices - - def create(self, manifest: Dict[str, Any]) -> CdkAdapter: - return LowCodeSourceAdapter(manifest, self._max_pages_per_slice, self._max_slices) diff --git a/airbyte-connector-builder-server/dist/connector_builder_server-0.40.21-py3.10.egg b/airbyte-connector-builder-server/dist/connector_builder_server-0.40.21-py3.10.egg deleted file mode 100644 index acb8ae91ba50..000000000000 Binary files a/airbyte-connector-builder-server/dist/connector_builder_server-0.40.21-py3.10.egg and /dev/null differ diff --git a/airbyte-connector-builder-server/gradle.properties b/airbyte-connector-builder-server/gradle.properties deleted file mode 100644 index 7c28b7f24921..000000000000 --- a/airbyte-connector-builder-server/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=connector-builder-server \ No newline at end of file diff --git a/airbyte-connector-builder-server/integration_tests/test_integration_test.py b/airbyte-connector-builder-server/integration_tests/test_integration_test.py deleted file mode 100644 index f5f14bc82edf..000000000000 --- a/airbyte-connector-builder-server/integration_tests/test_integration_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -def test_test(): - assert True diff --git a/airbyte-connector-builder-server/openapi/README.md b/airbyte-connector-builder-server/openapi/README.md deleted file mode 100644 index e6ceb3c4e9aa..000000000000 --- a/airbyte-connector-builder-server/openapi/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# OpenAPI templates -This directory contains custom OpenAPI templates used to generate Python code for the FastAPI. - -**But why?** - -At the time we made this service (Nov 2022), no OSS OpenAPI generators enabled spec-first development. So we made these custom templates. - -For the full context, see: https://github.com/airbytehq/airbyte/issues/17813 - -## How we're using templates -At a high level, the expected usage pattern for these templates is to generate code using the `python-fastapi` OpenAPI generator, then copy the `models` module and the `apis` into your project. This flow should work continuously i.e: as your spec evolves, it is safe to re-do this operation. - -The only change we're making to `python-fastapi` is to define an abstract class `AbstractApi` in which every method corresponds to an API endpoint. The developer is expected to extend the class and use that to instantiate the `APIRouter` provided to FastAPI. - -The existing `python-fastapi` OpenAPI generator does a pretty good job generating Pydantic models for entities declared in the OpenAPI spec, so we take those as-is. - -## Making changes to the templates -Please make sure you are at least familiar with the [User-defined Templates](https://openapi-generator.tech/docs/customization#user-defined-templates) section of the OpenAPI docs before you start iterating. - -Relevant OpenAPI docs: -- https://openapi-generator.tech/docs/customization -- https://openapi-generator.tech/docs/templating -- https://openapi-generator.tech/docs/debugging - -Happy templating! 
diff --git a/airbyte-connector-builder-server/openapi/generator_config.yaml b/airbyte-connector-builder-server/openapi/generator_config.yaml deleted file mode 100644 index b36cfbe63ac3..000000000000 --- a/airbyte-connector-builder-server/openapi/generator_config.yaml +++ /dev/null @@ -1,4 +0,0 @@ -files: - api_interfaces.mustache: - templateType: API - destinationFilename: _interface.py diff --git a/airbyte-connector-builder-server/openapi/templates/api_interfaces.mustache b/airbyte-connector-builder-server/openapi/templates/api_interfaces.mustache deleted file mode 100644 index 6fd34df24e7f..000000000000 --- a/airbyte-connector-builder-server/openapi/templates/api_interfaces.mustache +++ /dev/null @@ -1,121 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# -# This file was auto-generated from Airbyte's custom OpenAPI templates. Do not edit it manually. -# coding: utf-8 - -import inspect -from abc import ABC, abstractmethod -from typing import Callable, Dict, List # noqa: F401 - -from fastapi import ( # noqa: F401 - APIRouter, - Body, - Cookie, - Depends, - Form, - Header, - Path, - Query, - Response, - Security, - status, -) - -from {{modelPackage}}.extra_models import TokenModel # noqa: F401 - - -{{#imports}} -{{import}} -{{/imports}} - - -{{#operations}} -class {{classname}}(ABC): - """ - NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - Do not edit the class manually. 
- """ - - {{#operation}} - @abstractmethod - async def {{operationId}}( - self, - {{#allParams}} - {{>endpoint_argument_definition}}, - {{/allParams}} - {{#hasAuthMethods}} - {{#authMethods}} - token_{{name}}: TokenModel = Security( - get_token_{{name}}{{#isOAuth}}, scopes=[{{#scopes}}"{{scope}}"{{^-last}}, {{/-last}}{{/scopes}}]{{/isOAuth}} - ), - {{/authMethods}} - {{/hasAuthMethods}} - ) -> {{returnType}}{{^returnType}}None{{/returnType}}: - """ - {{summary}} - """ - - {{/operation}} - {{/operations}} - -def _assert_signature_is_set(method: Callable) -> None: - """ - APIRouter().add_api_route expects the input method to have a signature. It gets signatures - by running inspect.signature(method) under the hood. - - In the case that an instance method does not declare "self" as an input parameter (due to developer error - for example), then the call to inspect.signature() raises a ValueError and fails. - - Ideally, we'd automatically detect & correct this problem. To do that, we'd need to do - setattr(method, "__signature__", ) but that's not possible because instance - methods (i.e the input to this function) are object subclasses, and you can't use setattr on objects - (https://stackoverflow.com/a/12839070/3237889) - - The workaround this method implements is to raise an exception at runtime if the input method fails - when inspect.signature() is called. This is good enough because the error will be detected - immediately when the developer tries to run the server, so builds should very quickly fail and this - will practically never make it to a production scenario. - """ - try: - inspect.signature(method) - except ValueError as e: - # Based on empirical observation, the call to inspect fails with a ValueError - # with exactly one argument: "invalid method signature" - if e.args and len(e.args) == 1 and e.args[0] == "invalid method signature": - # I couldn't figure out how to setattr on a "method" object to populate the signature. 
For now just kick - # it back to the developer and tell them to set the "self" variable - raise Exception(f"Method {method.__name__} in class {type(method.__self__).__name__} must declare the variable 'self'. ") - else: - raise - - -{{#operations}} -def initialize_router(api: {{classname}}) -> APIRouter: - router = APIRouter() - - {{#operation}} - _assert_signature_is_set(api.{{operationId}}) - router.add_api_route( - "{{path}}", - endpoint=api.{{operationId}}, - methods=["{{#lambda.uppercase}}{{httpMethod}}{{/lambda.uppercase}}"], - responses={ - {{#responses}} - {{code}}: {{=<% %>=}}{<%#dataType%>"model": <%dataType%>, "description": "<%message%>"<%/dataType%><%^dataType%>"description": "<%message%>"<%/dataType%>}<%={{ }}=%>, - {{/responses}} - }, - tags=[{{#tags}}"{{name}}"{{^-last}},{{/-last}}{{/tags}}], - {{#summary}} - summary="{{.}}", - {{/summary}} - {{#description}} - description = "{{.}}", - {{/description}} - response_model_by_alias=True, - ) - - {{/operation}} - {{/operations}} - - return router diff --git a/airbyte-connector-builder-server/pytest.ini b/airbyte-connector-builder-server/pytest.ini deleted file mode 100644 index 58f2d9ae315e..000000000000 --- a/airbyte-connector-builder-server/pytest.ini +++ /dev/null @@ -1,7 +0,0 @@ -[pytest] -log_cli = 1 -log_cli_level = INFO -log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) -log_cli_date_format=%Y-%m-%d %H:%M:%S -markers = - integration: marks tests as integration test (deselect with '-m "not integration"') \ No newline at end of file diff --git a/airbyte-connector-builder-server/run_format.sh b/airbyte-connector-builder-server/run_format.sh deleted file mode 100755 index c2ea298c112a..000000000000 --- a/airbyte-connector-builder-server/run_format.sh +++ /dev/null @@ -1,26 +0,0 @@ -set -e - -cd $1 - -# Fail script on failing command -set -e - -# Pasted from https://github.com/airbytehq/airbyte/blob/master/buildSrc/src/main/groovy/airbyte-python.gradle#L85-L96 -pip 
install 'mccabe==0.6.1' -pip install 'flake8==4.0.1' -pip install 'pyproject-flake8==0.0.1a2' -pip install 'black==22.3.0' -pip install 'mypy==0.930' -pip install 'isort==5.6.4' -pip install 'pytest==6.1.2' -pip install 'coverage[toml]==6.3.1' - -# Format and static analysis -# FIXME: isort formats python files differently from gradlew format -python -m isort --settings-file=pyproject.toml ./ -python -m isort --settings-file=pyproject.toml --diff --quiet ./ -python -m black --config pyproject.toml ./ -python -m black --config pyproject.toml ./ --diff --quiet -python -m pflake8 --config pyproject.toml ./ -python -m pflake8 --config pyproject.toml ./ --diff --quiet -python -m mypy --config pyproject.toml ./ diff --git a/airbyte-connector-builder-server/run_tests.sh b/airbyte-connector-builder-server/run_tests.sh deleted file mode 100755 index d84818013941..000000000000 --- a/airbyte-connector-builder-server/run_tests.sh +++ /dev/null @@ -1,14 +0,0 @@ -set -e - -cd $1 - -# Fail script on failing command -set -e - -# Install dependencies -pip install -q -e . -pip install -q -e '.[tests]' - -# Run the tests -python -m coverage run -m pytest -p no:logging --disable-warnings unit_tests -c pytest.ini -python -m coverage run -m pytest -p no:logging --disable-warnings integration_tests -c pytest.ini diff --git a/airbyte-connector-builder-server/setup.py b/airbyte-connector-builder-server/setup.py deleted file mode 100644 index 906ce1462768..000000000000 --- a/airbyte-connector-builder-server/setup.py +++ /dev/null @@ -1,59 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import pathlib - -from setuptools import find_packages, setup - -# The directory containing this file -HERE = pathlib.Path(__file__).parent - -# The text of the README file -README = (HERE / "README.md").read_text() - -CDK_VERSION = (HERE / "CDK_VERSION").read_text() - -setup( - name="connector-builder-server", - version="0.40.32", - description="", - long_description=README, - author="Airbyte", - author_email="contact@airbyte.io", - license="MIT", - url="https://github.com/airbytehq/airbyte", - classifiers=[ - # This information is used when browsing on PyPi. - # Dev Status - "Development Status :: 3 - Alpha", - # Project Audience - "Intended Audience :: Developers", - "Topic :: Scientific/Engineering", - "Topic :: Software Development :: Libraries :: Python Modules", - "License :: OSI Approved :: MIT License", - # Python Version Support - "Programming Language :: Python :: 3.8", - ], - keywords="connect-builder", - project_urls={ - "Documentation": "https://docs.airbyte.io/", - "Source": "https://github.com/airbytehq/airbyte", - "Tracker": "https://github.com/airbytehq/airbyte/issues", - }, - packages=find_packages(exclude=("unit_tests", "integration_tests", "docs")), - package_data={}, - install_requires=[f"airbyte-cdk=={CDK_VERSION}", "fastapi", "uvicorn"], - python_requires=">=3.9.11", - extras_require={ - "tests": [ - "MyPy~=0.812", - "pytest~=6.2.5", - "pytest-cov", - "pytest-mock", - "pytest-recording", - "requests-mock", - "pre-commit", - ], - }, -) diff --git a/airbyte-connector-builder-server/src/main/openapi/openapi.yaml b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml deleted file mode 100644 index 6f2147668e99..000000000000 --- a/airbyte-connector-builder-server/src/main/openapi/openapi.yaml +++ /dev/null @@ -1,338 +0,0 @@ -openapi: 3.0.0 -info: - description: | - Connector Builder Server API - - version: "1.0.0" - title: Connector Builder Server API - contact: - email: contact@airbyte.io - license: - name: MIT - url: 
"https://opensource.org/licenses/MIT" -externalDocs: - description: Find out more about Connector Builder - url: "https://docs.airbyte.com/connector-development/config-based/overview/" - -paths: - /v1/stream/read: - post: - summary: Reads a specific stream in the source. TODO in a later phase - only read a single slice of data. - operationId: readStream - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/StreamReadRequestBody" - required: true - responses: - "200": - description: Successful operation - content: - application/json: - schema: - $ref: "#/components/schemas/StreamRead" - "400": - $ref: "#/components/responses/ExceptionResponse" - "422": - $ref: "#/components/responses/InvalidInputResponse" - /v1/streams/list: - post: - summary: List all streams present in the connector manifest, along with their specific request URLs - operationId: listStreams - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/StreamsListRequestBody" - required: true - responses: - "200": - description: Successful operation - content: - application/json: - schema: - $ref: "#/components/schemas/StreamsListRead" - "400": - $ref: "#/components/responses/ExceptionResponse" - "422": - $ref: "#/components/responses/InvalidInputResponse" - /v1/manifest_template: - get: - summary: Return a connector manifest template to use as the default value for the yaml editor - operationId: getManifestTemplate - responses: - "200": - description: Successful operation - content: - application/json: - schema: - type: string - description: Connector manifest template string - /v1/manifest/resolve: - post: - summary: Given a JSON manifest, returns a JSON manifest with all of the $refs and $options resolved and flattened - operationId: resolveManifest - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/ResolveManifestRequestBody" - required: true - responses: - "200": - description: Successful operation - 
content: - application/json: - schema: - $ref: "#/components/schemas/ResolveManifest" - "400": - $ref: "#/components/responses/ExceptionResponse" - "422": - $ref: "#/components/responses/InvalidInputResponse" - -components: - schemas: - StreamRead: - type: object - required: - - logs - - slices - - test_read_limit_reached - properties: - logs: - type: array - description: The LOG AirbyteMessages that were emitted during the read of this slice - items: - type: object - # $ref: "#/components/schemas/AirbyteProtocol/definitions/AirbyteLogMessage" - slices: - type: array - description: The stream slices returned from the read command. If no stream slicer is configured, this should contain a single item containing all of the results. - items: - type: object - required: - - pages - properties: - pages: - type: array - description: The pages returned from the read command. If no pagination is configured, this should contain a single item containing all of the results. - items: - type: object - required: - - records - properties: - records: - type: array - description: The RECORD AirbyteMessages coming from the read operation for this page - items: - type: object - # $ref: "#/components/schemas/AirbyteProtocol/definitions/AirbyteRecordMessage" - request: - $ref: "#/components/schemas/HttpRequest" - response: - $ref: "#/components/schemas/HttpResponse" - sliceDescriptor: - type: object - description: "An object describing the current slice. This can be omitted if a stream slicer is not configured." - properties: - startDatetime: - type: string - format: date-time - listItem: - type: string - state: - type: object - description: The STATE AirbyteMessage emitted at the end of this slice. This can be omitted if a stream slicer is not configured. 
- # $ref: "#/components/schemas/AirbyteProtocol/definitions/AirbyteStateMessage" - test_read_limit_reached: - type: boolean - description: Whether the maximum number of request per slice or the maximum number of slices queried has been reached - inferred_schema: - type: object - description: The narrowest JSON Schema against which every AirbyteRecord in the slices can validate successfully. This is inferred from reading every record in the output slices. - StreamReadRequestBody: - type: object - required: - - manifest - - stream - - config - properties: - manifest: - type: object - description: The config-based connector manifest contents - # $ref: "#/components/schemas/ConnectorManifest" - stream: - type: string - description: Name of the stream to read - config: - type: object - description: The config blob containing the user inputs for testing - state: - type: object - description: The AirbyteStateMessage object to use as the starting state for this read - # $ref: "#/components/schemas/AirbyteProtocol/definitions/AirbyteStateMessage" - record_limit: - type: integer - minimum: 1 - maximum: 1000 - description: Number of records that will be returned to the client from the connector builder (max of 1000) - # --- Potential addition for a later phase --- - # numPages: - # type: integer - # description: Number of pages to read from the source for each slice - # default: 1 - HttpRequest: - type: object - required: - - url - - http_method - properties: - url: - type: string - description: URL that the request was sent to - parameters: - type: object - description: The request parameters that were set on the HTTP request, if any - body: - type: object - description: The body of the HTTP request, if present - headers: - type: object - description: The headers of the HTTP request, if any - http_method: - type: string - enum: ["GET", "POST", "PUT", "PATCH"] - description: The http method of the request ("GET", "POST", "PUT", or "PATCH") - HttpResponse: - type: object - 
required: - - status - properties: - status: - type: integer - description: The status of the response - body: - type: object - description: The body of the HTTP response, if present - headers: - type: object - description: The headers of the HTTP response, if any - # --- Commenting out for now since they do not work with our orval openapi client generator --- - # ConnectorManifest: - # $ref: ../../../../airbyte-cdk/python/airbyte_cdk/sources/declarative/config_component_schema.json - # AirbyteProtocol: - # $ref: ../../../../airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml - StreamsListRequestBody: - type: object - required: - - manifest - - config - properties: - manifest: - type: object - description: The config-based connector manifest contents - # $ref: "#/components/schemas/ConnectorManifest" - config: - type: object - description: The config blob containing the user inputs for testing - StreamsListRead: - type: object - required: - - streams - properties: - streams: - type: array - items: - type: object - description: The stream names present in the connector manifest - required: - - name - - url - properties: - name: - type: string - description: The name of the stream - url: - type: string - description: The URL to which read requests will be made for this stream - # --- Potential addition for a later phase --- - # slices: - # type: array - # description: list of slices that will be retrieved for this stream - # items: - # type: object - ResolveManifestRequestBody: - type: object - required: - - manifest - properties: - manifest: - type: object - description: The config-based connector manifest contents - ResolveManifest: - type: object - required: - - manifest - properties: - manifest: - type: object - description: The config-based connector manifest contents with $refs and $options resolved - - # The following exception structs were copied from airbyte-api/src/main/openapi/config.yaml - InvalidInputProperty: - 
type: object - required: - - propertyPath - properties: - propertyPath: - type: string - invalidValue: - type: string - message: - type: string - KnownExceptionInfo: - type: object - required: - - message - properties: - message: - type: string - exceptionClassName: - type: string - exceptionStack: - type: array - items: - type: string - InvalidInputExceptionInfo: - type: object - required: - - message - - validationErrors - properties: - message: - type: string - exceptionClassName: - type: string - exceptionStack: - type: array - items: - type: string - validationErrors: - type: array - items: - $ref: "#/components/schemas/InvalidInputProperty" - - responses: - InvalidInputResponse: - description: Input failed validation - content: - application/json: - schema: - $ref: "#/components/schemas/InvalidInputExceptionInfo" - ExceptionResponse: - description: Exception occurred; see message for details. - content: - application/json: - schema: - $ref: "#/components/schemas/KnownExceptionInfo" diff --git a/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_default_api.py b/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_default_api.py deleted file mode 100644 index 48d3df6be8af..000000000000 --- a/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_default_api.py +++ /dev/null @@ -1,920 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import asyncio -import json -from typing import Iterator -from unittest.mock import MagicMock - -import pytest -from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, Level, Type -from connector_builder.generated.models.http_request import HttpRequest -from connector_builder.generated.models.http_response import HttpResponse -from connector_builder.generated.models.resolve_manifest import ResolveManifest -from connector_builder.generated.models.resolve_manifest_request_body import ResolveManifestRequestBody -from connector_builder.generated.models.stream_read import StreamRead -from connector_builder.generated.models.stream_read_pages import StreamReadPages -from connector_builder.generated.models.stream_read_request_body import StreamReadRequestBody -from connector_builder.generated.models.streams_list_read import StreamsListRead -from connector_builder.generated.models.streams_list_read_streams import StreamsListReadStreams -from connector_builder.generated.models.streams_list_request_body import StreamsListRequestBody -from connector_builder.impl.default_api import DefaultApiImpl -from connector_builder.impl.low_code_cdk_adapter import LowCodeSourceAdapterFactory -from fastapi import HTTPException -from pydantic.error_wrappers import ValidationError - -MAX_PAGES_PER_SLICE = 4 -MAX_SLICES = 3 - -MANIFEST = { - "version": "0.1.0", - "type": "DeclarativeSource", - "definitions": { - "selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "DeclarativeSource"}, - "retriever": { - "type": "DeclarativeSource", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - 
"hashiras_stream": { - "retriever": { - "type": "DeclarativeSource", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "hashiras", "path": "/hashiras"}, - }, - "breathing_techniques_stream": { - "retriever": { - "type": "DeclarativeSource", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "breathing-techniques", "path": "/breathing_techniques"}, - }, - }, - "streams": [ - { - "type": "DeclarativeStream", - "retriever": { - "type": "SimpleRetriever", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "hashiras", "path": "/hashiras"}, - }, - { - "type": "DeclarativeStream", - "retriever": { - "type": "SimpleRetriever", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "breathing-techniques", "path": "/breathing_techniques"}, - }, - ], - "check": {"stream_names": ["hashiras"], "type": "CheckStream"}, -} - -CONFIG = {"rank": "upper-six"} - - -def request_log_message(request: dict) -> AirbyteMessage: - return AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, 
message=f"request:{json.dumps(request)}")) - - -def response_log_message(response: dict) -> AirbyteMessage: - return AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message=f"response:{json.dumps(response)}")) - - -def record_message(stream: str, data: dict) -> AirbyteMessage: - return AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data=data, emitted_at=1234)) - - -def slice_message() -> AirbyteMessage: - return AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message='slice:{"key": "value"}')) - - -def test_list_streams(): - expected_streams = [ - StreamsListReadStreams(name="hashiras", url="https://demonslayers.com/api/v1/hashiras"), - StreamsListReadStreams(name="breathing-techniques", url="https://demonslayers.com/api/v1/breathing_techniques"), - ] - - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - streams_list_request_body = StreamsListRequestBody(manifest=MANIFEST, config=CONFIG) - loop = asyncio.get_event_loop() - actual_streams = loop.run_until_complete(api.list_streams(streams_list_request_body)) - - for i, expected_stream in enumerate(expected_streams): - assert actual_streams.streams[i] == expected_stream - - -def test_list_streams_with_interpolated_urls(): - manifest = { - "version": "0.1.0", - "type": "DeclarativeSource", - "streams": [ - { - "type": "DeclarativeStream", - "retriever": { - "type": "SimpleRetriever", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": { - "url_base": "https://{{ config['rank'] }}.muzan.com/api/v1/", - "http_method": "GET", - "type": "HttpRequester", - }, - }, - "$options": {"name": "demons", "path": "/demons"}, - } - ], - "check": {"stream_names": ["demons"], "type": "CheckStream"}, - } - - expected_streams = 
StreamsListRead(streams=[StreamsListReadStreams(name="demons", url="https://upper-six.muzan.com/api/v1/demons")]) - - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - streams_list_request_body = StreamsListRequestBody(manifest=manifest, config=CONFIG) - loop = asyncio.get_event_loop() - actual_streams = loop.run_until_complete(api.list_streams(streams_list_request_body)) - - assert actual_streams == expected_streams - - -def test_list_streams_with_unresolved_interpolation(): - manifest = { - "version": "0.1.0", - "type": "DeclarativeSource", - "streams": [ - { - "type": "DeclarativeStream", - "retriever": { - "type": "SimpleRetriever", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": { - "url_base": "https://{{ config['not_in_config'] }}.muzan.com/api/v1/", - "http_method": "GET", - "type": "HttpRequester", - }, - }, - "$options": {"name": "demons", "path": "/demons"}, - } - ], - "check": {"stream_names": ["demons"], "type": "CheckStream"}, - } - - # The interpolated string {{ config['not_in_config'] }} doesn't resolve to anything so it ends up blank during interpolation - expected_streams = StreamsListRead(streams=[StreamsListReadStreams(name="demons", url="https://.muzan.com/api/v1/demons")]) - - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - - streams_list_request_body = StreamsListRequestBody(manifest=manifest, config=CONFIG) - loop = asyncio.get_event_loop() - actual_streams = loop.run_until_complete(api.list_streams(streams_list_request_body)) - - assert actual_streams == expected_streams - - -def test_read_stream(): - request = { - "url": "https://demonslayers.com/api/v1/hashiras?era=taisho", - "headers": {"Content-Type": "application/json"}, - "http_method": "GET", - "body": {"custom": "field"}, - 
} - response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}', "http_method": "GET"} - expected_schema = {"$schema": "http://json-schema.org/schema#", "properties": {"name": {"type": "string"}}, "type": "object"} - expected_pages = [ - StreamReadPages( - request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, - headers={"Content-Type": "application/json"}, - body={"custom": "field"}, - http_method="GET", - ), - response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), - records=[{"name": "Shinobu Kocho"}, {"name": "Muichiro Tokito"}], - ), - StreamReadPages( - request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, - headers={"Content-Type": "application/json"}, - body={"custom": "field"}, - http_method="GET", - ), - response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), - records=[{"name": "Mitsuri Kanroji"}], - ), - ] - - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Shinobu Kocho"}), - record_message("hashiras", {"name": "Muichiro Tokito"}), - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Mitsuri Kanroji"}), - ] - ) - ) - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES) - - loop = asyncio.get_event_loop() - actual_response: StreamRead = loop.run_until_complete( - api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) - ) - assert actual_response.inferred_schema == expected_schema - - single_slice = actual_response.slices[0] - for i, actual_page in enumerate(single_slice.pages): - assert actual_page == expected_pages[i] - - -def test_read_stream_with_logs(): - request = { - "url": 
"https://demonslayers.com/api/v1/hashiras?era=taisho", - "headers": {"Content-Type": "application/json"}, - "body": {"custom": "field"}, - "http_method": "GET", - } - response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} - expected_pages = [ - StreamReadPages( - request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, - headers={"Content-Type": "application/json"}, - body={"custom": "field"}, - http_method="GET", - ), - response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), - records=[{"name": "Shinobu Kocho"}, {"name": "Muichiro Tokito"}], - ), - StreamReadPages( - request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, - headers={"Content-Type": "application/json"}, - body={"custom": "field"}, - http_method="GET", - ), - response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), - records=[{"name": "Mitsuri Kanroji"}], - ), - ] - expected_logs = [ - {"message": "log message before the request"}, - {"message": "log message during the page"}, - {"message": "log message after the response"}, - ] - - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="log message before the request")), - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Shinobu Kocho"}), - AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="log message during the page")), - record_message("hashiras", {"name": "Muichiro Tokito"}), - AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="log message after the response")), - ] - ) - ) - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES) - - loop = asyncio.get_event_loop() - actual_response: StreamRead = loop.run_until_complete( - 
api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) - ) - - single_slice = actual_response.slices[0] - for i, actual_page in enumerate(single_slice.pages): - assert actual_page == expected_pages[i] - - for i, actual_log in enumerate(actual_response.logs): - assert actual_log == expected_logs[i] - - -@pytest.mark.parametrize( - "request_record_limit, max_record_limit", - [ - pytest.param(1, 3, id="test_create_request_with_record_limit"), - pytest.param(3, 1, id="test_create_request_record_limit_exceeds_max"), - ], -) -def test_read_stream_record_limit(request_record_limit, max_record_limit): - request = { - "url": "https://demonslayers.com/api/v1/hashiras?era=taisho", - "headers": {"Content-Type": "application/json"}, - "body": {"custom": "field"}, - } - response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Shinobu Kocho"}), - record_message("hashiras", {"name": "Muichiro Tokito"}), - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Mitsuri Kanroji"}), - response_log_message(response), - ] - ) - ) - n_records = 2 - record_limit = min(request_record_limit, max_record_limit) - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES, max_record_limit=max_record_limit) - loop = asyncio.get_event_loop() - actual_response: StreamRead = loop.run_until_complete( - api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras", record_limit=request_record_limit)) - ) - single_slice = actual_response.slices[0] - total_records = 0 - for i, actual_page in enumerate(single_slice.pages): - total_records += len(actual_page.records) - assert total_records == min([record_limit, n_records]) - - -@pytest.mark.parametrize( - 
"max_record_limit", - [ - pytest.param(2, id="test_create_request_no_record_limit"), - pytest.param(1, id="test_create_request_no_record_limit_n_records_exceed_max"), - ], -) -def test_read_stream_default_record_limit(max_record_limit): - request = { - "url": "https://demonslayers.com/api/v1/hashiras?era=taisho", - "headers": {"Content-Type": "application/json"}, - "body": {"custom": "field"}, - } - response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Shinobu Kocho"}), - record_message("hashiras", {"name": "Muichiro Tokito"}), - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Mitsuri Kanroji"}), - response_log_message(response), - ] - ) - ) - n_records = 2 - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES, max_record_limit=max_record_limit) - loop = asyncio.get_event_loop() - actual_response: StreamRead = loop.run_until_complete( - api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) - ) - single_slice = actual_response.slices[0] - total_records = 0 - for i, actual_page in enumerate(single_slice.pages): - total_records += len(actual_page.records) - assert total_records == min([max_record_limit, n_records]) - - -def test_read_stream_limit_0(): - request = { - "url": "https://demonslayers.com/api/v1/hashiras?era=taisho", - "headers": {"Content-Type": "application/json"}, - "body": {"custom": "field"}, - } - response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Shinobu Kocho"}), - record_message("hashiras", {"name": "Muichiro 
Tokito"}), - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Mitsuri Kanroji"}), - response_log_message(response), - ] - ) - ) - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES) - loop = asyncio.get_event_loop() - - with pytest.raises(ValidationError): - loop.run_until_complete(api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras", record_limit=0))) - loop.run_until_complete(api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras"))) - - -def test_read_stream_no_records(): - request = { - "url": "https://demonslayers.com/api/v1/hashiras?era=taisho", - "headers": {"Content-Type": "application/json"}, - "body": {"custom": "field"}, - "http_method": "GET", - } - response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} - expected_pages = [ - StreamReadPages( - request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, - headers={"Content-Type": "application/json"}, - body={"custom": "field"}, - http_method="GET", - ), - response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), - records=[], - ), - StreamReadPages( - request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, - headers={"Content-Type": "application/json"}, - body={"custom": "field"}, - http_method="GET", - ), - response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), - records=[], - ), - ] - - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - request_log_message(request), - response_log_message(response), - request_log_message(request), - response_log_message(response), - ] - ) - ) - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES) - - loop = asyncio.get_event_loop() - actual_response: StreamRead = loop.run_until_complete( 
- api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) - ) - - single_slice = actual_response.slices[0] - for i, actual_page in enumerate(single_slice.pages): - assert actual_page == expected_pages[i] - - -def test_invalid_manifest(): - invalid_manifest = { - "version": "0.1.0", - "definitions": { - "selector": {"extractor": {"field_pointer": ["items"]}}, - "requester": {"http_method": "GET"}, - "retriever": { - "record_selector": {"extractor": {"field_pointer": ["items"]}}, - "paginator": {"type": "NoPagination"}, - "requester": {"http_method": "GET"}, - }, - "hashiras_stream": { - "retriever": { - "record_selector": {"extractor": {"field_pointer": ["items"]}}, - "paginator": {"type": "NoPagination"}, - "requester": {"http_method": "GET"}, - }, - "$options": {"name": "hashiras", "path": "/hashiras"}, - }, - }, - "check": {"stream_names": ["hashiras"], "class_name": "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream"}, - } - - expected_status_code = 400 - - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - loop = asyncio.get_event_loop() - with pytest.raises(HTTPException) as actual_exception: - loop.run_until_complete(api.read_stream(StreamReadRequestBody(manifest=invalid_manifest, config={}, stream="hashiras"))) - - assert actual_exception.value.status_code == expected_status_code - - -def test_read_stream_invalid_group_format(): - response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} - - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - response_log_message(response), - record_message("hashiras", {"name": "Shinobu Kocho"}), - record_message("hashiras", {"name": "Muichiro Tokito"}), - ] - ) - ) - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES) - - loop = asyncio.get_event_loop() - with pytest.raises(HTTPException) as actual_exception: - 
loop.run_until_complete(api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras"))) - - assert actual_exception.value.status_code == 400 - - -def test_read_stream_returns_error_if_stream_does_not_exist(): - expected_status_code = 400 - - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - loop = asyncio.get_event_loop() - with pytest.raises(HTTPException) as actual_exception: - loop.run_until_complete(api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config={}, stream="not_in_manifest"))) - - assert actual_exception.value.status_code == expected_status_code - - -@pytest.mark.parametrize( - "log_message, expected_request", - [ - pytest.param( - 'request:{"url": "https://nichirin.com/v1/swords?color=orange", "http_method": "PUT", "headers": {"field": "name"}, "body":{"key": "value"}}', - HttpRequest( - url="https://nichirin.com/v1/swords", - parameters={"color": ["orange"]}, - headers={"field": "name"}, - body={"key": "value"}, - http_method="PUT", - ), - id="test_create_request_with_all_fields", - ), - pytest.param( - 'request:{"url": "https://nichirin.com/v1/swords?color=orange", "http_method": "GET", "headers": {"field": "name"}}', - HttpRequest( - url="https://nichirin.com/v1/swords", parameters={"color": ["orange"]}, headers={"field": "name"}, http_method="GET" - ), - id="test_create_request_with_no_body", - ), - pytest.param( - 'request:{"url": "https://nichirin.com/v1/swords?color=orange", "http_method": "PUT", "body":{"key": "value"}}', - HttpRequest(url="https://nichirin.com/v1/swords", parameters={"color": ["orange"]}, body={"key": "value"}, http_method="PUT"), - id="test_create_request_with_no_headers", - ), - pytest.param( - 'request:{"url": "https://nichirin.com/v1/swords", "http_method": "PUT", "headers": {"field": "name"}, "body":{"key": "value"}}', - HttpRequest(url="https://nichirin.com/v1/swords", headers={"field": "name"}, body={"key": 
"value"}, http_method="PUT"), - id="test_create_request_with_no_parameters", - ), - pytest.param( - 'request:{"url": "https://nichirin.com/v1/swords", "http_method": "POST", "headers": {"field": "name"}, "body":null}', - HttpRequest(url="https://nichirin.com/v1/swords", headers={"field": "name"}, body=None, http_method="POST"), - id="test_create_request_with_null_body", - ), - pytest.param("request:{invalid_json: }", None, id="test_invalid_json_still_does_not_crash"), - pytest.param("just a regular log message", None, id="test_no_request:_prefix_does_not_crash"), - ], -) -def test_create_request_from_log_message(log_message, expected_request): - airbyte_log_message = AirbyteLogMessage(level=Level.INFO, message=log_message) - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - actual_request = api._create_request_from_log_message(airbyte_log_message) - - assert actual_request == expected_request - - -@pytest.mark.parametrize( - "log_message, expected_response", - [ - pytest.param( - {"status_code": 200, "headers": {"field": "name"}, "body": '{"id":"fire", "owner": "kyojuro_rengoku"}'}, - HttpResponse(status=200, headers={"field": "name"}, body={"id": "fire", "owner": "kyojuro_rengoku"}), - id="test_create_response_with_all_fields", - ), - pytest.param( - {"status_code": 200, "headers": {"field": "name"}}, - HttpResponse(status=200, body={}, headers={"field": "name"}), - id="test_create_response_with_no_body", - ), - pytest.param( - {"status_code": 200, "body": '{"id":"fire", "owner": "kyojuro_rengoku"}'}, - HttpResponse(status=200, body={"id": "fire", "owner": "kyojuro_rengoku"}), - id="test_create_response_with_no_headers", - ), - pytest.param("request:{invalid_json: }", None, id="test_invalid_json_still_does_not_crash"), - pytest.param("just a regular log message", None, id="test_no_response:_prefix_does_not_crash"), - ], -) -def test_create_response_from_log_message(log_message, expected_response): 
- if isinstance(log_message, str): - response_message = log_message - else: - response_message = f"response:{json.dumps(log_message)}" - - airbyte_log_message = AirbyteLogMessage(level=Level.INFO, message=response_message) - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - actual_response = api._create_response_from_log_message(airbyte_log_message) - - assert actual_response == expected_response - - -def test_read_stream_with_many_slices(): - request = {} - response = {"status_code": 200} - - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - slice_message(), - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Muichiro Tokito"}), - slice_message(), - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Shinobu Kocho"}), - record_message("hashiras", {"name": "Mitsuri Kanroji"}), - request_log_message(request), - response_log_message(response), - record_message("hashiras", {"name": "Obanai Iguro"}), - request_log_message(request), - response_log_message(response), - ] - ) - ) - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES) - - loop = asyncio.get_event_loop() - stream_read: StreamRead = loop.run_until_complete( - api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) - ) - - assert not stream_read.test_read_limit_reached - assert len(stream_read.slices) == 2 - - assert len(stream_read.slices[0].pages) == 1 - assert len(stream_read.slices[0].pages[0].records) == 1 - - assert len(stream_read.slices[1].pages) == 3 - assert len(stream_read.slices[1].pages[0].records) == 2 - assert len(stream_read.slices[1].pages[1].records) == 1 - assert len(stream_read.slices[1].pages[2].records) == 0 - - - -def test_read_stream_given_maximum_number_of_slices_then_test_read_limit_reached(): - maximum_number_of_slices = 5 - request = 
{} - response = {"status_code": 200} - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [ - slice_message(), - request_log_message(request), - response_log_message(response) - ] * maximum_number_of_slices - ) - ) - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES) - - loop = asyncio.get_event_loop() - stream_read: StreamRead = loop.run_until_complete( - api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) - ) - - assert stream_read.test_read_limit_reached - - -def test_read_stream_given_maximum_number_of_pages_then_test_read_limit_reached(): - maximum_number_of_pages_per_slice = 5 - request = {} - response = {"status_code": 200} - mock_source_adapter_cls = make_mock_adapter_factory( - iter( - [slice_message()] + [request_log_message(request), response_log_message(response)] * maximum_number_of_pages_per_slice - ) - ) - - api = DefaultApiImpl(mock_source_adapter_cls, MAX_PAGES_PER_SLICE, MAX_SLICES) - - loop = asyncio.get_event_loop() - stream_read: StreamRead = loop.run_until_complete( - api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) - ) - - assert stream_read.test_read_limit_reached - - -def test_resolve_manifest(): - _stream_name = "stream_with_custom_requester" - _stream_primary_key = "id" - _stream_url_base = "https://api.sendgrid.com" - _stream_options = {"name": _stream_name, "primary_key": _stream_primary_key, "url_base": _stream_url_base} - - manifest = { - "version": "version", - "definitions": { - "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, - "retriever": { - "paginator": { - "type": "DefaultPaginator", - "page_size": 10, - "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, - "page_token_option": {"inject_into": "path"}, - "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next 
}}"}, - }, - "requester": { - "path": "/v3/marketing/lists", - "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, - "request_parameters": {"page_size": 10}, - }, - "record_selector": {"extractor": {"field_pointer": ["result"]}}, - }, - }, - "streams": [ - { - "type": "DeclarativeStream", - "$options": _stream_options, - "schema_loader": {"$ref": "*ref(definitions.schema_loader)"}, - "retriever": "*ref(definitions.retriever)", - }, - ], - "check": {"type": "CheckStream", "stream_names": ["lists"]}, - } - - expected_resolved_manifest = { - "type": "DeclarativeSource", - "version": "version", - "definitions": { - "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, - "retriever": { - "paginator": { - "type": "DefaultPaginator", - "page_size": 10, - "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, - "page_token_option": {"inject_into": "path"}, - "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, - }, - "requester": { - "path": "/v3/marketing/lists", - "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, - "request_parameters": {"page_size": 10}, - }, - "record_selector": {"extractor": {"field_pointer": ["result"]}}, - }, - }, - "streams": [ - { - "type": "DeclarativeStream", - "schema_loader": { - "type": "JsonFileSchemaLoader", - "name": "{{ options.stream_name }}", - "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "retriever": { - "type": "SimpleRetriever", - "paginator": { - "type": "DefaultPaginator", - "page_size": 10, - "page_size_option": { - "type": "RequestOption", - "inject_into": "request_parameter", - "field_name": "page_size", - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": 
_stream_url_base, - "$options": _stream_options, - }, - "page_token_option": { - "type": "RequestOption", - "inject_into": "path", - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "pagination_strategy": { - "type": "CursorPagination", - "cursor_value": "{{ response._metadata.next }}", - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "requester": { - "type": "HttpRequester", - "path": "/v3/marketing/lists", - "authenticator": { - "type": "BearerAuthenticator", - "api_token": "{{ config.apikey }}", - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "request_parameters": {"page_size": 10}, - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "record_selector": { - "type": "RecordSelector", - "extractor": { - "type": "DpathExtractor", - "field_pointer": ["result"], - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - "name": _stream_name, - "primary_key": _stream_primary_key, - "url_base": _stream_url_base, - "$options": _stream_options, - }, - ], - "check": {"type": "CheckStream", "stream_names": ["lists"]}, - } - - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - - loop = asyncio.get_event_loop() - actual_response: 
ResolveManifest = loop.run_until_complete(api.resolve_manifest(ResolveManifestRequestBody(manifest=manifest))) - assert actual_response.manifest == expected_resolved_manifest - - -def test_resolve_manifest_unresolvable_references(): - expected_status_code = 400 - - invalid_manifest = { - "version": "version", - "definitions": {}, - "streams": [ - {"type": "DeclarativeStream", "retriever": "*ref(definitions.retriever)"}, - ], - "check": {"type": "CheckStream", "stream_names": ["lists"]}, - } - - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - loop = asyncio.get_event_loop() - with pytest.raises(HTTPException) as actual_exception: - loop.run_until_complete(api.resolve_manifest(ResolveManifestRequestBody(manifest=invalid_manifest))) - - assert "Undefined reference *ref(definitions.retriever)" in actual_exception.value.detail - assert actual_exception.value.status_code == expected_status_code - - -def test_resolve_manifest_invalid(): - expected_status_code = 400 - invalid_manifest = {"version": "version"} - - api = DefaultApiImpl(LowCodeSourceAdapterFactory(MAX_PAGES_PER_SLICE, MAX_SLICES), MAX_PAGES_PER_SLICE, MAX_SLICES) - loop = asyncio.get_event_loop() - with pytest.raises(HTTPException) as actual_exception: - loop.run_until_complete(api.resolve_manifest(ResolveManifestRequestBody(manifest=invalid_manifest))) - - assert "Could not resolve manifest with error" in actual_exception.value.detail - assert actual_exception.value.status_code == expected_status_code - - -def make_mock_adapter_factory(return_value: Iterator) -> MagicMock: - mock_source_adapter_factory = MagicMock() - mock_source_adapter = MagicMock() - mock_source_adapter.read_stream.return_value = return_value - mock_source_adapter_factory.create.return_value = mock_source_adapter - return mock_source_adapter_factory diff --git a/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_low_code_cdk_adapter.py 
b/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_low_code_cdk_adapter.py deleted file mode 100644 index c1230b418063..000000000000 --- a/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_low_code_cdk_adapter.py +++ /dev/null @@ -1,354 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from abc import ABC -from typing import Any, List, Mapping, Optional, Union -from unittest.mock import MagicMock - -import pytest -import requests -from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, Level, Type -from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream -from airbyte_cdk.sources.declarative.requesters.paginators import PaginatorTestReadDecorator -from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetrieverTestReadDecorator -from airbyte_cdk.sources.declarative.parsers.custom_exceptions import UndefinedReferenceException -from airbyte_cdk.sources.streams.http import HttpStream - -from connector_builder.impl.low_code_cdk_adapter import LowCodeSourceAdapter - - -class MockConcreteStream(HttpStream, ABC): - """ - Test class used to verify errors are correctly thrown when the adapter receives unexpected outputs - """ - - def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: - return None - - def url_base(self) -> str: - return "" - - def path( - self, - *, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> str: - return "" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - def parse_response( - self, - response: requests.Response, - *, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> Optional[str]: - return None - - -MANIFEST = { - "version": "0.1.0", - "type" : 
"DeclarativeSource", - "definitions": { - "selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type" : "DeclarativeSource" }, - "retriever": { - "type" : "DeclarativeSource", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "hashiras_stream": { - "retriever": { - "type" : "DeclarativeSource", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "hashiras", "path": "/hashiras"}, - }, - "breathing_techniques_stream": { - "retriever": { - "type" : "DeclarativeSource", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "breathing-techniques", "path": "/breathing_techniques"}, - }, - }, - "streams": [ - { - "type" : "DeclarativeStream", - "retriever": { - "type" : "SimpleRetriever", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "hashiras", "path": "/hashiras"}, - }, - { - "type" : "DeclarativeStream", - "retriever": { - "type" : "SimpleRetriever", - "record_selector": 
{"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": {"type": "NoPagination"}, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "breathing-techniques", "path": "/breathing_techniques"}, - }, - ], - "check": {"stream_names": ["hashiras"], "type": "CheckStream"}, -} - -MANIFEST_WITH_REFERENCES = { - "version": "0.1.0", - "type" : "DeclarativeSource", - "definitions": { - "selector": { - "type": "RecordSelector", - "extractor": { - "type": "DpathExtractor", - "field_pointer": [] - } - }, - "requester": { - "type": "HttpRequester", - "url_base": "https://demonslayers.com/api/v1/", - "http_method": "GET", - "authenticator": { - "type": "BearerAuthenticator", - "api_token": "{{ config['api_key'] }}" - } - }, - "retriever": { - "type": "SimpleRetriever", - "record_selector": { - "$ref": "*ref(definitions.selector)" - }, - "paginator": { - "type": "NoPagination" - }, - "requester": { - "$ref": "*ref(definitions.requester)" - } - }, - "base_stream": { - "type": "DeclarativeStream", - "retriever": { - "$ref": "*ref(definitions.retriever)" - } - }, - "ranks_stream": { - "$ref": "*ref(definitions.base_stream)", - "$options": { - "name": "ranks", - "primary_key": "id", - "path": "/ranks" - } - } - }, - "streams": ["*ref(definitions.ranks_stream)"], - "check": { - "type": "CheckStream", - "stream_names": ["ranks"] - }, - "spec": { - "type": "Spec", - "documentation_url": "https://docsurl.com", - "connection_specification": { - "title": "Source Name Spec", - "type": "object", - "required": ["api_key"], - "additionalProperties": True, - "properties": { - "api_key": { - "type": "string", - "description": "API Key" - } - } - } - } -} - -MANIFEST_WITH_PAGINATOR = { - "version": "0.1.0", - "type" : "DeclarativeSource", - "definitions": { - }, - "streams": [ - { - "type" : "DeclarativeStream", - "retriever": { - "type" : 
"SimpleRetriever", - "record_selector": {"extractor": {"field_pointer": ["items"], "type": "DpathExtractor"}, "type": "RecordSelector"}, - "paginator": { - "type": "DefaultPaginator", - "pagination_strategy": { - "type": "OffsetIncrement", - "page_size": 10 - }, - "url_base": "https://demonslayers.com/api/v1/" - }, - "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET", "type": "HttpRequester"}, - }, - "$options": {"name": "hashiras", "path": "/hashiras"}, - }, - ], - "check": {"stream_names": ["hashiras"], "type": "CheckStream"}, -} - -def test_get_http_streams(): - expected_urls = {"https://demonslayers.com/api/v1/breathing_techniques", "https://demonslayers.com/api/v1/hashiras"} - - adapter = LowCodeSourceAdapter(MANIFEST, MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, MAXIMUM_NUMBER_OF_SLICES) - actual_streams = adapter.get_http_streams(config={}) - actual_urls = {http_stream.url_base + http_stream.path() for http_stream in actual_streams} - - assert len(actual_streams) == len(expected_urls) - assert actual_urls == expected_urls - - -MAXIMUM_NUMBER_OF_PAGES_PER_SLICE = 5 -MAXIMUM_NUMBER_OF_SLICES = 5 - -def test_get_http_manifest_with_references(): - expected_urls = {"https://demonslayers.com/api/v1/ranks"} - - adapter = LowCodeSourceAdapter(MANIFEST_WITH_REFERENCES, MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, MAXIMUM_NUMBER_OF_SLICES) - actual_streams = adapter.get_http_streams(config={}) - actual_urls = {http_stream.url_base + http_stream.path() for http_stream in actual_streams} - - assert len(actual_streams) == len(expected_urls) - assert actual_urls == expected_urls - - -def test_get_http_streams_non_declarative_streams(): - non_declarative_stream = MockConcreteStream() - - mock_source = MagicMock() - mock_source.streams.return_value = [non_declarative_stream] - - adapter = LowCodeSourceAdapter(MANIFEST, MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, MAXIMUM_NUMBER_OF_SLICES) - adapter._source = mock_source - with pytest.raises(TypeError): - 
adapter.get_http_streams(config={}) - - -def test_get_http_streams_non_http_stream(): - declarative_stream_non_http_retriever = DeclarativeStream(name="hashiras", primary_key="id", retriever=MagicMock(), config={}, - options={}) - - mock_source = MagicMock() - mock_source.streams.return_value = [declarative_stream_non_http_retriever] - - adapter = LowCodeSourceAdapter(MANIFEST, MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, MAXIMUM_NUMBER_OF_SLICES) - adapter._source = mock_source - with pytest.raises(TypeError): - adapter.get_http_streams(config={}) - - -def test_read_streams(): - expected_messages = iter([ - AirbyteMessage( - type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="request:{'url': 'https://demonslayers.com/v1/hashiras'}") - ), - AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="response:{'status': 200}")), - AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage(data={"name": "Tengen Uzui", "breathing_technique": "sound"}, emitted_at=1234, stream="hashiras"), - ), - AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - data={"name": "Kyojuro Rengoku", "breathing_technique": "fire"}, emitted_at=1234, stream="hashiras" - ), - ), - AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage(data={"name": "Giyu Tomioka", "breathing_technique": "water"}, emitted_at=1234, stream="hashiras"), - ), - ]) - mock_source = MagicMock() - mock_source.read.return_value = expected_messages - - adapter = LowCodeSourceAdapter(MANIFEST, MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, MAXIMUM_NUMBER_OF_SLICES) - adapter._source = mock_source - actual_messages = list(adapter.read_stream("hashiras", {})) - - for i, expected_message in enumerate(expected_messages): - assert actual_messages[i] == expected_message - - -def test_read_streams_with_error(): - expected_messages = [ - AirbyteMessage( - type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="request:{'url': 'https://demonslayers.com/v1/hashiras'}") - ), - 
AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="response:{'status': 401}")), - AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.ERROR, message="error_message")), - ] - mock_source = MagicMock() - - def return_value(*args, **kwargs): - yield expected_messages[0] - yield expected_messages[1] - raise Exception("error_message") - - mock_source.read.side_effect = return_value - - adapter = LowCodeSourceAdapter(MANIFEST, MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, MAXIMUM_NUMBER_OF_SLICES) - adapter._source = mock_source - actual_messages = list(adapter.read_stream("hashiras", {})) - - for i, expected_message in enumerate(expected_messages): - assert actual_messages[i] == expected_message - - -def test_read_streams_invalid_reference(): - invalid_reference_manifest = { - "version": "0.1.0", - "type" : "DeclarativeSource", - "definitions": { - "selector": { - "type": "RecordSelector", - "extractor": { - "type": "DpathExtractor", - "field_pointer": [] - } - }, - "ranks_stream": { - "$ref": "*ref(definitions.base_stream)", - "$options": { - "name": "ranks", - "primary_key": "id", - "path": "/ranks" - } - } - }, - "streams": ["*ref(definitions.ranks_stream)"], - "check": { - "type": "CheckStream", - "stream_names": ["ranks"] - } - } - - with pytest.raises(UndefinedReferenceException): - LowCodeSourceAdapter(invalid_reference_manifest, MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, MAXIMUM_NUMBER_OF_SLICES) - - -def test_stream_use_read_test_retriever_and_paginator(): - adapter = LowCodeSourceAdapter(MANIFEST_WITH_PAGINATOR, MAXIMUM_NUMBER_OF_PAGES_PER_SLICE, MAXIMUM_NUMBER_OF_SLICES) - streams = adapter.get_http_streams(config={}) - - assert streams - for stream in streams: - assert isinstance(stream, SimpleRetrieverTestReadDecorator) - assert isinstance(stream.paginator, PaginatorTestReadDecorator) diff --git a/airbyte-connector-builder-server/unit_tests/test_unit_test.py b/airbyte-connector-builder-server/unit_tests/test_unit_test.py deleted file 
mode 100644 index f5f14bc82edf..000000000000 --- a/airbyte-connector-builder-server/unit_tests/test_unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def test_test(): - assert True diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile deleted file mode 100644 index 02baa3b4a03a..000000000000 --- a/airbyte-container-orchestrator/Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:1.0 -FROM ${JDK_IMAGE} AS orchestrator - -ARG DOCKER_BUILD_ARCH=amd64 - -RUN amazon-linux-extras install -y docker -RUN yum install -y jq tar && yum clean all - -RUN curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/${DOCKER_BUILD_ARCH}/kubectl" \ - && chmod +x kubectl && mv kubectl /usr/local/bin/ - -# Don't change this manually. Bump version expects to make moves based on this string -ARG VERSION=0.40.32 - -ENV APPLICATION airbyte-container-orchestrator -ENV VERSION=${VERSION} -ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-${VERSION}/bin/${APPLICATION}" - -WORKDIR /app - -# Grab well-known types file -COPY WellKnownTypes.json /app - -# Move orchestrator app -ADD bin/${APPLICATION}-${VERSION}.tar /app - -# wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-${VERSION}/bin/${APPLICATION}"] diff --git a/airbyte-container-orchestrator/build.gradle b/airbyte-container-orchestrator/build.gradle deleted file mode 100644 index 75da6541cc1d..000000000000 --- a/airbyte-container-orchestrator/build.gradle +++ /dev/null @@ -1,95 +0,0 @@ -import groovy.json.JsonBuilder -import groovy.yaml.YamlSlurper - -plugins { - id 'application' -} - -configurations { - airbyteProtocol -} - -dependencies { - annotationProcessor platform(libs.micronaut.bom) - annotationProcessor libs.bundles.micronaut.annotation.processor - - implementation 
platform(libs.micronaut.bom) - implementation libs.bundles.micronaut - - // Ensure that the versions defined in deps.toml are used - // instead of versions from transitive dependencies - implementation(libs.s3) { - // Force to avoid updated version brought in transitively from Micronaut 3.8+ - // that is incompatible with our current Helm setup - force = true - } - implementation(libs.aws.java.sdk.s3) { - // Force to avoid updated version brought in transitively from Micronaut 3.8+ - // that is incompatible wit hour current Helm setup - force = true - } - - implementation 'io.fabric8:kubernetes-client:5.12.2' - implementation libs.bundles.datadog - - implementation project(':airbyte-api') - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-commons-protocol') - implementation project(':airbyte-commons-temporal') - implementation project(':airbyte-commons-worker') - implementation project(':airbyte-config:init') - implementation project(':airbyte-featureflag') - implementation project(':airbyte-json-validation') - implementation libs.airbyte.protocol - implementation project(':airbyte-metrics:metrics-lib') - implementation project(':airbyte-worker-models') - - testAnnotationProcessor platform(libs.micronaut.bom) - testAnnotationProcessor libs.bundles.micronaut.test.annotation.processor - - testImplementation libs.bundles.micronaut.test - testImplementation 'org.mockito:mockito-inline:2.13.0' - testImplementation libs.postgresql - testImplementation libs.platform.testcontainers - testImplementation libs.platform.testcontainers.postgresql - - airbyteProtocol(libs.airbyte.protocol) { - transitive = false - } -} - -application { - applicationName = "airbyte-container-orchestrator" - mainClass = "io.airbyte.container_orchestrator.Application" - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -// Duplicated from :airbyte-worker, eventually, this should be handled in :airbyte-protocol 
-task generateWellKnownTypes() { - doLast { - def wellKnownTypesYamlPath = 'airbyte_protocol/well_known_types.yaml' - configurations.airbyteProtocol.getFiles().each { - def zip = new java.util.zip.ZipFile(it) - def entry = zip.getEntry(wellKnownTypesYamlPath) - - def wellKnownTypesYaml = zip.getInputStream(entry).text - def parsedYaml = new YamlSlurper().parseText(wellKnownTypesYaml) - def wellKnownTypesJson = new JsonBuilder(parsedYaml).toPrettyString() - def targetFile = project.file("build/docker/WellKnownTypes.json") - targetFile.getParentFile().mkdirs() - targetFile.text = wellKnownTypesJson - } - } -} - -tasks.named("buildDockerImage") { - dependsOn copyGeneratedTar - dependsOn generateWellKnownTypes -} - -// Ideally, we would have buildDockerImage depend on generateWellKnownTypes -// but some of our actions use copyGeneratedTar as the "set up the docker build context" task -// so we'll just add it here. -tasks.named("copyGeneratedTar") { - dependsOn generateWellKnownTypes -} diff --git a/airbyte-container-orchestrator/gradle.properties b/airbyte-container-orchestrator/gradle.properties deleted file mode 100644 index b98ac03de2ea..000000000000 --- a/airbyte-container-orchestrator/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=container-orchestrator diff --git a/airbyte-container-orchestrator/readme.md b/airbyte-container-orchestrator/readme.md deleted file mode 100644 index 05f8a46120b4..000000000000 --- a/airbyte-container-orchestrator/readme.md +++ /dev/null @@ -1,6 +0,0 @@ -# airbyte-container-orchestrator - -This module contains logic to handle launching connector containers. It is called from the temporal workflows in `airbyte-workers`. It is called from the worker and spins up in a separate pod so that sync workflows can be isolated from each other. 
- -## Entrypoint -* `ContainerOrchestratorApp.java` diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/Application.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/Application.java deleted file mode 100644 index af03f4902035..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/Application.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.logging.LoggingHelper; -import io.airbyte.commons.logging.MdcScope; -import io.airbyte.container_orchestrator.orchestrator.JobOrchestrator; -import io.airbyte.workers.process.AsyncKubePodStatus; -import io.micronaut.runtime.Micronaut; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.lang.invoke.MethodHandles; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Entrypoint for the application responsible for launching containers and handling all message - * passing for replication, normalization, and dbt. Also, the current version relies on a heartbeat - * from a Temporal worker. This will also be removed in the future so this can run fully async. - *

- * This application retrieves most of its configuration from copied files from the calling Temporal - * worker. - *

- * This app uses default logging which is directly captured by the calling Temporal worker. In the - * future this will need to independently interact with cloud storage. - */ -@SuppressWarnings({"PMD.AvoidCatchingThrowable", "PMD.DoNotTerminateVM", "PMD.AvoidFieldNameMatchingTypeName"}) -@Singleton -public class Application { - - public static void main(final String[] args) { - // To mimic previous behavior, assume an exit code of 1 unless Application.run returns otherwise. - var exitCode = 1; - try (final var ctx = Micronaut.run(Application.class, args)) { - exitCode = ctx.getBean(Application.class).run(); - } catch (final Throwable t) { - log.error("could not run {}", t.getMessage(), t); - } finally { - // this mimics the pre-micronaut code, unsure if there is a better way in micronaut to ensure a - // non-zero exit code - System.exit(exitCode); - } - } - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private final String application; - private final JobOrchestrator jobOrchestrator; - private final AsyncStateManager asyncStateManager; - - public Application(@Named("application") final String application, - final JobOrchestrator jobOrchestrator, - final AsyncStateManager asyncStateManager) { - this.application = application; - this.jobOrchestrator = jobOrchestrator; - this.asyncStateManager = asyncStateManager; - } - - /** - * Configures logging/mdc scope, and creates all objects necessary to handle state updates. - *

- * Handles state updates (including writing failures) and running the job orchestrator. As much of - * the initialization as possible should go in here, so it's logged properly and the state storage - * is updated appropriately. - */ - @VisibleForTesting - int run() { - // set mdc scope for the remaining execution - try (final var mdcScope = new MdcScope.Builder() - .setLogPrefix(application) - .setPrefixColor(LoggingHelper.Color.CYAN_BACKGROUND) - .build()) { - - asyncStateManager.write(AsyncKubePodStatus.INITIALIZING); - asyncStateManager.write(AsyncKubePodStatus.RUNNING); - asyncStateManager.write(AsyncKubePodStatus.SUCCEEDED, jobOrchestrator.runJob().orElse("")); - } catch (final Throwable t) { - log.error("Killing orchestrator because of an Exception", t); - asyncStateManager.write(AsyncKubePodStatus.FAILED); - return 1; - } - - return 0; - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/AsyncStateManager.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/AsyncStateManager.java deleted file mode 100644 index ea65906810ed..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/AsyncStateManager.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.workers.process.AsyncKubePodStatus; -import io.airbyte.workers.process.KubePodInfo; -import io.airbyte.workers.storage.DocumentStoreClient; -import jakarta.inject.Singleton; -import java.lang.invoke.MethodHandles; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * The state manager writes the "truth" for states of the async pod process. If the store isn't - * updated by the underlying pod, it will appear as failed. - *

- * It doesn't have a single value for a state. Instead, in a location on cloud storage or disk, it - * writes every state it's encountered. - */ -@Singleton -public class AsyncStateManager { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final List STATUS_CHECK_ORDER = List.of( - // terminal states first - AsyncKubePodStatus.FAILED, - AsyncKubePodStatus.SUCCEEDED, - // then check in progress state - AsyncKubePodStatus.RUNNING, - // then check for initialization state - AsyncKubePodStatus.INITIALIZING); - - private final DocumentStoreClient documentStoreClient; - private final KubePodInfo kubePodInfo; - - public AsyncStateManager(final DocumentStoreClient documentStoreClient, final KubePodInfo kubePodInfo) { - this.documentStoreClient = documentStoreClient; - this.kubePodInfo = kubePodInfo; - } - - /** - * Writes an empty file to a location designated by the input status. - */ - public void write(final AsyncKubePodStatus status, final String value) { - final var key = getDocumentStoreKey(status); - log.info("Writing async status {} for {}...", status, kubePodInfo); - documentStoreClient.write(key, value); - } - - /** - * Writes a file containing a string value to a location designated by the input status. - */ - public void write(final AsyncKubePodStatus status) { - write(status, ""); - } - - /** - * Interprets the state given all written state messages for the pod. - *

- * Checks terminal states first, then running, then initialized. Defaults to not started. - *

- * The order matters here! - */ - public AsyncKubePodStatus getStatus() { - return STATUS_CHECK_ORDER.stream() - .filter(this::statusFileExists) - .findFirst() - .orElse(AsyncKubePodStatus.NOT_STARTED); - } - - /** - * @return the output stored in the success file. This can be an empty string. - * @throws IllegalArgumentException if no success file exists - */ - public String getOutput() throws IllegalArgumentException { - final var key = getDocumentStoreKey(AsyncKubePodStatus.SUCCEEDED); - final var output = documentStoreClient.read(key); - - return output.orElseThrow(() -> new IllegalArgumentException("Expected to retrieve output from a successfully completed pod!")); - - } - - /** - * IMPORTANT: Changing the storage location will orphan already existing kube pods when the new - * version is deployed! - */ - @VisibleForTesting - String getDocumentStoreKey(final AsyncKubePodStatus status) { - return kubePodInfo.namespace() + "/" + kubePodInfo.name() + "/" + status.name(); - } - - private boolean statusFileExists(final AsyncKubePodStatus status) { - final var key = getDocumentStoreKey(status); - return documentStoreClient.read(key).isPresent(); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/EventListeners.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/EventListeners.java deleted file mode 100644 index ed2bebc88754..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/EventListeners.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator; - -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.commons.temporal.sync.OrchestratorConstants; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.micronaut.runtime.event.annotation.EventListener; -import io.micronaut.runtime.server.event.ServerStartupEvent; -import jakarta.inject.Inject; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.lang.invoke.MethodHandles; -import java.util.Map; -import java.util.function.BiFunction; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.core.LoggerContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Singleton -public class EventListeners { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final Map envVars; - private final EnvConfigs configs; - private final JobRunConfig jobRunConfig; - private final BiFunction propertySetter; - - @Inject - EventListeners(@Named("envVars") final Map envVars, final EnvConfigs configs, final JobRunConfig jobRunConfig) { - this(envVars, configs, jobRunConfig, (name, value) -> { - System.setProperty(name, value); - return null; - }); - } - - /** - * Exists only for overriding the default property setter for testing - */ - EventListeners(@Named("envVars") final Map envVars, - final EnvConfigs configs, - final JobRunConfig jobRunConfig, - final BiFunction propertySetter) { - this.envVars = envVars; - this.configs = configs; - this.jobRunConfig = jobRunConfig; - this.propertySetter = propertySetter; - } - - /** - * Configures the environment variables for this app. - *

- * Should this be replaced with env-vars set on the container itself? - * - * @param unused required so Micronaut knows when to run this event-listener, but not used - */ - @EventListener - void setEnvVars(final ServerStartupEvent unused) { - log.info("settings env vars"); - - OrchestratorConstants.ENV_VARS_TO_TRANSFER.stream() - .filter(envVars::containsKey) - .forEach(envVar -> propertySetter.apply(envVar, envVars.get(envVar))); - } - - /** - * Configures the logging for this app. - * - * @param unused required so Micronaut knows when to run this event-listener, but not used - */ - @EventListener - void setLogging(final ServerStartupEvent unused) { - log.info("started logging"); - - // make sure the new configuration is picked up - final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); - ctx.reconfigure(); - - LogClientSingleton.getInstance().setJobMdc( - configs.getWorkerEnvironment(), - configs.getLogConfigs(), - TemporalUtils.getJobRoot(configs.getWorkspaceRoot(), jobRunConfig.getJobId(), jobRunConfig.getAttemptId())); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/HeartbeatController.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/HeartbeatController.java deleted file mode 100644 index 8cdce37ed4cc..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/HeartbeatController.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator; - -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Get; -import io.micronaut.http.annotation.Produces; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; -import java.util.Map; - -/** - * Creates a controller that returns a 200 JSON response on any path requested. - *

- * This is intended to stay up as long as the Kube worker exists so pods spun up can check if the - * spawning Kube worker still exists. - */ -// @TODO remove this annotation -@Secured(SecurityRule.IS_ANONYMOUS) -@Controller -public class HeartbeatController { - - private static final Map response = Map.of("up", true); - - @Get - @Produces(MediaType.APPLICATION_JSON) - public Map get() { - return response; - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ConfigFactory.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ConfigFactory.java deleted file mode 100644 index 17042cb9d760..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ConfigFactory.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator.config; - -import com.fasterxml.jackson.core.type.TypeReference; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.sync.OrchestratorConstants; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.process.AsyncOrchestratorPodProcess; -import io.airbyte.workers.process.KubePodInfo; -import io.airbyte.workers.process.KubePodProcess; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Value; -import jakarta.annotation.Nullable; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Map; - -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -@Factory -public class ConfigFactory { - - /** - * Returns the config directory which contains all the configuration files. - * - * @param configDir optional directory, defaults to KubePodProcess.CONFIG_DIR if not defined. - * @return Configuration directory. 
- */ - @Singleton - @Named("configDir") - String configDir(@Value("${airbyte.config-dir}") @Nullable final String configDir) { - if (configDir == null) { - return KubePodProcess.CONFIG_DIR; - } - return configDir; - } - - /** - * Returns the contents of the OrchestratorConstants.INIT_FILE_APPLICATION file. - * - * @param configDir Which directory contains the OrchestratorConstants.INIT_FILE_APPLICATION file. - * @return Contents of OrchestratorConstants.INIT_FILE_APPLICATION - * @throws IOException - */ - @Singleton - @Named("application") - String application(@Named("configDir") final String configDir) throws IOException { - return Files.readString(Path.of(configDir, OrchestratorConstants.INIT_FILE_APPLICATION)); - } - - /** - * Returns the contents of the OrchestratorConstants.INIT_FILE_ENV_MAP file. - * - * @param configDir Which directory contains the OrchestratorConstants.INIT_FILE_ENV_MAP file. - * @return Contents of OrchestratorConstants.INIT_FILE_ENV_MAP - */ - @Singleton - @Named("envVars") - Map envVars(@Named("configDir") final String configDir) { - return Jsons.deserialize( - Path.of(configDir, OrchestratorConstants.INIT_FILE_ENV_MAP).toFile(), new TypeReference<>() {}); - } - - /** - * Returns the contents of the OrchestratorConstants.INIT_FILE_JOB_RUN_CONFIG file. - * - * @param configDir Which directory contains the OrchestratorConstants.INIT_FILE_JOB_RUN_CONFIG - * file. - * @return Contents of OrchestratorConstants.INIT_FILE_JOB_RUN_CONFIG - */ - @Singleton - JobRunConfig jobRunConfig(@Named("configDir") final String configDir) { - return Jsons.deserialize(Path.of(configDir, OrchestratorConstants.INIT_FILE_JOB_RUN_CONFIG).toFile(), JobRunConfig.class); - } - - /** - * Returns the contents of the OrchestratorConstants.KUBE_POD_INFO file. - * - * @param configDir Which directory contains the OrchestratorConstants.KUBE_POD_INFO file. 
- * @return Contents of OrchestratorConstants.KUBE_POD_INFO - */ - @Singleton - KubePodInfo kubePodInfo(@Named("configDir") final String configDir) { - return Jsons.deserialize(Path.of(configDir, AsyncOrchestratorPodProcess.KUBE_POD_INFO).toFile(), KubePodInfo.class); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java deleted file mode 100644 index 74004a3e802d..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator.config; - -import io.airbyte.api.client.generated.DestinationApi; -import io.airbyte.api.client.generated.SourceApi; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider; -import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory; -import io.airbyte.commons.temporal.sync.OrchestratorConstants; -import io.airbyte.config.EnvConfigs; -import io.airbyte.container_orchestrator.orchestrator.DbtJobOrchestrator; -import io.airbyte.container_orchestrator.orchestrator.JobOrchestrator; -import io.airbyte.container_orchestrator.orchestrator.NoOpOrchestrator; -import io.airbyte.container_orchestrator.orchestrator.NormalizationJobOrchestrator; -import io.airbyte.container_orchestrator.orchestrator.ReplicationJobOrchestrator; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.process.AsyncOrchestratorPodProcess; -import io.airbyte.workers.process.DockerProcessFactory; -import 
io.airbyte.workers.process.KubePortManagerSingleton; -import io.airbyte.workers.process.KubeProcessFactory; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.storage.DocumentStoreClient; -import io.airbyte.workers.storage.StateClients; -import io.airbyte.workers.sync.DbtLauncherWorker; -import io.airbyte.workers.sync.NormalizationLauncherWorker; -import io.airbyte.workers.sync.ReplicationLauncherWorker; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.annotation.Value; -import io.micronaut.context.env.Environment; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.nio.file.Path; -import java.util.Map; - -@Factory -class ContainerOrchestratorFactory { - - @Singleton - FeatureFlags featureFlags() { - return new EnvVariableFeatureFlags(); - } - - @Singleton - EnvConfigs envConfigs(@Named("envVars") final Map env) { - return new EnvConfigs(env); - } - - @Singleton - WorkerConfigs workerConfigs(final EnvConfigs envConfigs) { - return new WorkerConfigs(envConfigs); - } - - @Singleton - @Requires(notEnv = Environment.KUBERNETES) - ProcessFactory dockerProcessFactory(final WorkerConfigs workerConfigs, final EnvConfigs configs) { - return new DockerProcessFactory( - workerConfigs, - configs.getWorkspaceRoot(), // Path.of(workspaceRoot), - configs.getWorkspaceDockerMount(), // workspaceDockerMount, - configs.getLocalDockerMount(), // localDockerMount, - configs.getDockerNetwork()// dockerNetwork - ); - } - - @Singleton - @Requires(env = Environment.KUBERNETES) - ProcessFactory kubeProcessFactory( - final WorkerConfigs workerConfigs, - final EnvConfigs configs, - @Value("${micronaut.server.port}") final int serverPort) - throws UnknownHostException { - final var localIp = InetAddress.getLocalHost().getHostAddress(); 
- final var kubeHeartbeatUrl = localIp + ":" + serverPort; - - // this needs to have two ports for the source and two ports for the destination (all four must be - // exposed) - KubePortManagerSingleton.init(OrchestratorConstants.PORTS); - - return new KubeProcessFactory( - workerConfigs, - configs.getJobKubeNamespace(), - new DefaultKubernetesClient(), - kubeHeartbeatUrl, - false); - } - - @Singleton - JobOrchestrator jobOrchestrator( - @Named("application") final String application, - final EnvConfigs envConfigs, - final ProcessFactory processFactory, - final FeatureFlags featureFlags, - final FeatureFlagClient featureFlagClient, - final WorkerConfigs workerConfigs, - final AirbyteMessageSerDeProvider serdeProvider, - final AirbyteProtocolVersionedMigratorFactory migratorFactory, - final JobRunConfig jobRunConfig, - final SourceApi sourceApi, - final DestinationApi destinationApi) { - return switch (application) { - case ReplicationLauncherWorker.REPLICATION -> new ReplicationJobOrchestrator(envConfigs, processFactory, featureFlags, featureFlagClient, - serdeProvider, - migratorFactory, jobRunConfig, sourceApi, destinationApi); - case NormalizationLauncherWorker.NORMALIZATION -> new NormalizationJobOrchestrator(envConfigs, processFactory, jobRunConfig); - case DbtLauncherWorker.DBT -> new DbtJobOrchestrator(envConfigs, workerConfigs, processFactory, jobRunConfig); - case AsyncOrchestratorPodProcess.NO_OP -> new NoOpOrchestrator(); - default -> throw new IllegalStateException("Could not find job orchestrator for application: " + application); - }; - } - - @Singleton - DocumentStoreClient documentStoreClient(final EnvConfigs config) { - return StateClients.create(config.getStateStorageCloudConfigs(), Path.of("/state")); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/DbtJobOrchestrator.java 
b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/DbtJobOrchestrator.java deleted file mode 100644 index 7ca402da3b30..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/DbtJobOrchestrator.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator.orchestrator; - -import static io.airbyte.metrics.lib.ApmTraceConstants.JOB_ORCHESTRATOR_OPERATION_NAME; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DESTINATION_DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; - -import datadog.trace.api.Trace; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.config.Configs; -import io.airbyte.config.OperatorDbtInput; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.general.DbtTransformationRunner; -import io.airbyte.workers.general.DbtTransformationWorker; -import io.airbyte.workers.normalization.DefaultNormalizationRunner; -import io.airbyte.workers.process.KubePodProcess; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.ReplicationLauncherWorker; -import java.lang.invoke.MethodHandles; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DbtJobOrchestrator implements JobOrchestrator { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final Configs configs; - private final WorkerConfigs workerConfigs; - private final ProcessFactory processFactory; - private final JobRunConfig jobRunConfig; - - public DbtJobOrchestrator(final Configs 
configs, - final WorkerConfigs workerConfigs, - final ProcessFactory processFactory, - final JobRunConfig jobRunConfig) { - this.configs = configs; - this.workerConfigs = workerConfigs; - this.processFactory = processFactory; - this.jobRunConfig = jobRunConfig; - } - - @Override - public String getOrchestratorName() { - return "DBT Transformation"; - } - - @Override - public Class getInputClass() { - return OperatorDbtInput.class; - } - - @Trace(operationName = JOB_ORCHESTRATOR_OPERATION_NAME) - @Override - public Optional runJob() throws Exception { - final OperatorDbtInput dbtInput = readInput(); - - final IntegrationLauncherConfig destinationLauncherConfig = JobOrchestrator.readAndDeserializeFile( - Path.of(KubePodProcess.CONFIG_DIR, - ReplicationLauncherWorker.INIT_FILE_DESTINATION_LAUNCHER_CONFIG), - IntegrationLauncherConfig.class); - - ApmTraceUtils - .addTagsToTrace(Map.of(JOB_ID_KEY, jobRunConfig.getJobId(), DESTINATION_DOCKER_IMAGE_KEY, - destinationLauncherConfig.getDockerImage())); - - log.info("Setting up dbt worker..."); - final DbtTransformationWorker worker = new DbtTransformationWorker( - jobRunConfig.getJobId(), - Math.toIntExact(jobRunConfig.getAttemptId()), - workerConfigs.getResourceRequirements(), - new DbtTransformationRunner( - processFactory, new DefaultNormalizationRunner( - processFactory, - destinationLauncherConfig.getNormalizationDockerImage(), - destinationLauncherConfig.getNormalizationIntegrationType()))); - - log.info("Running dbt worker..."); - final Path jobRoot = TemporalUtils.getJobRoot(configs.getWorkspaceRoot(), - jobRunConfig.getJobId(), jobRunConfig.getAttemptId()); - worker.run(dbtInput, jobRoot); - - return Optional.empty(); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/JobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/JobOrchestrator.java deleted file mode 100644 index 
27c549f60d6d..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/JobOrchestrator.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator.orchestrator; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.sync.OrchestratorConstants; -import io.airbyte.workers.process.KubePodProcess; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Optional; - -/** - * The job orchestrator helps abstract over container launcher application differences across - * replication, normalization, and custom dbt operators. - * - * @param job input type - */ -public interface JobOrchestrator { - - // used for logging - String getOrchestratorName(); - - // used to serialize the loaded input - Class getInputClass(); - - // reads input from a file that was copied to the container launcher - default INPUT readInput() throws IOException { - return Jsons.deserialize( - Path.of(KubePodProcess.CONFIG_DIR, OrchestratorConstants.INIT_FILE_INPUT).toFile(), - getInputClass()); - } - - /** - * Contains the unique logic that belongs to each type of job. - * - * @return an optional output value to place within the output document store item. 
- */ - Optional runJob() throws Exception; - - static T readAndDeserializeFile(final Path path, final Class type) throws IOException { - return Jsons.deserialize(Files.readString(path), type); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NoOpOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NoOpOrchestrator.java deleted file mode 100644 index 3582fced895c..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NoOpOrchestrator.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator.orchestrator; - -import io.airbyte.workers.process.AsyncOrchestratorPodProcess; -import java.lang.invoke.MethodHandles; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * For testing only. 
- */ -public class NoOpOrchestrator implements JobOrchestrator { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - @Override - public String getOrchestratorName() { - return AsyncOrchestratorPodProcess.NO_OP; - } - - @Override - public Class getInputClass() { - return String.class; - } - - @Override - public Optional runJob() throws Exception { - log.info("Running no-op job."); - return Optional.empty(); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NormalizationJobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NormalizationJobOrchestrator.java deleted file mode 100644 index 4b9d4608f66e..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NormalizationJobOrchestrator.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator.orchestrator; - -import static io.airbyte.metrics.lib.ApmTraceConstants.JOB_ORCHESTRATOR_OPERATION_NAME; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DESTINATION_DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; - -import datadog.trace.api.Trace; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.config.Configs; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.general.DefaultNormalizationWorker; -import io.airbyte.workers.normalization.DefaultNormalizationRunner; -import io.airbyte.workers.normalization.NormalizationWorker; -import io.airbyte.workers.process.KubePodProcess; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.ReplicationLauncherWorker; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class NormalizationJobOrchestrator implements JobOrchestrator { - - private final Configs configs; - private final ProcessFactory processFactory; - private final JobRunConfig jobRunConfig; - - public NormalizationJobOrchestrator(final Configs configs, final ProcessFactory processFactory, final JobRunConfig jobRunConfig) { - this.configs = configs; - this.processFactory = processFactory; - this.jobRunConfig = jobRunConfig; - } - - @Override - public String getOrchestratorName() { - return "Normalization"; - } - - @Override - public Class getInputClass() { - return NormalizationInput.class; - } - - @Trace(operationName = JOB_ORCHESTRATOR_OPERATION_NAME) - @Override - public Optional runJob() throws Exception { - // final JobRunConfig jobRunConfig = 
readJobRunConfig(); - final NormalizationInput normalizationInput = readInput(); - - final IntegrationLauncherConfig destinationLauncherConfig = JobOrchestrator.readAndDeserializeFile( - Path.of(KubePodProcess.CONFIG_DIR, - ReplicationLauncherWorker.INIT_FILE_DESTINATION_LAUNCHER_CONFIG), - IntegrationLauncherConfig.class); - - ApmTraceUtils - .addTagsToTrace(Map.of(JOB_ID_KEY, jobRunConfig.getJobId(), DESTINATION_DOCKER_IMAGE_KEY, - destinationLauncherConfig.getDockerImage())); - - log.info("Setting up normalization worker..."); - final NormalizationWorker normalizationWorker = new DefaultNormalizationWorker( - jobRunConfig.getJobId(), - Math.toIntExact(jobRunConfig.getAttemptId()), - new DefaultNormalizationRunner( - processFactory, - destinationLauncherConfig.getNormalizationDockerImage(), - destinationLauncherConfig.getNormalizationIntegrationType()), - configs.getWorkerEnvironment()); - - log.info("Running normalization worker..."); - final Path jobRoot = TemporalUtils.getJobRoot(configs.getWorkspaceRoot(), - jobRunConfig.getJobId(), jobRunConfig.getAttemptId()); - final NormalizationSummary normalizationSummary = normalizationWorker.run(normalizationInput, - jobRoot); - - return Optional.of(Jsons.serialize(normalizationSummary)); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java deleted file mode 100644 index fadbeaad5207..000000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator.orchestrator; - -import static io.airbyte.metrics.lib.ApmTraceConstants.JOB_ORCHESTRATOR_OPERATION_NAME; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DESTINATION_DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.SOURCE_DOCKER_IMAGE_KEY; - -import datadog.trace.api.Trace; -import io.airbyte.api.client.generated.DestinationApi; -import io.airbyte.api.client.generated.SourceApi; -import io.airbyte.commons.features.FeatureFlagHelper; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.MdcScope; -import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider; -import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.commons.version.Version; -import io.airbyte.config.Configs; -import io.airbyte.config.ReplicationOutput; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.FieldSelectionEnabled; -import io.airbyte.featureflag.Workspace; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.MetricEmittingApps; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.workers.RecordSchemaValidator; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.WorkerMetricReporter; -import io.airbyte.workers.WorkerUtils; -import io.airbyte.workers.general.DefaultReplicationWorker; -import io.airbyte.workers.helper.ConnectorConfigUpdater; -import io.airbyte.workers.internal.AirbyteStreamFactory; -import 
io.airbyte.workers.internal.DefaultAirbyteDestination; -import io.airbyte.workers.internal.DefaultAirbyteSource; -import io.airbyte.workers.internal.DefaultAirbyteStreamFactory; -import io.airbyte.workers.internal.EmptyAirbyteSource; -import io.airbyte.workers.internal.NamespacingMapper; -import io.airbyte.workers.internal.VersionedAirbyteMessageBufferedWriterFactory; -import io.airbyte.workers.internal.VersionedAirbyteStreamFactory; -import io.airbyte.workers.internal.book_keeping.AirbyteMessageTracker; -import io.airbyte.workers.process.AirbyteIntegrationLauncher; -import io.airbyte.workers.process.KubePodProcess; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.ReplicationLauncherWorker; -import java.lang.invoke.MethodHandles; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ReplicationJobOrchestrator implements JobOrchestrator { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final ProcessFactory processFactory; - private final Configs configs; - private final FeatureFlags featureFlags; - private final FeatureFlagClient featureFlagClient; - private final AirbyteMessageSerDeProvider serDeProvider; - private final AirbyteProtocolVersionedMigratorFactory migratorFactory; - private final JobRunConfig jobRunConfig; - private final SourceApi sourceApi; - private final DestinationApi destinationApi; - - public ReplicationJobOrchestrator(final Configs configs, - final ProcessFactory processFactory, - final FeatureFlags featureFlags, - final FeatureFlagClient featureFlagClient, - final AirbyteMessageSerDeProvider serDeProvider, - final AirbyteProtocolVersionedMigratorFactory migratorFactory, - final JobRunConfig jobRunConfig, - final SourceApi sourceApi, - final DestinationApi destinationApi) { - this.configs = configs; - this.processFactory = 
processFactory; - this.featureFlags = featureFlags; - this.featureFlagClient = featureFlagClient; - this.serDeProvider = serDeProvider; - this.migratorFactory = migratorFactory; - this.jobRunConfig = jobRunConfig; - this.sourceApi = sourceApi; - this.destinationApi = destinationApi; - } - - @Override - public String getOrchestratorName() { - return "Replication"; - } - - @Override - public Class getInputClass() { - return StandardSyncInput.class; - } - - @Trace(operationName = JOB_ORCHESTRATOR_OPERATION_NAME) - @Override - public Optional runJob() throws Exception { - final var syncInput = readInput(); - - final var sourceLauncherConfig = JobOrchestrator.readAndDeserializeFile( - Path.of(KubePodProcess.CONFIG_DIR, ReplicationLauncherWorker.INIT_FILE_SOURCE_LAUNCHER_CONFIG), - IntegrationLauncherConfig.class); - - final var destinationLauncherConfig = JobOrchestrator.readAndDeserializeFile( - Path.of(KubePodProcess.CONFIG_DIR, ReplicationLauncherWorker.INIT_FILE_DESTINATION_LAUNCHER_CONFIG), - IntegrationLauncherConfig.class); - log.info("sourceLauncherConfig is: " + sourceLauncherConfig.toString()); - - ApmTraceUtils.addTagsToTrace( - Map.of(JOB_ID_KEY, jobRunConfig.getJobId(), - DESTINATION_DOCKER_IMAGE_KEY, destinationLauncherConfig.getDockerImage(), - SOURCE_DOCKER_IMAGE_KEY, sourceLauncherConfig.getDockerImage())); - - // At this moment, if either source or destination is from custom connector image, we will put all - // jobs into isolated pool to run. 
- final boolean useIsolatedPool = sourceLauncherConfig.getIsCustomConnector() || destinationLauncherConfig.getIsCustomConnector(); - log.info("Setting up source launcher..."); - final var sourceLauncher = new AirbyteIntegrationLauncher( - sourceLauncherConfig.getJobId(), - Math.toIntExact(sourceLauncherConfig.getAttemptId()), - sourceLauncherConfig.getDockerImage(), - processFactory, - syncInput.getSourceResourceRequirements(), - sourceLauncherConfig.getAllowedHosts(), - useIsolatedPool, - featureFlags); - - log.info("Setting up destination launcher..."); - final var destinationLauncher = new AirbyteIntegrationLauncher( - destinationLauncherConfig.getJobId(), - Math.toIntExact(destinationLauncherConfig.getAttemptId()), - destinationLauncherConfig.getDockerImage(), - processFactory, - syncInput.getDestinationResourceRequirements(), - destinationLauncherConfig.getAllowedHosts(), - useIsolatedPool, - featureFlags); - - log.info("Setting up source..."); - // reset jobs use an empty source to induce resetting all data in destination. - final var airbyteSource = - WorkerConstants.RESET_JOB_SOURCE_DOCKER_IMAGE_STUB.equals(sourceLauncherConfig.getDockerImage()) ? 
new EmptyAirbyteSource( - featureFlags.useStreamCapableState()) - : new DefaultAirbyteSource(sourceLauncher, - getStreamFactory(sourceLauncherConfig.getProtocolVersion(), syncInput.getCatalog(), DefaultAirbyteSource.CONTAINER_LOG_MDC_BUILDER), - migratorFactory.getProtocolSerializer(sourceLauncherConfig.getProtocolVersion()), featureFlags); - - MetricClientFactory.initialize(MetricEmittingApps.WORKER); - final var metricClient = MetricClientFactory.getMetricClient(); - final var metricReporter = new WorkerMetricReporter(metricClient, - sourceLauncherConfig.getDockerImage()); - - log.info("Setting up replication worker..."); - final UUID workspaceId = syncInput.getWorkspaceId(); - // NOTE: we apply field selection if the feature flag client says so (recommended) or the old - // environment-variable flags say so (deprecated). - // The latter FeatureFlagHelper will be removed once the flag client is fully deployed. - final boolean fieldSelectionEnabled = workspaceId != null && - (featureFlagClient.enabled(FieldSelectionEnabled.INSTANCE, new Workspace(workspaceId)) - || FeatureFlagHelper.isFieldSelectionEnabledForWorkspace(featureFlags, workspaceId)); - final var replicationWorker = new DefaultReplicationWorker( - jobRunConfig.getJobId(), - Math.toIntExact(jobRunConfig.getAttemptId()), - airbyteSource, - new NamespacingMapper(syncInput.getNamespaceDefinition(), syncInput.getNamespaceFormat(), syncInput.getPrefix()), - new DefaultAirbyteDestination(destinationLauncher, - getStreamFactory(destinationLauncherConfig.getProtocolVersion(), syncInput.getCatalog(), - DefaultAirbyteDestination.CONTAINER_LOG_MDC_BUILDER), - new VersionedAirbyteMessageBufferedWriterFactory(serDeProvider, migratorFactory, destinationLauncherConfig.getProtocolVersion(), - Optional.of(syncInput.getCatalog())), - migratorFactory.getProtocolSerializer(destinationLauncherConfig.getProtocolVersion())), - new AirbyteMessageTracker(featureFlags), - new RecordSchemaValidator(featureFlagClient, 
syncInput.getWorkspaceId(), WorkerUtils.mapStreamNamesToSchemas(syncInput)), - metricReporter, - new ConnectorConfigUpdater(sourceApi, destinationApi), - fieldSelectionEnabled); - - log.info("Running replication worker..."); - final var jobRoot = TemporalUtils.getJobRoot(configs.getWorkspaceRoot(), - jobRunConfig.getJobId(), jobRunConfig.getAttemptId()); - final ReplicationOutput replicationOutput = replicationWorker.run(syncInput, jobRoot); - - log.info("Returning output..."); - return Optional.of(Jsons.serialize(replicationOutput)); - } - - private AirbyteStreamFactory getStreamFactory(final Version protocolVersion, - final ConfiguredAirbyteCatalog configuredAirbyteCatalog, - final MdcScope.Builder mdcScope) { - return protocolVersion != null - ? new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, protocolVersion, Optional.of(configuredAirbyteCatalog), mdcScope, - Optional.of(RuntimeException.class)) - : new DefaultAirbyteStreamFactory(mdcScope); - } - -} diff --git a/airbyte-container-orchestrator/src/main/resources/application.yml b/airbyte-container-orchestrator/src/main/resources/application.yml deleted file mode 100644 index cb3caae40b1f..000000000000 --- a/airbyte-container-orchestrator/src/main/resources/application.yml +++ /dev/null @@ -1,53 +0,0 @@ -micronaut: - server: - port: 9000 - -airbyte: - config-dir: /config - acceptance: - test: - enabled: ${ACCEPTANCE_TEST_ENABLED:false} - control: - plane: - auth-endpoint: ${CONTROL_PLANE_AUTH_ENDPOINT:} - data: - plane: - service-account: - credentials-path: ${DATA_PLANE_SERVICE_ACCOUNT_CREDENTIALS_PATH:} - email: ${DATA_PLANE_SERVICE_ACCOUNT_EMAIL:} - feature-flag: - client: ${FEATURE_FLAG_CLIENT:config} - path: ${FEATURE_FLAG_PATH:/flags} - api-key: ${LAUNCHDARKLY_KEY:} - internal: - api: - auth-header: - name: ${AIRBYTE_API_AUTH_HEADER_NAME:} - value: ${AIRBYTE_API_AUTH_HEADER_VALUE:} - host: ${INTERNAL_API_HOST} - -endpoints: - beans: - enabled: true - sensitive: false - env: - enabled: 
true - sensitive: false - health: - enabled: true - sensitive: false - info: - enabled: true - sensitive: true - loggers: - enabled: true - sensitive: true - refresh: - enabled: false - sensitive: true - routes: - enabled: true - sensitive: false - threaddump: - enabled: true - sensitive: true diff --git a/airbyte-container-orchestrator/src/main/resources/micronaut-banner.txt b/airbyte-container-orchestrator/src/main/resources/micronaut-banner.txt deleted file mode 100644 index cc4505457b9e..000000000000 --- a/airbyte-container-orchestrator/src/main/resources/micronaut-banner.txt +++ /dev/null @@ -1,8 +0,0 @@ - - ___ _ __ __ - / | (_)____/ /_ __ __/ /____ - / /| | / / ___/ __ \/ / / / __/ _ \ - / ___ |/ / / / /_/ / /_/ / /_/ __/ -/_/ |_/_/_/ /_.___/\__, /\__/\___/ - /____/ - : airbyte-container-orchestrator : \ No newline at end of file diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/ApplicationTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/ApplicationTest.java deleted file mode 100644 index 552f766f8a80..000000000000 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/ApplicationTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.container_orchestrator.orchestrator.JobOrchestrator; -import io.airbyte.workers.process.AsyncKubePodStatus; -import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class ApplicationTest { - - private String application; - private JobOrchestrator jobOrchestrator; - private AsyncStateManager asyncStateManager; - - @BeforeEach - void setup() { - jobOrchestrator = mock(JobOrchestrator.class); - asyncStateManager = mock(AsyncStateManager.class); - } - - @Test - void testHappyPath() throws Exception { - final var output = "job-output"; - when(jobOrchestrator.runJob()).thenReturn(Optional.of(output)); - - final var app = new Application(application, jobOrchestrator, asyncStateManager); - final var code = app.run(); - - assertEquals(0, code); - verify(jobOrchestrator).runJob(); - verify(asyncStateManager).write(AsyncKubePodStatus.INITIALIZING); - verify(asyncStateManager).write(AsyncKubePodStatus.RUNNING); - verify(asyncStateManager).write(AsyncKubePodStatus.SUCCEEDED, output); - } - - @Test - void testJobFailedWritesFailedStatus() throws Exception { - when(jobOrchestrator.runJob()).thenThrow(new Exception()); - final var app = new Application(application, jobOrchestrator, asyncStateManager); - final var code = app.run(); - - assertEquals(1, code); - verify(jobOrchestrator).runJob(); - verify(asyncStateManager).write(AsyncKubePodStatus.INITIALIZING); - verify(asyncStateManager).write(AsyncKubePodStatus.RUNNING); - verify(asyncStateManager).write(AsyncKubePodStatus.FAILED); - } - -} diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/AsyncStateManagerTest.java 
b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/AsyncStateManagerTest.java deleted file mode 100644 index aabefcb86b6e..000000000000 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/AsyncStateManagerTest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.container_orchestrator; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.workers.process.AsyncKubePodStatus; -import io.airbyte.workers.process.KubeContainerInfo; -import io.airbyte.workers.process.KubePodInfo; -import io.airbyte.workers.storage.DocumentStoreClient; -import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class AsyncStateManagerTest { - - public static final String FAKE_IMAGE = "fake_image"; - private static final KubePodInfo KUBE_POD_INFO = new KubePodInfo("default", "pod1", - new KubeContainerInfo(FAKE_IMAGE, "IfNotPresent")); - private static final String OUTPUT = "some output value"; - - private DocumentStoreClient documentStore; - private AsyncStateManager stateManager; - - @BeforeEach - void setup() { - documentStore = mock(DocumentStoreClient.class); - stateManager = new AsyncStateManager(documentStore, KUBE_POD_INFO); - } - - @Test - void testEmptyWrite() { - stateManager.write(AsyncKubePodStatus.INITIALIZING); - - // test for overwrite (which should be allowed) - stateManager.write(AsyncKubePodStatus.INITIALIZING); - - final var key = stateManager.getDocumentStoreKey(AsyncKubePodStatus.INITIALIZING); - verify(documentStore, times(2)).write(key, ""); - } - - @Test - void testContentfulWrite() { - 
stateManager.write(AsyncKubePodStatus.SUCCEEDED, OUTPUT); - - final var key = stateManager.getDocumentStoreKey(AsyncKubePodStatus.SUCCEEDED); - verify(documentStore, times(1)).write(key, OUTPUT); - } - - @Test - void testReadingOutputWhenItExists() { - final var key = stateManager.getDocumentStoreKey(AsyncKubePodStatus.SUCCEEDED); - when(documentStore.read(key)).thenReturn(Optional.of(OUTPUT)); - assertEquals(OUTPUT, stateManager.getOutput()); - } - - @Test - void testReadingOutputWhenItDoesNotExist() { - // getting the output should throw an exception when there is no record in the document store - assertThrows(IllegalArgumentException.class, () -> { - stateManager.getOutput(); - }); - } - - @Test - void testSuccessfulStatusRetrievalLifecycle() { - when(documentStore.read(stateManager.getDocumentStoreKey(AsyncKubePodStatus.INITIALIZING))).thenReturn(Optional.empty()); - final var beforeInitializingStatus = stateManager.getStatus(); - assertEquals(AsyncKubePodStatus.NOT_STARTED, beforeInitializingStatus); - - when(documentStore.read(stateManager.getDocumentStoreKey(AsyncKubePodStatus.INITIALIZING))).thenReturn(Optional.of("")); - final var initializingStatus = stateManager.getStatus(); - assertEquals(AsyncKubePodStatus.INITIALIZING, initializingStatus); - - when(documentStore.read(stateManager.getDocumentStoreKey(AsyncKubePodStatus.RUNNING))).thenReturn(Optional.of("")); - final var runningStatus = stateManager.getStatus(); - assertEquals(AsyncKubePodStatus.RUNNING, runningStatus); - - when(documentStore.read(stateManager.getDocumentStoreKey(AsyncKubePodStatus.SUCCEEDED))).thenReturn( - Optional.of("output")); - final var succeededStatus = stateManager.getStatus(); - assertEquals(AsyncKubePodStatus.SUCCEEDED, succeededStatus); - } - - @Test - void testFailureStatusRetrievalLifecycle() { - when(documentStore.read(stateManager.getDocumentStoreKey(AsyncKubePodStatus.INITIALIZING))).thenReturn(Optional.empty()); - final var beforeInitializingStatus = 
stateManager.getStatus(); - assertEquals(AsyncKubePodStatus.NOT_STARTED, beforeInitializingStatus); - - when(documentStore.read(stateManager.getDocumentStoreKey(AsyncKubePodStatus.INITIALIZING))).thenReturn(Optional.of("")); - final var initializingStatus = stateManager.getStatus(); - assertEquals(AsyncKubePodStatus.INITIALIZING, initializingStatus); - - when(documentStore.read(stateManager.getDocumentStoreKey(AsyncKubePodStatus.RUNNING))).thenReturn(Optional.of("")); - final var runningStatus = stateManager.getStatus(); - assertEquals(AsyncKubePodStatus.RUNNING, runningStatus); - - when(documentStore.read(stateManager.getDocumentStoreKey(AsyncKubePodStatus.FAILED))).thenReturn(Optional.of("output")); - final var failedStatus = stateManager.getStatus(); - assertEquals(AsyncKubePodStatus.FAILED, failedStatus); - } - -} diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/EventListenersTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/EventListenersTest.java deleted file mode 100644 index 5e8ba157f0a1..000000000000 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/EventListenersTest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; - -import io.airbyte.config.EnvConfigs; -import io.airbyte.persistence.job.models.JobRunConfig; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class EventListenersTest { - - private Map envVars; - private EnvConfigs configs; - private JobRunConfig jobRunConfig; - - @BeforeEach - void setup() { - envVars = new HashMap(); - configs = mock(EnvConfigs.class); - jobRunConfig = mock(JobRunConfig.class); - } - - @Test - void setEnvVars() { - envVars = Map.of( - // should be set as it is part of the ENV_VARS_TO_TRANSFER - EnvConfigs.WORKER_ENVIRONMENT, "worker-environment", - // should not be set as it is not part of ENV_VARS_TO_TRANSFER - "RANDOM_ENV", "random-env"); - - final var properties = new HashMap(); - final var listeners = new EventListeners(envVars, configs, jobRunConfig, (name, value) -> { - properties.put(name, value); - return null; - }); - - listeners.setEnvVars(null); - assertEquals(1, properties.size()); - assertEquals("worker-environment", properties.get(EnvConfigs.WORKER_ENVIRONMENT)); - } - -} diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/HeartbeatControllerTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/HeartbeatControllerTest.java deleted file mode 100644 index 40981dd7f1d5..000000000000 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/HeartbeatControllerTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.micronaut.http.HttpRequest; -import io.micronaut.http.client.HttpClient; -import io.micronaut.http.client.annotation.Client; -import io.micronaut.runtime.server.EmbeddedServer; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import jakarta.inject.Inject; -import org.junit.jupiter.api.Test; - -@MicronautTest -class HeartbeatControllerTest { - - @Inject - EmbeddedServer server; - - @Inject - @Client("/") - HttpClient client; - - @Test - void testHeartbeat() { - final var response = client.toBlocking().retrieve(HttpRequest.GET("/")); - assertEquals("{\"up\":true}", response); - } - -} diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ConfigFactoryTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ConfigFactoryTest.java deleted file mode 100644 index 4c4b8fb9b6b3..000000000000 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ConfigFactoryTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator.config; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.process.KubePodInfo; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import jakarta.inject.Inject; -import jakarta.inject.Named; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@MicronautTest -class ConfigFactoryTest { - - @Inject - @Named("configDir") - String configDir; - - @Inject - @Named("application") - String application; - - @Inject - @Named("envVars") - Map envVars; - - @Inject - JobRunConfig jobRunConfig; - - @Inject - KubePodInfo kubePodInfo; - - @Test - void configDir() { - assertEquals("src/test/resources/files", configDir); - } - - @Test - void application() { - assertEquals("normalization-orchestrator", application); - } - - @Test - void envVars() { - assertEquals(29, envVars.size()); - } - - @Test - void jobRunConfig() { - assertEquals("824289", jobRunConfig.getJobId()); - assertEquals(10, jobRunConfig.getAttemptId()); - } - - @Test - void kubePodInfo() { - assertEquals("orchestrator-norm-job-824289-attempt-10", kubePodInfo.name()); - assertEquals("jobs", kubePodInfo.namespace()); - assertEquals("airbyte/container-orchestrator:dev-f0bb7a0ba3", kubePodInfo.mainContainerInfo().image()); - assertEquals("IfNotPresent", kubePodInfo.mainContainerInfo().pullPolicy()); - } - -} diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryK8sTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryK8sTest.java deleted file mode 100644 index 2fbf71e83001..000000000000 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryK8sTest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights 
reserved. - */ - -package io.airbyte.container_orchestrator.config; - -import static org.junit.jupiter.api.Assertions.assertInstanceOf; - -import io.airbyte.workers.process.KubeProcessFactory; -import io.airbyte.workers.process.ProcessFactory; -import io.micronaut.context.env.Environment; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import jakarta.inject.Inject; -import org.junit.jupiter.api.Test; - -@MicronautTest(environments = Environment.KUBERNETES) -class ContainerOrchestratorFactoryK8sTest { - - @Inject - ProcessFactory processFactory; - - @Test - void processFactory() { - assertInstanceOf(KubeProcessFactory.class, processFactory); - } - -} diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java deleted file mode 100644 index adc93c039602..000000000000 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator.config; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertInstanceOf; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.api.client.generated.DestinationApi; -import io.airbyte.api.client.generated.SourceApi; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider; -import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory; -import io.airbyte.config.EnvConfigs; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.TestClient; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.WorkerConfigs; -import io.airbyte.workers.process.AsyncOrchestratorPodProcess; -import io.airbyte.workers.process.DockerProcessFactory; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.DbtLauncherWorker; -import io.airbyte.workers.sync.NormalizationLauncherWorker; -import io.airbyte.workers.sync.ReplicationLauncherWorker; -import io.micronaut.context.annotation.Bean; -import io.micronaut.context.annotation.Replaces; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import jakarta.inject.Inject; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@MicronautTest -class ContainerOrchestratorFactoryTest { - - @Inject - FeatureFlags featureFlags; - - @Bean - @Replaces(FeatureFlagClient.class) - FeatureFlagClient featureFlagClient = new TestClient(Map.of()); - - @Inject - EnvConfigs envConfigs; - - @Inject - WorkerConfigs workerConfigs; - - @Inject - ProcessFactory processFactory; - - @Inject - AirbyteMessageSerDeProvider airbyteMessageSerDeProvider; - - @Inject - AirbyteProtocolVersionedMigratorFactory airbyteProtocolVersionedMigratorFactory; - - @Inject - JobRunConfig jobRunConfig; - - @Inject - 
SourceApi sourceApi; - - @Inject - DestinationApi destinationApi; - - // Tests will fail if this is uncommented, due to how the implementation of the DocumentStoreClient - // is being created - // @Inject - // DocumentStoreClient documentStoreClient; - - @Test - void featureFlags() { - assertNotNull(featureFlags); - } - - @Test - void envConfigs() { - // check one random environment variable to ensure the EnvConfigs was created correctly - assertEquals("/tmp/airbyte_local", envConfigs.getEnv(EnvConfigs.LOCAL_DOCKER_MOUNT)); - } - - @Test - void workerConfigs() { - // check two variables to ensure the WorkerConfig was created correctly - assertEquals("1", workerConfigs.getResourceRequirements().getCpuLimit()); - assertEquals("1Gi", workerConfigs.getResourceRequirements().getMemoryLimit()); - } - - @Test - void processFactory() { - assertInstanceOf(DockerProcessFactory.class, processFactory); - } - - /** - * There isn't an easy way to test the correct JobOrchestrator is injected using @MicronautTest - * with @Nested classes, so opting for the more manual approach. 
- */ - @Test - void jobOrchestrator() { - final var factory = new ContainerOrchestratorFactory(); - - final var repl = factory.jobOrchestrator( - ReplicationLauncherWorker.REPLICATION, envConfigs, processFactory, featureFlags, featureFlagClient, workerConfigs, - airbyteMessageSerDeProvider, airbyteProtocolVersionedMigratorFactory, jobRunConfig, sourceApi, destinationApi); - assertEquals("Replication", repl.getOrchestratorName()); - - final var norm = factory.jobOrchestrator( - NormalizationLauncherWorker.NORMALIZATION, envConfigs, processFactory, featureFlags, featureFlagClient, workerConfigs, - airbyteMessageSerDeProvider, airbyteProtocolVersionedMigratorFactory, jobRunConfig, sourceApi, destinationApi); - assertEquals("Normalization", norm.getOrchestratorName()); - - final var dbt = factory.jobOrchestrator( - DbtLauncherWorker.DBT, envConfigs, processFactory, featureFlags, featureFlagClient, workerConfigs, - airbyteMessageSerDeProvider, airbyteProtocolVersionedMigratorFactory, jobRunConfig, sourceApi, destinationApi); - assertEquals("DBT Transformation", dbt.getOrchestratorName()); - - final var noop = factory.jobOrchestrator( - AsyncOrchestratorPodProcess.NO_OP, envConfigs, processFactory, featureFlags, featureFlagClient, workerConfigs, - airbyteMessageSerDeProvider, airbyteProtocolVersionedMigratorFactory, jobRunConfig, sourceApi, destinationApi); - assertEquals("NO_OP", noop.getOrchestratorName()); - - var caught = false; - try { - factory.jobOrchestrator( - "does not exist", envConfigs, processFactory, featureFlags, featureFlagClient, workerConfigs, - airbyteMessageSerDeProvider, airbyteProtocolVersionedMigratorFactory, jobRunConfig, sourceApi, destinationApi); - } catch (final Exception e) { - caught = true; - } - assertTrue(caught, "invalid application name should have thrown an exception"); - } - -} diff --git a/airbyte-container-orchestrator/src/test/resources/application.yml b/airbyte-container-orchestrator/src/test/resources/application.yml deleted file 
mode 100644 index b789c4757d51..000000000000 --- a/airbyte-container-orchestrator/src/test/resources/application.yml +++ /dev/null @@ -1,23 +0,0 @@ -micronaut: - server: - port: 9000 - -airbyte: - acceptance: - test: - enabled: false - config-dir: src/test/resources/files - control: - plane: - auth-endpoint: "" - data: - plane: - service-account: - credentials-path: "" - email: "" - internal: - api: - auth-header: - name: "" - value: "" - host: airbyte-server-svc:8001 \ No newline at end of file diff --git a/airbyte-container-orchestrator/src/test/resources/files/KUBE_POD_INFO b/airbyte-container-orchestrator/src/test/resources/files/KUBE_POD_INFO deleted file mode 100644 index 4d1363f4d3b6..000000000000 --- a/airbyte-container-orchestrator/src/test/resources/files/KUBE_POD_INFO +++ /dev/null @@ -1,8 +0,0 @@ -{ - "namespace": "jobs", - "name": "orchestrator-norm-job-824289-attempt-10", - "mainContainerInfo": { - "image": "airbyte/container-orchestrator:dev-f0bb7a0ba3", - "pullPolicy": "IfNotPresent" - } -} \ No newline at end of file diff --git a/airbyte-container-orchestrator/src/test/resources/files/application.txt b/airbyte-container-orchestrator/src/test/resources/files/application.txt deleted file mode 100644 index db208313a1f1..000000000000 --- a/airbyte-container-orchestrator/src/test/resources/files/application.txt +++ /dev/null @@ -1 +0,0 @@ -normalization-orchestrator \ No newline at end of file diff --git a/airbyte-container-orchestrator/src/test/resources/files/destinationLauncherConfig.json b/airbyte-container-orchestrator/src/test/resources/files/destinationLauncherConfig.json deleted file mode 100644 index 2673a94128d8..000000000000 --- a/airbyte-container-orchestrator/src/test/resources/files/destinationLauncherConfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "jobId": "824289", - "attemptId": 10, - "dockerImage": "airbyte/destination-postgres-strict-encrypt:0.3.26", - "protocolVersion": { - "version": "0.2.0" - } -} diff --git 
a/airbyte-container-orchestrator/src/test/resources/files/envMap.json b/airbyte-container-orchestrator/src/test/resources/files/envMap.json deleted file mode 100644 index 485382509ea8..000000000000 --- a/airbyte-container-orchestrator/src/test/resources/files/envMap.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "PUBLISH_METRICS": "true", - "LOCAL_DOCKER_MOUNT": "/tmp/airbyte_local", - "JOB_MAIN_CONTAINER_CPU_LIMIT": "1", - "AWS_SECRET_ACCESS_KEY": "secret-access-key", - "JOB_KUBE_NAMESPACE": "jobs", - "GOOGLE_APPLICATION_CREDENTIALS": "src/test/resources/files/creds.json", - "STATE_STORAGE_GCS_APPLICATION_CREDENTIALS": "src/test/resources/files/app-creds.json", - "S3_MINIO_ENDPOINT": " ", - "USE_STREAM_CAPABLE_STATE": "true", - "WORKER_ENVIRONMENT": "KUBERNETES", - "AWS_ACCESS_KEY_ID": "access-key-id", - "JOB_MAIN_CONTAINER_MEMORY_REQUEST": "1Gi", - "WORKSPACE_ROOT": "/workspace", - "METRIC_CLIENT": "imperial", - "LOG_LEVEL": "INFO", - "DEPLOYMENT_MODE": "CLOUD", - "JOB_KUBE_NODE_SELECTORS": "node-selectors", - "JOB_MAIN_CONTAINER_MEMORY_LIMIT": "1Gi", - "DD_DOGSTATSD_PORT": "8125", - "DD_AGENT_HOST": "172.20.100.45", - "S3_LOG_BUCKET_REGION": " ", - "GCS_LOG_BUCKET": "logs", - "STATE_STORAGE_GCS_BUCKET_NAME": "storage", - "S3_PATH_STYLE_ACCESS": " ", - "LOCAL_ROOT": "/tmp/airbyte_local", - "WORKSPACE_DOCKER_MOUNT": "workspace", - "JOB_MAIN_CONTAINER_CPU_REQUEST": "1", - "S3_LOG_BUCKET": " ", - "AIRBYTE_VERSION": "dev-hashhash-env" -} diff --git a/airbyte-container-orchestrator/src/test/resources/files/input.json b/airbyte-container-orchestrator/src/test/resources/files/input.json deleted file mode 100644 index 5a3230f16fee..000000000000 --- a/airbyte-container-orchestrator/src/test/resources/files/input.json +++ /dev/null @@ -1,162 +0,0 @@ -{ - "destinationConfiguration": { - "host": "example.us-east-2.rds.amazonaws.com", - "port": 5432, - "schema": "public", - "database": "example", - "password": "dmVyeXN0cm9uZ3Bhc3N3b3Jk", - "ssl_mode": { - "mode": "require" - }, - 
"username": "legit-user", - "tunnel_method": { - "tunnel_method": "NO_TUNNEL" - } - }, - "catalog": { - "streams": [ - { - "stream": { - "name": "example", - "json_schema": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "logs": { - "type": "string" - }, - "checksum": { - "type": "string" - }, - "started_at": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - }, - "finished_at": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - }, - "migration_name": { - "type": "string" - }, - "rolled_back_at": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - }, - "applied_steps_count": { - "type": "number", - "airbyte_type": "integer" - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "default_cursor_field": [], - "source_defined_primary_key": [["id"]], - "namespace": "events" - }, - "sync_mode": "full_refresh", - "cursor_field": [], - "destination_sync_mode": "overwrite", - "primary_key": [["id"]] - }, - { - "stream": { - "name": "example", - "json_schema": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "event": { - "type": "string" - }, - "context": { - "type": "string" - }, - "sent_at": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "user_id": { - "type": "string" - }, - "timestamp": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "message_id": { - "type": "string" - }, - "properties": { - "type": "string" - }, - "anonymous_id": { - "type": "string" - }, - "integrations": { - "type": "string" - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "default_cursor_field": [], - "source_defined_primary_key": [["id"]], - "namespace": "events" - }, - "sync_mode": "incremental", - "cursor_field": ["timestamp"], - "destination_sync_mode": "append_dedup", 
- "primary_key": [["id"]] - }, - { - "stream": { - "name": "example", - "json_schema": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "doc_id": { - "type": "string" - }, - "event_id": { - "type": "string" - }, - "timestamp": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "default_cursor_field": [], - "source_defined_primary_key": [["id"]], - "namespace": "events" - }, - "sync_mode": "incremental", - "cursor_field": ["timestamp"], - "destination_sync_mode": "append_dedup", - "primary_key": [["id"]] - } - ] - }, - "resourceRequirements": { - "cpu_request": "", - "cpu_limit": "", - "memory_request": "1.5Gi", - "memory_limit": "1.5Gi" - } -} diff --git a/airbyte-container-orchestrator/src/test/resources/files/jobRunConfig.json b/airbyte-container-orchestrator/src/test/resources/files/jobRunConfig.json deleted file mode 100644 index e06b6a368a5f..000000000000 --- a/airbyte-container-orchestrator/src/test/resources/files/jobRunConfig.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "jobId": "824289", - "attemptId": 10 -} diff --git a/airbyte-cron/Dockerfile b/airbyte-cron/Dockerfile deleted file mode 100644 index 2cbb772912b8..000000000000 --- a/airbyte-cron/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:1.0 -FROM ${JDK_IMAGE} AS cron - -ARG VERSION=0.40.32 - -ENV APPLICATION airbyte-cron -ENV VERSION ${VERSION} - -WORKDIR /app - -# This is automatically unzipped by Docker -ADD bin/${APPLICATION}-${VERSION}.tar /app - -# wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-${VERSION}/bin/${APPLICATION}"] diff --git a/airbyte-cron/LICENSE b/airbyte-cron/LICENSE deleted file mode 100644 index 7d1808d83342..000000000000 --- a/airbyte-cron/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - 
-Copyright (c) 2023 Airbyte, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/airbyte-cron/build.gradle b/airbyte-cron/build.gradle deleted file mode 100644 index 47c1c939a2ed..000000000000 --- a/airbyte-cron/build.gradle +++ /dev/null @@ -1,52 +0,0 @@ -plugins { - id 'application' -} - -dependencies { - implementation 'com.auth0:java-jwt:3.19.2' - implementation 'io.fabric8:kubernetes-client:5.12.2' - implementation 'io.sentry:sentry:6.3.1' - implementation libs.bundles.temporal - implementation libs.bundles.datadog - - implementation project(':airbyte-api') - implementation project(':airbyte-analytics') - implementation project(':airbyte-commons-temporal') - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') - implementation project(':airbyte-config:init') - implementation project(':airbyte-json-validation') - implementation project(':airbyte-db:db-lib') - implementation project(':airbyte-metrics:metrics-lib') - implementation project(':airbyte-persistence:job-persistence') - - annotationProcessor platform(libs.micronaut.bom) - annotationProcessor libs.bundles.micronaut.annotation.processor - - implementation platform(libs.micronaut.bom) - implementation libs.bundles.micronaut -} - -mainClassName = 'io.airbyte.cron.MicronautCronRunner' - -application { - mainClass = mainClassName - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -Properties env = new Properties() -rootProject.file('.env.dev').withInputStream { env.load(it) } - -run { - // default for running on local machine. 
- env.each { entry -> - environment entry.getKey(), entry.getValue() - } - - environment 'AIRBYTE_ROLE', System.getenv('AIRBYTE_ROLE') - environment 'AIRBYTE_VERSION', env.VERSION -} - -tasks.named("buildDockerImage") { - dependsOn copyGeneratedTar -} diff --git a/airbyte-cron/gradle.properties b/airbyte-cron/gradle.properties deleted file mode 100644 index fe63d1ca9ce4..000000000000 --- a/airbyte-cron/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=cron diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/ApplicationInitializer.java b/airbyte-cron/src/main/java/io/airbyte/cron/ApplicationInitializer.java deleted file mode 100644 index f5460f582a6f..000000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/ApplicationInitializer.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cron; - -import io.airbyte.commons.temporal.TemporalInitializationUtils; -import io.micronaut.context.event.ApplicationEventListener; -import io.micronaut.discovery.event.ServiceReadyEvent; -import jakarta.inject.Inject; - -public class ApplicationInitializer implements ApplicationEventListener { - - @Inject - private TemporalInitializationUtils temporalInitializationUtils; - - @Override - public void onApplicationEvent(ServiceReadyEvent event) { - temporalInitializationUtils.waitForTemporalNamespace(); - } - -} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/MicronautCronRunner.java b/airbyte-cron/src/main/java/io/airbyte/cron/MicronautCronRunner.java deleted file mode 100644 index 1f6ff6088af8..000000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/MicronautCronRunner.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cron; - -import io.micronaut.runtime.Micronaut; - -/** - * Micronaut server responsible of running scheduled method. 
The methods need to be separated in - * Bean based on what they are cleaning and contain a method annotated with `@Scheduled` - * - * Injected object looks unused but they are not - */ -public class MicronautCronRunner { - - public static final String SCHEDULED_TRACE_OPERATION_NAME = "scheduled"; - - public static void main(final String[] args) { - Micronaut.build(args) - .mainClass(MicronautCronRunner.class) - .start(); - } - -} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/config/ApplicationBeanFactory.java b/airbyte-cron/src/main/java/io/airbyte/cron/config/ApplicationBeanFactory.java deleted file mode 100644 index 065bdd9affcd..000000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/config/ApplicationBeanFactory.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cron.config; - -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Value; -import io.micronaut.core.util.StringUtils; -import jakarta.inject.Singleton; -import java.util.Locale; -import java.util.function.Function; -import lombok.extern.slf4j.Slf4j; - -/** - * Micronaut bean factory for general singletons. - */ -@Factory -@Slf4j -public class ApplicationBeanFactory { - - @Singleton - public DeploymentMode deploymentMode(@Value("${airbyte.deployment-mode}") final String deploymentMode) { - return convertToEnum(deploymentMode, DeploymentMode::valueOf, DeploymentMode.OSS); - } - - @Singleton - public JsonSecretsProcessor jsonSecretsProcessor() { - return JsonSecretsProcessor.builder() - .copySecrets(false) - .build(); - } - - private T convertToEnum(final String value, final Function creatorFunction, final T defaultValue) { - return StringUtils.isNotEmpty(value) ? 
creatorFunction.apply(value.toUpperCase(Locale.ROOT)) : defaultValue; - } - -} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java b/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java deleted file mode 100644 index 52928acc1f7c..000000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cron.config; - -import io.airbyte.commons.temporal.config.WorkerMode; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.StreamResetPersistence; -import io.airbyte.db.Database; -import io.airbyte.db.check.DatabaseMigrationCheck; -import io.airbyte.db.factory.DatabaseCheckFactory; -import io.airbyte.persistence.job.DefaultJobPersistence; -import io.airbyte.persistence.job.JobPersistence; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.annotation.Value; -import io.micronaut.flyway.FlywayConfigurationProperties; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import javax.sql.DataSource; -import lombok.extern.slf4j.Slf4j; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; - -/** - * Micronaut bean factory for database-related singletons. 
- */ -@Factory -@Slf4j -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class DatabaseBeanFactory { - - private static final String BASELINE_DESCRIPTION = "Baseline from file-based migration v1"; - private static final Boolean BASELINE_ON_MIGRATION = true; - private static final String INSTALLED_BY = "AirbyteCron"; - - @Singleton - @Named("configDatabase") - public Database configDatabase(@Named("config") final DSLContext dslContext) throws IOException { - return new Database(dslContext); - } - - @Singleton - @Requires(env = WorkerMode.CONTROL_PLANE) - @Named("jobsDatabase") - public Database jobsDatabase(@Named("jobs") final DSLContext dslContext) throws IOException { - return new Database(dslContext); - } - - @Singleton - @Named("configFlyway") - public Flyway configFlyway(@Named("config") final FlywayConfigurationProperties configFlywayConfigurationProperties, - @Named("config") final DataSource configDataSource, - @Value("${airbyte.flyway.configs.minimum-migration-version}") final String baselineVersion) { - return configFlywayConfigurationProperties.getFluentConfiguration() - .dataSource(configDataSource) - .baselineVersion(baselineVersion) - .baselineDescription(BASELINE_DESCRIPTION) - .baselineOnMigrate(BASELINE_ON_MIGRATION) - .installedBy(INSTALLED_BY) - .table(String.format("airbyte_%s_migrations", "configs")) - .load(); - } - - @Singleton - public ConfigRepository configRepository(@Named("configDatabase") final Database configDatabase) { - return new ConfigRepository(configDatabase); - } - - @Singleton - @Named("configsDatabaseMigrationCheck") - public DatabaseMigrationCheck configsDatabaseMigrationCheck(@Named("config") final DSLContext dslContext, - @Named("configFlyway") final Flyway configsFlyway, - @Value("${airbyte.flyway.configs.minimum-migration-version}") final String configsDatabaseMinimumFlywayMigrationVersion, - @Value("${airbyte.flyway.configs.initialization-timeout-ms}") final Long configsDatabaseInitializationTimeoutMs) { - 
log.info("Configs database configuration: {} {}", configsDatabaseMinimumFlywayMigrationVersion, configsDatabaseInitializationTimeoutMs); - return DatabaseCheckFactory - .createConfigsDatabaseMigrationCheck(dslContext, configsFlyway, configsDatabaseMinimumFlywayMigrationVersion, - configsDatabaseInitializationTimeoutMs); - } - - @Singleton - @Requires(env = WorkerMode.CONTROL_PLANE) - public StreamResetPersistence streamResetPersistence(@Named("configDatabase") final Database configDatabase) { - return new StreamResetPersistence(configDatabase); - } - - @Singleton - @Requires(env = WorkerMode.CONTROL_PLANE) - public JobPersistence jobPersistence(@Named("jobsDatabase") final Database jobDatabase) { - return new DefaultJobPersistence(jobDatabase); - } - -} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/DefinitionsUpdater.java b/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/DefinitionsUpdater.java deleted file mode 100644 index 5821fa0844d2..000000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/DefinitionsUpdater.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cron.selfhealing; - -import static io.airbyte.cron.MicronautCronRunner.SCHEDULED_TRACE_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.init.ApplyDefinitionsHelper; -import io.micronaut.context.annotation.Requires; -import io.micronaut.scheduling.annotation.Scheduled; -import jakarta.inject.Singleton; -import lombok.extern.slf4j.Slf4j; - -/** - * DefinitionsUpdater - * - * Automatically updates connector definitions from a remote catalog at an interval (30s). This can - * be enabled by setting a REMOTE_CATALOG_URL and UPDATE_DEFINITIONS_CRON_ENABLED=true. 
- */ -@Singleton -@Slf4j -@Requires(property = "airbyte.cron.update-definitions.enabled", - value = "true") -public class DefinitionsUpdater { - - private final ApplyDefinitionsHelper applyDefinitionsHelper; - private final DeploymentMode deploymentMode; - - public DefinitionsUpdater(final ApplyDefinitionsHelper applyDefinitionsHelper, - final DeploymentMode deploymentMode) { - log.info("Creating connector definitions updater"); - - this.applyDefinitionsHelper = applyDefinitionsHelper; - this.deploymentMode = deploymentMode; - } - - @Trace(operationName = SCHEDULED_TRACE_OPERATION_NAME) - @Scheduled(fixedRate = "30s", - initialDelay = "1m") - void updateDefinitions() { - log.info("Updating definitions..."); - - try { - try { - applyDefinitionsHelper.apply(deploymentMode == DeploymentMode.CLOUD); - - log.info("Done applying remote connector definitions"); - } catch (final Exception e) { - log.error("Error while applying remote definitions", e); - } - - } catch (final Exception e) { - log.error("Error when retrieving remote definitions", e); - } - - } - -} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/Temporal.java b/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/Temporal.java deleted file mode 100644 index 013775de8c26..000000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/Temporal.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.cron.selfhealing; - -import static io.airbyte.cron.MicronautCronRunner.SCHEDULED_TRACE_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.commons.temporal.TemporalClient; -import io.micronaut.scheduling.annotation.Scheduled; -import io.temporal.api.enums.v1.WorkflowExecutionStatus; -import jakarta.inject.Singleton; -import lombok.extern.slf4j.Slf4j; - -@Singleton -@Slf4j -public class Temporal { - - private final TemporalClient temporalClient; - - public Temporal(final TemporalClient temporalClient) { - log.debug("Creating temporal self-healing"); - this.temporalClient = temporalClient; - } - - @Trace(operationName = SCHEDULED_TRACE_OPERATION_NAME) - @Scheduled(fixedRate = "10s") - void cleanTemporal() { - temporalClient.restartClosedWorkflowByStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED); - } - -} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/WorkspaceCleaner.java b/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/WorkspaceCleaner.java deleted file mode 100644 index 5db869dd4c3d..000000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/selfhealing/WorkspaceCleaner.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.cron.selfhealing; - -import static io.airbyte.cron.MicronautCronRunner.SCHEDULED_TRACE_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.scheduling.annotation.Scheduled; -import jakarta.inject.Singleton; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.LocalDateTime; -import java.time.OffsetDateTime; -import java.util.Date; -import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.filefilter.AgeFileFilter; - -@Singleton -@Slf4j -@Requires(notEnv = Environment.KUBERNETES) -public class WorkspaceCleaner { - - private final Path workspaceRoot; - private final long maxAgeFilesInDays; - - WorkspaceCleaner() { - log.info("Creating workspace cleaner"); - - // TODO Configs should get injected through micronaut - final Configs configs = new EnvConfigs(); - - this.workspaceRoot = configs.getWorkspaceRoot(); - // We align max file age on temporal for history consistency - // It might make sense configure this independently in the future - this.maxAgeFilesInDays = configs.getTemporalRetentionInDays(); - } - - /* - * Delete files older than maxAgeFilesInDays from the workspace - * - * NOTE: this is currently only intended to work for docker - */ - @Trace(operationName = SCHEDULED_TRACE_OPERATION_NAME) - @Scheduled(fixedRate = "1d") - public void deleteOldFiles() throws IOException { - final Date oldestAllowed = getDateFromDaysAgo(maxAgeFilesInDays); - log.info("Deleting files older than {} days ({})", maxAgeFilesInDays, oldestAllowed); - - ApmTraceUtils.addTagsToTrace(Map.of("oldest_date_allowed", oldestAllowed, "max_age", 
maxAgeFilesInDays)); - - final AtomicInteger counter = new AtomicInteger(0); - Files.walk(workspaceRoot) - .map(Path::toFile) - .filter(f -> new AgeFileFilter(oldestAllowed).accept(f)) - .forEach(file -> { - log.debug("Deleting file: " + file.toString()); - FileUtils.deleteQuietly(file); - counter.incrementAndGet(); - final File parentDir = file.getParentFile(); - if (parentDir.isDirectory() && parentDir.listFiles().length == 0) { - FileUtils.deleteQuietly(parentDir); - } - }); - log.info("deleted {} files", counter.get()); - } - - private static Date getDateFromDaysAgo(final long daysAgo) { - return Date.from(LocalDateTime.now().minusDays(daysAgo).toInstant(OffsetDateTime.now().getOffset())); - } - -} diff --git a/airbyte-cron/src/main/resources/application.yml b/airbyte-cron/src/main/resources/application.yml deleted file mode 100644 index 05bf160cd95e..000000000000 --- a/airbyte-cron/src/main/resources/application.yml +++ /dev/null @@ -1,127 +0,0 @@ -micronaut: - application: - name: airbyte-cron - caches: - remote-definitions-provider: - expire-after-write: 15s - server: - port: 9001 - -airbyte: - cron: - update-definitions: - enabled: ${UPDATE_DEFINITIONS_CRON_ENABLED:false} - deployment-mode: ${DEPLOYMENT_MODE:OSS} - flyway: - configs: - initialization-timeout-ms: ${CONFIGS_DATABASE_INITIALIZATION_TIMEOUT_MS:60000} - minimum-migration-version: ${CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION} - local: - docker-mount: ${LOCAL_DOCKER_MOUNT:} - root: ${LOCAL_ROOT} - platform: - remote-connector-catalog: - timeout-ms: ${REMOTE_CONNECTOR_CATALOG_MS:30000} - url: ${REMOTE_CONNECTOR_CATALOG_URL:} - role: ${AIRBYTE_ROLE:} - temporal: - worker: - ports: ${TEMPORAL_WORKER_PORTS:} - tracking-strategy: ${TRACKING_STRATEGY:LOGGING} - version: ${AIRBYTE_VERSION} - workflow: - failure: - restart-delay: ${WORKFLOW_FAILURE_RESTART_DELAY_SECONDS:600} - workspace: - docker-mount: ${WORKSPACE_DOCKER_MOUNT:} - root: ${WORKSPACE_ROOT} - -datasources: - config: - 
connection-test-query: SELECT 1 - connection-timeout: 30000 - idle-timeout: 600000 - initialization-fail-timeout: -1 # Disable fail fast checking to avoid issues due to other pods not being started in time - maximum-pool-size: 5 - minimum-idle: 0 - url: ${DATABASE_URL} - driverClassName: org.postgresql.Driver - username: ${DATABASE_USER} - password: ${DATABASE_PASSWORD} - jobs: - connection-test-query: SELECT 1 - connection-timeout: 30000 - idle-timeout: 600000 - initialization-fail-timeout: -1 # Disable fail fast checking to avoid issues due to other pods not being started in time - maximum-pool-size: 5 - minimum-idle: 0 - url: ${DATABASE_URL} - driverClassName: org.postgresql.Driver - username: ${DATABASE_USER} - password: ${DATABASE_PASSWORD} - -docker: - network: ${DOCKER_NETWORK:host} - -endpoints: - beans: - enabled: true - sensitive: false - env: - enabled: true - sensitive: false - health: - enabled: true - sensitive: false - info: - enabled: true - sensitive: true - loggers: - enabled: true - sensitive: true - refresh: - enabled: false - sensitive: true - routes: - enabled: true - sensitive: false - threaddump: - enabled: true - sensitive: true - -flyway: - enabled: true - datasources: - config: - enabled: false - locations: - - 'classpath:io/airbyte/db/instance/configs/migrations' - jobs: - enabled: false - locations: - - 'classpath:io/airbyte/db/instance/jobs/migrations' - -jooq: - datasources: - config: - jackson-converter-enabled: true - sql-dialect: POSTGRES - jobs: - jackson-converter-enabled: true - sql-dialect: POSTGRES - -temporal: - cloud: - client: - cert: ${TEMPORAL_CLOUD_CLIENT_CERT:} - key: ${TEMPORAL_CLOUD_CLIENT_KEY:} - enabled: ${TEMPORAL_CLOUD_ENABLED:false} - host: ${TEMPORAL_CLOUD_HOST:} - namespace: ${TEMPORAL_CLOUD_NAMESPACE:} - host: ${TEMPORAL_HOST:`airbyte-temporal:7233`} - retention: ${TEMPORAL_HISTORY_RETENTION_IN_DAYS:30} - -logger: - levels: -# Uncomment to help resolve issues with conditional beans -# 
io.micronaut.context.condition: DEBUG diff --git a/airbyte-cron/src/main/resources/micronaut-banner.txt b/airbyte-cron/src/main/resources/micronaut-banner.txt deleted file mode 100644 index d7778fa0cb04..000000000000 --- a/airbyte-cron/src/main/resources/micronaut-banner.txt +++ /dev/null @@ -1,8 +0,0 @@ - - ___ _ __ __ - / | (_)____/ /_ __ __/ /____ - / /| | / / ___/ __ \/ / / / __/ _ \ - / ___ |/ / / / /_/ / /_/ / /_/ __/ -/_/ |_/_/_/ /_.___/\__, /\__/\___/ - /____/ - : airbyte-cron : \ No newline at end of file diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java index f2324596d2df..dedbf4856e92 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java @@ -139,7 +139,7 @@ private static Optional getLastMigrationVersion(final FlywayDa @VisibleForTesting static AirbyteVersion getCurrentAirbyteVersion() { - try (final BufferedReader reader = new BufferedReader(new FileReader("../../.env", StandardCharsets.UTF_8))) { + try (final BufferedReader reader = new BufferedReader(new FileReader("../../gradle.properties", StandardCharsets.UTF_8))) { String line = reader.readLine(); while (line != null) { if (line.startsWith("VERSION")) { @@ -148,11 +148,11 @@ static AirbyteVersion getCurrentAirbyteVersion() { line = reader.readLine(); } } catch (final FileNotFoundException e) { - throw new IllegalStateException("Cannot find the .env file", e); + throw new IllegalStateException("Cannot find the gradle.properties file", e); } catch (final IOException e) { throw new RuntimeException(e); } - throw new IllegalStateException("Cannot find current Airbyte version from .env file"); + throw new IllegalStateException("Cannot find the gradle.properties file"); } /** diff --git 
a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java index 63991e6e6eaa..ca3e5ae8255d 100644 --- a/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java +++ b/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java @@ -1820,9 +1820,13 @@ private static List getRecordMessagesWithNewNamespace( @Getter public static class SpecialNumericTypes { + @Builder.Default boolean supportIntegerNan = false; + @Builder.Default boolean supportNumberNan = false; + @Builder.Default boolean supportIntegerInfinity = false; + @Builder.Default boolean supportNumberInfinity = false; } diff --git a/airbyte-metrics/metrics-lib/README.md b/airbyte-metrics/metrics-lib/README.md deleted file mode 100644 index 936b6e043c55..000000000000 --- a/airbyte-metrics/metrics-lib/README.md +++ /dev/null @@ -1,3 +0,0 @@ -## Airbyte Metrics Lib - -This module contains helpers for emitting metrics. This is primarily intended to be consumed by Airbyte Cloud, though OSS users are certainly welcome to make sure of this. 
diff --git a/airbyte-metrics/metrics-lib/build.gradle b/airbyte-metrics/metrics-lib/build.gradle deleted file mode 100644 index 110dfe751b00..000000000000 --- a/airbyte-metrics/metrics-lib/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -plugins { - id "java-library" -} - -dependencies { - implementation project(':airbyte-commons') - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-db:jooq') - implementation project(':airbyte-db:db-lib') - - implementation libs.otel.semconv - implementation libs.otel.sdk - implementation libs.otel.sdk.testing - implementation libs.micrometer.statsd - implementation platform(libs.otel.bom) - implementation("io.opentelemetry:opentelemetry-api") - implementation("io.opentelemetry:opentelemetry-sdk") - implementation("io.opentelemetry:opentelemetry-exporter-otlp") - - implementation libs.java.dogstatsd.client - implementation libs.bundles.datadog - - testImplementation project(':airbyte-config:config-persistence') - testImplementation project(':airbyte-test-utils') - testImplementation libs.platform.testcontainers.postgresql - testImplementation "io.opentracing:opentracing-util:0.33.0:tests" -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java deleted file mode 100644 index 8c5575adcabc..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -/** - * Collection of constants for APM tracing. - */ -public final class ApmTraceConstants { - - /** - * Operation name for an APM trace of a Temporal activity. 
- */ - public static final String ACTIVITY_TRACE_OPERATION_NAME = "activity"; - - /** - * Operation name for an APM trace of a job orchestrator. - */ - public static final String JOB_ORCHESTRATOR_OPERATION_NAME = "job.orchestrator"; - - /** - * Operation name for an APM trace of a worker implementation. - */ - public static final String WORKER_OPERATION_NAME = "worker"; - - /** - * Operation name for an APM trace of a Temporal workflow. - */ - public static final String WORKFLOW_TRACE_OPERATION_NAME = "workflow"; - - private ApmTraceConstants() {} - - /** - * Trace tag constants. - */ - public static final class Tags { - - /** - * Name of the APM trace tag that holds the attempt number value associated with the trace. - */ - public static final String ATTEMPT_NUMBER_KEY = "attempt_number"; - - /** - * Name of the APM trace tag that holds the destination Docker image value associated with the - * trace. - */ - public static final String CONNECTION_ID_KEY = "connection_id"; - - /** - * Name of the APM trace tag that holds the connector version value associated with the trace. - */ - public static final String CONNECTOR_VERSION_KEY = "connector_version"; - - /** - * Name of the APM trace tag that holds the destination definition ID value associated with the - * trace. - */ - public static final String DESTINATION_DEFINITION_ID_KEY = "destination.definition_id"; - - /** - * Name of the APM trace tag that holds the destination Docker image value associated with the - * trace. - */ - public static final String DESTINATION_DOCKER_IMAGE_KEY = "destination.docker_image"; - - /** - * Name of the APM trace tag that holds the Docker image value associated with the trace. - */ - public static final String DOCKER_IMAGE_KEY = "docker_image"; - - /** - * Name of the APM trace tag that holds the failure origin(s) associated with the trace. 
- */ - public static final String FAILURE_ORIGINS_KEY = "failure_origins"; - - /** - * Name of the APM trace tag that holds the failure type(s) associated with the trace. - */ - public static final String FAILURE_TYPES_KEY = "failure_types"; - - /** - * Name of the APM trace tag that holds the job ID value associated with the trace. - */ - public static final String JOB_ID_KEY = "job_id"; - - /** - * Name of the APM trace tag that holds the job root value associated with the trace. - */ - public static final String JOB_ROOT_KEY = "job_root"; - - /** - * Name of the APM trace tag that holds the process exit value associated with the trace. - */ - public static final String PROCESS_EXIT_VALUE_KEY = "process.exit_value"; - - /** - * Name of the APM trace tag that holds the replication bytes synced value associated with the - * trace. - */ - public static final String REPLICATION_BYTES_SYNCED_KEY = "replication.bytes_synced"; - - /** - * Name of the APM trace tag that holds the replication records synced value associated with the - * trace. - */ - public static final String REPLICATION_RECORDS_SYNCED_KEY = "replication.records_synced"; - - /** - * Name of the APM trace tag that holds the replication status value associated with the trace. - */ - public static final String REPLICATION_STATUS_KEY = "replication.status"; - - /** - * Name of the APM trace tag that holds the source definition ID value associated with the trace. - */ - public static final String SOURCE_DEFINITION_ID_KEY = "source.definition_id"; - - /** - * Name of the APM trace tag that holds the source Docker image value associated with the trace. - */ - public static final String SOURCE_DOCKER_IMAGE_KEY = "source.docker_image"; - - /** - * Name of the APM trace tag that holds the source ID value associated with the trace. - */ - public static final String SOURCE_ID_KEY = "source.id"; - - /** - * Name of the APM trace tag that holds the webhook config ID value associated with the trace. 
- */ - public static final String WEBHOOK_CONFIG_ID_KEY = "webhook.config_id"; - - /** - * Name of the APM trace tag that holds the workspace ID value associated with the trace. - */ - public static final String WORKSPACE_ID_KEY = "workspace.id"; - - private Tags() {} - - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java deleted file mode 100644 index cb4b85bf2a9e..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import datadog.trace.api.DDTags; -import datadog.trace.api.interceptor.MutableSpan; -import io.opentracing.Span; -import io.opentracing.log.Fields; -import io.opentracing.tag.Tags; -import io.opentracing.util.GlobalTracer; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.util.Map; - -/** - * Collection of utility methods to help with performance tracing. - */ -public class ApmTraceUtils { - - /** - * String format for the name of tags added to spans. - */ - public static final String TAG_FORMAT = "airbyte.%s.%s"; - - /** - * Standard prefix for tags added to spans. - */ - public static final String TAG_PREFIX = "metadata"; - - /** - * Adds all the provided tags to the currently active span, if one exists.
- * All tags added via this method will use the default {@link #TAG_PREFIX} namespace. - * - * @param tags A map of tags to be added to the currently active span. - */ - public static void addTagsToTrace(final Map tags) { - addTagsToTrace(tags, TAG_PREFIX); - } - - /** - * Adds all provided tags to the currently active span, if one exists, under the provided tag name - * namespace. - * - * @param tags A map of tags to be added to the currently active span. - * @param tagPrefix The prefix to be added to each custom tag name. - */ - public static void addTagsToTrace(final Map tags, final String tagPrefix) { - addTagsToTrace(GlobalTracer.get().activeSpan(), tags, tagPrefix); - } - - /** - * Adds all the provided tags to the provided span, if one exists. - * - * @param span The {@link Span} that will be associated with the tags. - * @param tags A map of tags to be added to the currently active span. - * @param tagPrefix The prefix to be added to each custom tag name. - */ - public static void addTagsToTrace(final Span span, final Map tags, final String tagPrefix) { - if (span != null) { - tags.entrySet().forEach(entry -> { - span.setTag(formatTag(entry.getKey(), tagPrefix), entry.getValue().toString()); - }); - } - } - - /** - * Adds an exception to the currently active span, if one exists. - * - * @param t The {@link Throwable} to be added to the currently active span. - */ - public static void addExceptionToTrace(final Throwable t) { - addExceptionToTrace(GlobalTracer.get().activeSpan(), t); - } - - /** - * Adds an exception to the provided span, if one exists. - * - * @param span The {@link Span} that will be associated with the exception. - * @param t The {@link Throwable} to be added to the provided span. - */ - public static void addExceptionToTrace(final Span span, final Throwable t) { - if (span != null) { - span.setTag(Tags.ERROR, true); - span.log(Map.of(Fields.ERROR_OBJECT, t)); - } - } - - /** - * Adds all the provided tags to the root span. 
- * - * @param tags A map of tags to be added to the root span. - */ - public static void addTagsToRootSpan(final Map tags) { - final Span activeSpan = GlobalTracer.get().activeSpan(); - if (activeSpan instanceof MutableSpan) { - final MutableSpan localRootSpan = ((MutableSpan) activeSpan).getLocalRootSpan(); - tags.entrySet().forEach(entry -> { - localRootSpan.setTag(formatTag(entry.getKey(), TAG_PREFIX), entry.getValue().toString()); - }); - } - } - - /** - * Adds an exception to the root span, if an active one exists. - * - * @param t The {@link Throwable} to be added to the provided span. - */ - public static void recordErrorOnRootSpan(final Throwable t) { - final Span activeSpan = GlobalTracer.get().activeSpan(); - if (activeSpan != null) { - activeSpan.setTag(Tags.ERROR, true); - activeSpan.log(Map.of(Fields.ERROR_OBJECT, t)); - } - if (activeSpan instanceof MutableSpan) { - final MutableSpan localRootSpan = ((MutableSpan) activeSpan).getLocalRootSpan(); - localRootSpan.setError(true); - localRootSpan.setTag(DDTags.ERROR_MSG, t.getMessage()); - localRootSpan.setTag(DDTags.ERROR_TYPE, t.getClass().getName()); - final StringWriter errorString = new StringWriter(); - t.printStackTrace(new PrintWriter(errorString)); - localRootSpan.setTag(DDTags.ERROR_STACK, errorString.toString()); - } - } - - /** - * Formats the tag key using {@link #TAG_FORMAT} provided by this utility, using the default tag - * prefix {@link #TAG_PREFIX}. - * - * @param tagKey The tag key to format. - * @return The formatted tag key. - */ - public static String formatTag(final String tagKey) { - return formatTag(tagKey, TAG_PREFIX); - } - - /** - * Formats the tag key using {@link #TAG_FORMAT} provided by this utility with the provided tag - * prefix. - * - * @param tagKey The tag key to format. - * @param tagPrefix The prefix to be added to each custom tag name. - * @return The formatted tag key. 
- */ - public static String formatTag(final String tagKey, final String tagPrefix) { - return String.format(TAG_FORMAT, tagPrefix, tagKey); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DatadogClientConfiguration.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DatadogClientConfiguration.java deleted file mode 100644 index c9808cd6ccf1..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DatadogClientConfiguration.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import io.airbyte.config.Configs; -import java.util.List; -import lombok.AllArgsConstructor; - -/** - * POJO of configuration required for publishing metrics. - */ -@AllArgsConstructor -public class DatadogClientConfiguration { - - public final String ddAgentHost; - public final String ddPort; - public final boolean publish; - - public final List constantTags; - - public DatadogClientConfiguration(final Configs configs) { - this.ddAgentHost = configs.getDDAgentHost(); - this.ddPort = configs.getDDDogStatsDPort(); - this.publish = configs.getPublishMetrics(); - this.constantTags = configs.getDDConstantTags(); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricClient.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricClient.java deleted file mode 100644 index 93e772aa4960..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricClient.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.metrics.lib; - -import com.google.common.annotations.VisibleForTesting; -import com.timgroup.statsd.NonBlockingStatsDClientBuilder; -import com.timgroup.statsd.StatsDClient; -import io.airbyte.config.Configs; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; - -/** - * Light wrapper around the DogsStatsD client to make using the client slightly more ergonomic. - *

- * This class mainly exists to help Airbyte instrument/debug application on Airbyte Cloud. The - * methods here do not fail loudly to prevent application disruption. - *

- * Open source users are free to turn this on and consume the same metrics. - *

- * This class is intended to be used in conjunction with {@link Configs#getPublishMetrics()}. - *

- * Any {@link MetricAttribute}s provided with the metric data are sent as tags created by joining - * the {@code key} and {@code value} property of each {@link MetricAttribute} with a - * {@link #TAG_DELIMITER} delimiter. - */ -@Slf4j -public class DogStatsDMetricClient implements MetricClient { - - private static final String TAG_DELIMITER = ":"; - - private boolean instancePublish = false; - private StatsDClient statsDClient; - - /** - * Traditional singleton initialize call. Please invoke this before using any methods in this class. - * Usually called in the main class of the application attempting to publish metrics. - */ - - public void initialize(final MetricEmittingApp app, final DatadogClientConfiguration config) { - if (statsDClient != null) { - throw new RuntimeException("You cannot initialize configuration more than once."); - } - - if (!config.publish) { - // do nothing if we do not want to publish. All metrics methods also do nothing. - return; - } - - log.info("Starting DogStatsD client.."); - instancePublish = config.publish; - statsDClient = new NonBlockingStatsDClientBuilder() - .prefix(app.getApplicationName()) - .hostname(config.ddAgentHost) - .port(Integer.parseInt(config.ddPort)) - .constantTags(config.constantTags.toArray(new String[0])) - .build(); - } - - @VisibleForTesting - @Override - public synchronized void shutdown() { - statsDClient = null; - instancePublish = false; - } - - /** - * Increment or decrement a counter. - * - * @param metric - * @param amt to adjust. - * @param attributes - */ - @Override - public void count(final MetricsRegistry metric, final long amt, final MetricAttribute... 
attributes) { - if (instancePublish) { - if (statsDClient == null) { - // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, count {} not emitted", metric); - return; - } - - log.info("publishing count, name: {}, value: {}, attributes: {}", metric, amt, attributes); - statsDClient.count(metric.getMetricName(), amt, toTags(attributes)); - } - } - - /** - * Record the latest value for a gauge. - * - * @param metric - * @param val to record. - * @param attributes - */ - @Override - public void gauge(final MetricsRegistry metric, final double val, final MetricAttribute... attributes) { - if (instancePublish) { - if (statsDClient == null) { - // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, gauge {} not emitted", metric); - return; - } - - log.debug("publishing gauge, name: {}, value: {}, attributes: {}", metric, val, attributes); - statsDClient.gauge(metric.getMetricName(), val, toTags(attributes)); - } - } - - @Override - public void distribution(final MetricsRegistry metric, final double val, final MetricAttribute... attributes) { - if (instancePublish) { - if (statsDClient == null) { - // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, distribution {} not emitted", metric); - return; - } - - log.debug("recording distribution, name: {}, value: {}, attributes: {}", metric, val, attributes); - statsDClient.distribution(metric.getMetricName(), val, toTags(attributes)); - } - } - - /** - * Converts each {@link MetricAttribute} tuple to a list of tags consumable by StatsD. - * - * @param attributes An array of {@link MetricAttribute} tuples. - * @return An array of tag values. - */ - private String[] toTags(final MetricAttribute... 
attributes) { - return Stream.of(attributes).map(a -> String.join(TAG_DELIMITER, a.key(), a.value())).collect(Collectors.toList()).toArray(new String[] {}); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricSingleton.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricSingleton.java deleted file mode 100644 index d51279fc53a7..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricSingleton.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import com.google.common.annotations.VisibleForTesting; -import com.timgroup.statsd.NonBlockingStatsDClientBuilder; -import com.timgroup.statsd.StatsDClient; -import io.airbyte.config.Configs; -import lombok.extern.slf4j.Slf4j; - -/** - * Light wrapper around the DogsStatsD client to make using the client slightly more ergonomic. - *

- * This class mainly exists to help Airbyte instrument/debug application on Airbyte Cloud. The - * methods here do not fail loudly to prevent application disruption. - *

- * Open source users are free to turn this on and consume the same metrics. - *

- * This class is intended to be used in conjection with {@link Configs#getPublishMetrics()}. - */ -@Slf4j -public class DogStatsDMetricSingleton { - - private static boolean instancePublish = false; - private static StatsDClient statsDClient; - - /** - * Traditional singleton initialize call. Please invoke this before using any methods in this class. - * Usually called in the main class of the application attempting to publish metrics. - */ - public synchronized static void initialize(final MetricEmittingApp app, final DatadogClientConfiguration config) { - if (statsDClient != null) { - throw new RuntimeException("You cannot initialize configuration more than once."); - } - - if (!config.publish) { - // do nothing if we do not want to publish. All metrics methods also do nothing. - return; - } - - log.info("Starting DogStatsD client.."); - instancePublish = config.publish; - statsDClient = new NonBlockingStatsDClientBuilder() - .prefix(app.getApplicationName()) - .hostname(config.ddAgentHost) - .port(Integer.parseInt(config.ddPort)) - .constantTags(config.constantTags.toArray(new String[0])) - .build(); - } - - @VisibleForTesting - public synchronized static void flush() { - statsDClient = null; - instancePublish = false; - } - - /** - * Increment or decrement a counter. - * - * @param metric - * @param amt to adjust. - * @param tags - */ - public static void count(final MetricsRegistry metric, final double amt, final String... tags) { - if (instancePublish) { - if (statsDClient == null) { - // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, count {} not emitted", metric); - return; - } - - log.info("publishing count, name: {}, value: {}, tags: {}", metric, amt, tags); - statsDClient.count(metric.getMetricName(), amt, tags); - } - } - - /** - * Record the latest value for a gauge. - * - * @param metric - * @param val to record. 
- * @param tags - */ - public static void gauge(final MetricsRegistry metric, final double val, final String... tags) { - if (instancePublish) { - if (statsDClient == null) { - // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, gauge {} not emitted", metric); - return; - } - - log.info("publishing gauge, name: {}, value: {}, tags: {}", metric, val, tags); - statsDClient.gauge(metric.getMetricName(), val, tags); - } - } - - /** - * Submit a single execution time aggregated locally by the Agent - all metric related statistics - * are calculated agent-side. Be careful using this if there will be multiple agents emitting this - * metric as this will cause inaccuracy in non-additive metrics e.g. average, median, percentiles - * etc. - * - * The upside of this is this metric is cheaper to calculate than the Distribution type. - * - * See https://docs.datadoghq.com/metrics/types/?tab=histogram#metric-types for more information. - * - * @param metric - * @param val of time to record. - * @param tags - */ - public static void recordTimeLocal(final MetricsRegistry metric, final double val, final String... tags) { - if (instancePublish) { - if (statsDClient == null) { - // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, histogram {} not emitted", metric); - return; - } - - log.info("recording histogram, name: {}, value: {}, tags: {}", metric, val, tags); - statsDClient.histogram(metric.getMetricName(), val, tags); - } - } - - /** - * Submit a single execution time aggregated globally by Datadog - all metric related statistics are - * calculated in Datadog. Use this for precise stats. - * - * @param metric - * @param val of time to record. - * @param tags - */ - public static void recordTimeGlobal(final MetricsRegistry metric, final double val, final String... 
tags) { - if (instancePublish) { - if (statsDClient == null) { - // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, distribution {} not emitted", metric); - return; - } - - log.info("recording distribution, name: {}, value: {}, tags: {}", metric, val, tags); - statsDClient.distribution(metric.getMetricName(), val, tags); - } - } - - /** - * Wrapper of {@link #recordTimeGlobal(MetricsRegistry, double, String...)} with a runnable for - * convenience. - * - * @param metric - * @param runnable to time - * @param tags - */ - public static void recordTimeGlobal(final MetricsRegistry metric, final Runnable runnable, final String... tags) { - final long start = System.currentTimeMillis(); - runnable.run(); - final long end = System.currentTimeMillis(); - final long val = end - start; - recordTimeGlobal(metric, val, tags); - } - - /** - * Wrapper around {@link #recordTimeGlobal(MetricsRegistry, double, String...)} with a different - * name to better represent what this function does. - * - * @param metric - * @param val - * @param tags - */ - public static void percentile(final MetricsRegistry metric, final double val, final String... tags) { - recordTimeGlobal(metric, val, tags); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricAttribute.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricAttribute.java deleted file mode 100644 index e302d11147b7..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricAttribute.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -/** - * Custom tuple that represents a key/value pair to be included with a metric. - *

- * It is up to each {@link MetricClient} implementation to decide what data from this record is used - * when generating a metric. See the specific implementations of the {@link MetricClient} interface - * for actual usage. - */ -public record MetricAttribute(String key, String value) {} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricClient.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricClient.java deleted file mode 100644 index c651bf059dd5..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricClient.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -/* - * A generic metric client interface supporting basic metric emitting actions. - */ -public interface MetricClient { - - /** - * Increment or decrement a counter. - * - * @param metric - * @param val to record. - * @param attributes - */ - void count(MetricsRegistry metric, long val, final MetricAttribute... attributes); - - /** - * Record the latest value for a gauge. - * - * @param metric - * @param val to record. - * @param attributes - */ - void gauge(MetricsRegistry metric, double val, final MetricAttribute... attributes); - - /* - * Accepts value on the metrics, and report the distribution of these values. Useful to analysis how - * much time have elapsed, and percentile of a series of records. - * - * @param metric - * - * @param val to record. - * - * @param attributes - */ - void distribution(MetricsRegistry metric, double val, final MetricAttribute... attributes); - - /* - * Reset initialization. Can be used in a unit test to reset metric client state. 
- */ - void shutdown(); - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricClientFactory.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricClientFactory.java deleted file mode 100644 index a541295f97dd..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricClientFactory.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import io.airbyte.config.Configs; -import io.airbyte.config.EnvConfigs; -import io.micrometer.core.instrument.Clock; -import io.micrometer.core.instrument.MeterRegistry; -import io.micrometer.statsd.StatsdConfig; -import io.micrometer.statsd.StatsdMeterRegistry; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A singleton factory producing a singleton metric client. - */ -public class MetricClientFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(MetricClientFactory.class); - - static final String DATADOG_METRIC_CLIENT = "datadog"; - private static final String OTEL_METRIC_CLIENT = "otel"; - - private static final Configs configs = new EnvConfigs(); - - private MetricClientFactory() { - // no explicit implementation - } - - private static MetricClient metricClient; - - /** - * - * Retrieve previously created metric client. If metric client was not created before, returns a - * NotImplementedMetricClient instead. - * - * @return previously created metric client which has been properly initialized, or an instance of - * the empty NotImplementedMetricClient. - */ - public synchronized static MetricClient getMetricClient() { - if (metricClient != null) { - return metricClient; - } - LOGGER.warn( - "MetricClient has not been initialized. Must call MetricClientFactory.CreateMetricClient before using MetricClient. Using a dummy client for now. 
Ignore this if Airbyte is configured to not publish any metrics."); - - return new NotImplementedMetricClient(); - } - - /** - * - * Create and initialize a MetricClient based on System env. - * - * @param metricEmittingApp the name of the app which the metric will be running under. - */ - public static synchronized void initialize(final MetricEmittingApp metricEmittingApp) { - if (metricClient != null) { - LOGGER.warn("Metric client is already initialized to " + configs.getMetricClient()); - return; - } - - if (DATADOG_METRIC_CLIENT.equals(configs.getMetricClient())) { - if (configs.getDDAgentHost() == null || configs.getDDDogStatsDPort() == null) { - throw new RuntimeException("DD_AGENT_HOST is null or DD_DOGSTATSD_PORT is null. Both are required to use the DataDog Metric Client"); - } else { - initializeDatadogMetricClient(metricEmittingApp); - } - } else if (OTEL_METRIC_CLIENT.equals(configs.getMetricClient())) { - initializeOpenTelemetryMetricClient(metricEmittingApp); - } else { - metricClient = new NotImplementedMetricClient(); - LOGGER.warn( - "MetricClient was not recognized or not provided. Accepted values are `datadog` or `otel`. "); - } - } - - /** - * A statsd config for micrometer. We override host to be the datadog agent address, while keeping - * other settings default. - */ - private static StatsdConfig getDatadogStatsDConfig() { - return new StatsdConfig() { - - /** - * @return - */ - @Override - public String host() { - return configs.getDDAgentHost(); - } - - /** - * Returning null for default get function because the host has been overridden above. - */ - @Override - public String get(final String key) { - return null; - } - - }; - } - - /** - * - * Returns a meter registry to be consumed by temporal configs. 
- * - */ - public static MeterRegistry getMeterRegistry() { - - if (DATADOG_METRIC_CLIENT.equals(configs.getMetricClient())) { - final StatsdConfig config = getDatadogStatsDConfig(); - return new StatsdMeterRegistry(config, Clock.SYSTEM); - } - - // To support open telemetry, we need to use a different type of Config. For now we simply return - // null - in this case, we do not register any metric emitting mechanism in temporal and thus - // users will not receive temporal related metrics. - return null; - } - - private static DogStatsDMetricClient initializeDatadogMetricClient( - final MetricEmittingApp metricEmittingApp) { - final DogStatsDMetricClient client = new DogStatsDMetricClient(); - - client.initialize(metricEmittingApp, new DatadogClientConfiguration(configs)); - metricClient = client; - return client; - } - - private static OpenTelemetryMetricClient initializeOpenTelemetryMetricClient( - final MetricEmittingApp metricEmittingApp) { - final OpenTelemetryMetricClient client = new OpenTelemetryMetricClient(); - client.initialize(metricEmittingApp, configs.getOtelCollectorEndpoint()); - metricClient = client; - return client; - } - - synchronized static void flush() { - if (metricClient != null) { - metricClient.shutdown(); - metricClient = null; - } - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApp.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApp.java deleted file mode 100644 index 6098792277e2..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApp.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -/** - * Interface representing an Airbyte Application to collect metrics for. This interface is present - * as Java doesn't support enum inheritance as of Java 17. 
We use a shared interface so this - * interface can be used in the {@link MetricsRegistry} enum. - */ -public interface MetricEmittingApp { - - String getApplicationName(); - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java deleted file mode 100644 index d5a0d7fb819c..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import lombok.AllArgsConstructor; - -/** - * Enum containing all applications metrics are emitted for. Used to initialize - * {@link MetricClientFactory.initialize(MetricEmittingApp)}. - * - * Application Name Conventions: - *

- * - Drop the airbyte prefix when naming applications, e.g airbyte-server -> server. - *

- * - Use dashes to delimit application names with multiple words. - *

- * - Use lowercase. - */ -@AllArgsConstructor -public enum MetricEmittingApps implements MetricEmittingApp { - - METRICS_REPORTER("metrics-reporter"), - WORKER("worker"); - - private String applicationName; - - @Override - public String getApplicationName() { - return this.applicationName; - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java deleted file mode 100644 index 8764938412cf..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; - -import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; -import java.util.List; -import java.util.UUID; -import lombok.extern.slf4j.Slf4j; -import org.jooq.DSLContext; - -/** - * This class centralises metrics queries. These queries power metrics that require some sort of - * data access or calculation. - *

- * Simple metrics that require no calculation need not be tracked here. - */ -@Slf4j -public class MetricQueries { - - public static List jobIdToReleaseStages(final DSLContext ctx, final long jobId) { - final var srcRelStageCol = "src_release_stage"; - final var dstRelStageCol = "dst_release_stage"; - - final var query = String.format(""" - SELECT src_def_data.release_stage AS %s, - dest_def_data.release_stage AS %s - FROM connection - INNER JOIN jobs ON connection.id=CAST(jobs.scope AS uuid) - INNER JOIN actor AS dest_data ON connection.destination_id = dest_data.id - INNER JOIN actor_definition AS dest_def_data ON dest_data.actor_definition_id = dest_def_data.id - INNER JOIN actor AS src_data ON connection.source_id = src_data.id - INNER JOIN actor_definition AS src_def_data ON src_data.actor_definition_id = src_def_data.id - WHERE jobs.id = '%d';""", srcRelStageCol, dstRelStageCol, jobId); - - final var res = ctx.fetch(query); - final var stages = res.getValues(srcRelStageCol, ReleaseStage.class); - stages.addAll(res.getValues(dstRelStageCol, ReleaseStage.class)); - return stages; - } - - public static List srcIdAndDestIdToReleaseStages(final DSLContext ctx, final UUID srcId, final UUID dstId) { - return ctx.select(ACTOR_DEFINITION.RELEASE_STAGE).from(ACTOR).join(ACTOR_DEFINITION).on(ACTOR.ACTOR_DEFINITION_ID.eq(ACTOR_DEFINITION.ID)) - .where(ACTOR.ID.eq(srcId)) - .or(ACTOR.ID.eq(dstId)).fetch().getValues(ACTOR_DEFINITION.RELEASE_STAGE); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java deleted file mode 100644 index 7c5492ce6518..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.metrics.lib; - -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; -import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus; - -/** - * Keep track of all metric tags. - */ -public class MetricTags { - - public static final String CONNECTION_ID = "connection_id"; - public static final String FAILURE_ORIGIN = "failure_origin"; - public static final String FAILURE_TYPE = "failure_type"; - public static final String JOB_ID = "job_id"; - public static final String JOB_STATUS = "job_status"; - public static final String RELEASE_STAGE = "release_stage"; - public static final String RESET_WORKFLOW_FAILURE_CAUSE = "failure_cause"; - public static final String WORKFLOW_TYPE = "workflow_type"; - public static final String ATTEMPT_QUEUE = "attempt_queue"; - public static final String GEOGRAPHY = "geography"; - public static final String UNKNOWN = "unknown"; - - // the release stage of the highest release connector in the sync (GA > Beta > Alpha) - public static final String MAX_CONNECTOR_RELEASE_STATE = "max_connector_release_stage"; - // the release stage of the lowest release stage connector in the sync (GA > Beta > Alpha) - public static final String MIN_CONNECTOR_RELEASE_STATE = "min_connector_release_stage"; - public static final String ATTEMPT_OUTCOME = "attempt_outcome"; // succeeded|failed - public static final String ATTEMPT_NUMBER = "attempt_number"; // 0|1|2|3 - - public static String getReleaseStage(final ReleaseStage stage) { - return stage != null ? stage.getLiteral() : UNKNOWN; - } - - public static String getFailureOrigin(final FailureOrigin origin) { - return origin != null ? origin.value() : FailureOrigin.UNKNOWN.value(); - } - - public static String getFailureType(final FailureType origin) { - return origin != null ? 
origin.value() : UNKNOWN; - } - - public static String getJobStatus(final JobStatus status) { - return status != null ? status.getLiteral() : UNKNOWN; - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java deleted file mode 100644 index 50dbfe4e9838..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -/** - * Interface representing metrics collected an Airbyte Application. This interface is present as - * Java doesn't support enum inheritance as of Java 17. - */ -public interface MetricsRegistry { - - MetricEmittingApp getApplication(); - - String getMetricName(); - - String getMetricDescription(); - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/NotImplementedMetricClient.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/NotImplementedMetricClient.java deleted file mode 100644 index 9954200ee7fb..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/NotImplementedMetricClient.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -/** - * A mock implementation of MetricClient. Useful for users who do not have any metric client set up - * but still want to use the functionality of airbyte, or in a unit test where user calls the - * testing function but did not initialize the metric client in the first place. - */ -public class NotImplementedMetricClient implements MetricClient { - - @Override - public void count(final MetricsRegistry metric, final long val, final MetricAttribute... attributes) { - // Not Implemented. 
- } - - @Override - public void gauge(final MetricsRegistry metric, final double val, final MetricAttribute... attributes) { - // Not Implemented. - } - - @Override - public void distribution(final MetricsRegistry metric, final double val, final MetricAttribute... attributes) { - // Not Implemented. - } - - @Override - public void shutdown() { - // Not Implemented. - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OpenTelemetryMetricClient.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OpenTelemetryMetricClient.java deleted file mode 100644 index b81a3250a3e7..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OpenTelemetryMetricClient.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import static io.opentelemetry.api.GlobalOpenTelemetry.resetForTest; -import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.semconv.resource.attributes.ResourceAttributes.SERVICE_NAME; - -import com.google.common.annotations.VisibleForTesting; -import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.DoubleHistogram; -import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; -import io.opentelemetry.context.propagation.ContextPropagators; -import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; -import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; -import io.opentelemetry.sdk.OpenTelemetrySdk; -import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.metrics.export.MetricExporter; -import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; -import 
io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.SdkTracerProvider; -import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; - -/** - * Implementation of the {@link MetricClient} that sends the provided metric data to an - * OpenTelemetry compliant metrics store. - *

- * Any {@link MetricAttribute}s provided along with the metric data are passed as key/value pairs - * annotating the metric. - */ -public class OpenTelemetryMetricClient implements MetricClient { - - private Meter meter; - private SdkMeterProvider meterProvider; - - @Override - public void count(final MetricsRegistry metric, final long val, final MetricAttribute... attributes) { - final LongCounter counter = meter - .counterBuilder(metric.getMetricName()) - .setDescription(metric.getMetricDescription()) - .build(); - - final AttributesBuilder attributesBuilder = buildAttributes(attributes); - counter.add(val, attributesBuilder.build()); - } - - @Override - public void gauge(final MetricsRegistry metric, final double val, final MetricAttribute... attributes) { - final AttributesBuilder attributesBuilder = buildAttributes(attributes); - meter.gaugeBuilder(metric.getMetricName()).setDescription(metric.getMetricDescription()) - .buildWithCallback(measurement -> measurement.record(val, attributesBuilder.build())); - } - - @Override - public void distribution(final MetricsRegistry metric, final double val, final MetricAttribute... 
attributes) { - final DoubleHistogram histogramMeter = meter.histogramBuilder(metric.getMetricName()).setDescription(metric.getMetricDescription()).build(); - final AttributesBuilder attributesBuilder = buildAttributes(attributes); - histogramMeter.record(val, attributesBuilder.build()); - } - - public void initialize(final MetricEmittingApp metricEmittingApp, final String otelEndpoint) { - final Resource resource = Resource.getDefault().toBuilder().put(SERVICE_NAME, metricEmittingApp.getApplicationName()).build(); - - final SdkTracerProvider sdkTracerProvider = SdkTracerProvider.builder() - .addSpanProcessor( - BatchSpanProcessor - .builder(OtlpGrpcSpanExporter.builder().setEndpoint(otelEndpoint).build()) - .build()) - .setResource(resource) - .build(); - final MetricExporter metricExporter = OtlpGrpcMetricExporter.builder() - .setEndpoint(otelEndpoint).build(); - initialize(metricEmittingApp, metricExporter, sdkTracerProvider, resource); - } - - @VisibleForTesting - SdkMeterProvider getSdkMeterProvider() { - return meterProvider; - } - - @VisibleForTesting - void initialize( - final MetricEmittingApp metricEmittingApp, - final MetricExporter metricExporter, - final SdkTracerProvider sdkTracerProvider, - final Resource resource) { - meterProvider = SdkMeterProvider.builder() - .registerMetricReader(PeriodicMetricReader.builder(metricExporter).build()) - .setResource(resource) - .build(); - - final OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() - .setTracerProvider(sdkTracerProvider) - .setMeterProvider(meterProvider) - .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) - .buildAndRegisterGlobal(); - - meter = openTelemetry.meterBuilder(metricEmittingApp.getApplicationName()) - .build(); - } - - @Override - public void shutdown() { - resetForTest(); - } - - private AttributesBuilder buildAttributes(final MetricAttribute... 
attributes) { - final AttributesBuilder attributesBuilder = Attributes.builder(); - for (final MetricAttribute attribute : attributes) { - attributesBuilder.put(stringKey(attribute.key()), attribute.value()); - } - return attributesBuilder; - } - -} diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java deleted file mode 100644 index fd00ace2f371..000000000000 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import com.google.api.client.util.Preconditions; -import java.util.Arrays; -import java.util.List; - -/** - * Enum source of truth of all Airbyte metrics. Each enum value represent a metric and is linked to - * an application and contains a description to make it easier to understand. - *

- * Each object of the enum actually represent a metric, so the Registry name is misleading. The - * reason 'Registry' is in the name is to emphasize this enum's purpose as a source of truth for all - * metrics. This also helps code readability i.e. AirbyteMetricsRegistry.metricA. - *

- * Metric Name Convention (adapted from - * https://docs.datadoghq.com/developers/guide/what-best-practices-are-recommended-for-naming-metrics-and-tags/): - *

- * - Use lowercase. Metric names are case-sensitive. - *

- * - Use underscore to delimit names with multiple words. - *

- * - No spaces. This makes the metric confusing to read. - *

- * - Avoid numbers. This makes the metric confusing to read. Numbers should only be used as a - *

- * - Add units at name end if applicable. This is especially relevant for time units. - *

- * - Include the time period in the name if the metric is meant to be run at a certain interval. - */ -public enum OssMetricsRegistry implements MetricsRegistry { - - ATTEMPT_CREATED_BY_RELEASE_STAGE( - MetricEmittingApps.WORKER, - "attempt_created_by_release_stage", - "increments when a new attempt is created. attempts are double counted as this is tagged by release stage."), - ATTEMPT_FAILED_BY_RELEASE_STAGE( - MetricEmittingApps.WORKER, - "attempt_failed_by_release_stage", - "increments when an attempt fails. attempts are double counted as this is tagged by release stage."), - ATTEMPT_FAILED_BY_FAILURE_ORIGIN( - MetricEmittingApps.WORKER, - "attempt_failed_by_failure_origin", - "increments for every failure origin a failed attempt has. since a failure can have multiple origins, a single failure can be counted more than once. tagged by failure origin and failure type."), - ATTEMPT_SUCCEEDED_BY_RELEASE_STAGE( - MetricEmittingApps.WORKER, - "attempt_succeeded_by_release_stage", - "increments when an attempts succeeds. attempts are double counted as this is tagged by release stage."), - EST_NUM_METRICS_EMITTED_BY_REPORTER( - MetricEmittingApps.METRICS_REPORTER, - "est_num_metrics_emitted_by_reporter", - "estimated metrics emitted by the reporter in the last interval. this is estimated since the count is not precise."), - JOB_CANCELLED_BY_RELEASE_STAGE( - MetricEmittingApps.WORKER, - "job_cancelled_by_release_stage", - "increments when a job is cancelled. jobs are double counted as this is tagged by release stage."), - JOB_CREATED_BY_RELEASE_STAGE( - MetricEmittingApps.WORKER, - "job_created_by_release_stage", - "increments when a new job is created. jobs are double counted as this is tagged by release stage."), - JOB_FAILED_BY_RELEASE_STAGE( - MetricEmittingApps.WORKER, - "job_failed_by_release_stage", - "increments when a job fails. 
jobs are double counted as this is tagged by release stage."), - JOB_SUCCEEDED_BY_RELEASE_STAGE( - MetricEmittingApps.WORKER, - "job_succeeded_by_release_stage", - "increments when a job succeeds. jobs are double counted as this is tagged by release stage."), - JSON_STRING_LENGTH( - MetricEmittingApps.WORKER, - "json_string_length", - "string length of a raw json string"), - KUBE_POD_PROCESS_CREATE_TIME_MILLISECS( - MetricEmittingApps.WORKER, - "kube_pod_process_create_time_millisecs", - "time taken to create a new kube pod process"), - NUM_ABNORMAL_SCHEDULED_SYNCS_IN_LAST_DAY( - MetricEmittingApps.METRICS_REPORTER, - "num_abnormal_scheduled_syncs_last_day", - "number of abnormal syncs that have skipped at least 1 scheduled run in last day."), - NUM_ACTIVE_CONN_PER_WORKSPACE( - MetricEmittingApps.METRICS_REPORTER, - "num_active_conn_per_workspace", - "number of active connections per workspace"), - NUM_PENDING_JOBS( - MetricEmittingApps.METRICS_REPORTER, - "num_pending_jobs", - "number of pending jobs"), - NUM_ORPHAN_RUNNING_JOBS( - MetricEmittingApps.METRICS_REPORTER, - "num_orphan_running_jobs", - "number of jobs reported as running that as associated to connection inactive or deprecated"), - NUM_RUNNING_JOBS( - MetricEmittingApps.METRICS_REPORTER, - "num_running_jobs", - "number of running jobs"), - NUM_SOURCE_STREAMS_WITH_RECORD_SCHEMA_VALIDATION_ERRORS(MetricEmittingApps.WORKER, - "record_schema_validation_error", - "number of record schema validation errors"), - NUM_TOTAL_SCHEDULED_SYNCS_IN_LAST_DAY( - MetricEmittingApps.METRICS_REPORTER, - "num_total_scheduled_syncs_last_day", - "number of total syncs runs in last day."), - - NUM_UNUSUALLY_LONG_SYNCS( - MetricEmittingApps.METRICS_REPORTER, - "num_unusually_long_syncs", - "number of unusual long syncs compared to their historic performance."), - - OLDEST_PENDING_JOB_AGE_SECS(MetricEmittingApps.METRICS_REPORTER, - "oldest_pending_job_age_secs", - "oldest pending job in seconds"), - 
OLDEST_RUNNING_JOB_AGE_SECS(MetricEmittingApps.METRICS_REPORTER, - "oldest_running_job_age_secs", - "oldest running job in seconds"), - OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS(MetricEmittingApps.METRICS_REPORTER, - "overall_job_runtime_in_last_hour_by_terminal_state_secs", - "overall job runtime - scheduling and execution for all attempts - for jobs that reach terminal states in the last hour. tagged by terminal states."), - STATE_METRIC_TRACKER_ERROR(MetricEmittingApps.WORKER, - "state_timestamp_metric_tracker_error", - "number of syncs where the state timestamp metric tracker ran out of memory or was unable to match destination state message to source state message"), - TEMPORAL_WORKFLOW_ATTEMPT(MetricEmittingApps.WORKER, - "temporal_workflow_attempt", - "count of the number of workflow attempts"), - TEMPORAL_WORKFLOW_SUCCESS(MetricEmittingApps.WORKER, - "temporal_workflow_success", - "count of the number of successful workflow syncs."), - TEMPORAL_WORKFLOW_FAILURE(MetricEmittingApps.WORKER, - "temporal_workflow_failure", - "count of the number of workflow failures"), - REPLICATION_BYTES_SYNCED(MetricEmittingApps.WORKER, - "replication_bytes_synced", - "number of bytes synced during replication"), - REPLICATION_RECORDS_SYNCED(MetricEmittingApps.WORKER, - "replication_records_synced", - "number of records synced during replication"), - RESET_REQUEST(MetricEmittingApps.WORKER, - "reset_request", - "number of requested resets"), - - ATTEMPTS_CREATED( - MetricEmittingApps.WORKER, - "attempt_created", - "increments when a new attempt is created. one is emitted per attempt", - MetricTags.GEOGRAPHY, - MetricTags.ATTEMPT_NUMBER, - MetricTags.MIN_CONNECTOR_RELEASE_STATE, - MetricTags.MAX_CONNECTOR_RELEASE_STATE), - ATTEMPTS_COMPLETED( - MetricEmittingApps.WORKER, - "attempt_completed", - "increments when a new attempt is completed. 
one is emitted per attempt", - MetricTags.GEOGRAPHY, - MetricTags.ATTEMPT_NUMBER, - MetricTags.MIN_CONNECTOR_RELEASE_STATE, - MetricTags.MAX_CONNECTOR_RELEASE_STATE, - MetricTags.ATTEMPT_QUEUE, - MetricTags.ATTEMPT_OUTCOME, - MetricTags.FAILURE_ORIGIN, // only includes the first failure origin - MetricTags.FAILURE_TYPE); // only includes the first failure type - - private final MetricEmittingApp application; - private final String metricName; - private final String metricDescription; - - // added this field to declare metric attributes, but we never read them. - @SuppressWarnings("FieldCanBeLocal") - private final List metricTags; - - OssMetricsRegistry(final MetricEmittingApp application, - final String metricName, - final String metricDescription, - final String... metricTags) { - Preconditions.checkNotNull(metricDescription); - Preconditions.checkNotNull(application); - - this.application = application; - this.metricName = metricName; - this.metricDescription = metricDescription; - this.metricTags = Arrays.asList(metricTags); - } - - @Override - public MetricEmittingApp getApplication() { - return application; - } - - @Override - public String getMetricName() { - return metricName; - } - - @Override - public String getMetricDescription() { - return metricDescription; - } - -} diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java deleted file mode 100644 index 4d98d015b98d..000000000000 --- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.metrics.lib; - -import static io.airbyte.metrics.lib.ApmTraceUtils.TAG_FORMAT; -import static io.airbyte.metrics.lib.ApmTraceUtils.TAG_PREFIX; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.withSettings; - -import datadog.trace.api.DDTags; -import datadog.trace.api.interceptor.MutableSpan; -import io.opentracing.Span; -import io.opentracing.Tracer; -import io.opentracing.log.Fields; -import io.opentracing.tag.Tags; -import io.opentracing.util.GlobalTracerTestUtil; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.util.Map; -import org.junit.After; -import org.junit.Before; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link ApmTraceUtils} class. - */ -class ApmTraceUtilsTest { - - private static final String TAG_1 = "tag1"; - private static final String TAG_2 = "tag2"; - private static final String VALUE_1 = "foo"; - private static final String VALUE_2 = "bar"; - private static final String PREFIX = "prefix"; - private static final Map TAGS = Map.of(TAG_1, VALUE_1, TAG_2, VALUE_2); - - @Before - @After - public void clearGlobalTracer() { - GlobalTracerTestUtil.resetGlobalTracer(); - } - - @Test - void testAddingTags() { - final Span span = mock(Span.class); - final Tracer tracer = mock(Tracer.class); - when(tracer.activeSpan()).thenReturn(span); - GlobalTracerTestUtil.setGlobalTracerUnconditionally(tracer); - ApmTraceUtils.addTagsToTrace(TAGS); - verify(span, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_1), VALUE_1); - verify(span, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_2), VALUE_2); - } - - @Test - void testAddingTagsWithPrefix() { - final Span span = mock(Span.class); - final Tracer tracer = 
mock(Tracer.class); - when(tracer.activeSpan()).thenReturn(span); - GlobalTracerTestUtil.setGlobalTracerUnconditionally(tracer); - final String tagPrefix = PREFIX; - ApmTraceUtils.addTagsToTrace(TAGS, tagPrefix); - verify(span, times(1)).setTag(String.format(TAG_FORMAT, tagPrefix, TAG_1), VALUE_1); - verify(span, times(1)).setTag(String.format(TAG_FORMAT, tagPrefix, TAG_2), VALUE_2); - } - - @Test - void testAddingTagsToSpanWithPrefix() { - final String tagPrefix = PREFIX; - final Span span = mock(Span.class); - ApmTraceUtils.addTagsToTrace(span, TAGS, tagPrefix); - verify(span, times(1)).setTag(String.format(TAG_FORMAT, tagPrefix, TAG_1), VALUE_1); - verify(span, times(1)).setTag(String.format(TAG_FORMAT, tagPrefix, TAG_2), VALUE_2); - } - - @Test - void testAddingTagsToNullSpanWithPrefix() { - final String tagPrefix = "prefix"; - Assertions.assertDoesNotThrow(() -> ApmTraceUtils.addTagsToTrace(null, TAGS, tagPrefix)); - } - - @Test - void testFormattingTagKeys() { - final String tagKey1 = "tagKey1"; - final String tagPrefix1 = PREFIX; - - final String result1 = ApmTraceUtils.formatTag(tagKey1); - assertEquals("airbyte.metadata." + tagKey1, result1); - - final String result2 = ApmTraceUtils.formatTag(tagKey1, tagPrefix1); - assertEquals("airbyte." + tagPrefix1 + "." 
+ tagKey1, result2); - } - - @Test - void testAddingTagsToRootSpan() { - final Span activeSpan = mock(Span.class, withSettings().extraInterfaces(MutableSpan.class)); - final Tracer tracer = mock(Tracer.class); - final MutableSpan localRootSpan = mock(MutableSpan.class); - when(tracer.activeSpan()).thenReturn(activeSpan); - when(((MutableSpan) activeSpan).getLocalRootSpan()).thenReturn(localRootSpan); - GlobalTracerTestUtil.setGlobalTracerUnconditionally(tracer); - ApmTraceUtils.addTagsToRootSpan(TAGS); - verify(localRootSpan, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_1), VALUE_1); - verify(localRootSpan, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_2), VALUE_2); - } - - @Test - void testAddingTagsToRootSpanWhenActiveSpanIsNull() { - final Tracer tracer = mock(Tracer.class); - when(tracer.activeSpan()).thenReturn(null); - GlobalTracerTestUtil.setGlobalTracerUnconditionally(tracer); - Assertions.assertDoesNotThrow(() -> ApmTraceUtils.addTagsToRootSpan(TAGS)); - } - - @Test - void testRecordErrorOnRootSpan() { - final Span activeSpan = mock(Span.class, withSettings().extraInterfaces(MutableSpan.class)); - final Tracer tracer = mock(Tracer.class); - final MutableSpan localRootSpan = mock(MutableSpan.class); - final Throwable exception = mock(Throwable.class); - when(tracer.activeSpan()).thenReturn(activeSpan); - when(((MutableSpan) activeSpan).getLocalRootSpan()).thenReturn(localRootSpan); - GlobalTracerTestUtil.setGlobalTracerUnconditionally(tracer); - - ApmTraceUtils.recordErrorOnRootSpan(exception); - verify(activeSpan, times(1)).setTag(Tags.ERROR, true); - verify(activeSpan, times(1)).log(Map.of(Fields.ERROR_OBJECT, exception)); - - verify(localRootSpan, times(1)).setError(true); - verify(localRootSpan, times(1)).setTag(DDTags.ERROR_MSG, exception.getMessage()); - verify(localRootSpan, times(1)).setTag(DDTags.ERROR_TYPE, exception.getClass().getName()); - final StringWriter expectedErrorString = new StringWriter(); - 
exception.printStackTrace(new PrintWriter(expectedErrorString)); - verify(localRootSpan, times(1)).setTag(DDTags.ERROR_STACK, expectedErrorString.toString()); - } - - @Test - void testRecordErrorOnRootSpanWhenActiveSpanIsNull() { - final Throwable exception = mock(Throwable.class); - final Tracer tracer = mock(Tracer.class); - when(tracer.activeSpan()).thenReturn(null); - GlobalTracerTestUtil.setGlobalTracerUnconditionally(tracer); - Assertions.assertDoesNotThrow(() -> ApmTraceUtils.recordErrorOnRootSpan(exception)); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/DogStatsDMetricClientTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/DogStatsDMetricClientTest.java deleted file mode 100644 index bf07aa51c32f..000000000000 --- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/DogStatsDMetricClientTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import java.util.Collections; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -class DogStatsDMetricClientTest { - - DogStatsDMetricClient dogStatsDMetricClient; - - @BeforeEach - void setUp() { - dogStatsDMetricClient = new DogStatsDMetricClient(); - dogStatsDMetricClient.initialize(MetricEmittingApps.WORKER, new DatadogClientConfiguration("localhost", "1000", false, Collections.emptyList())); - } - - @AfterEach - void tearDown() { - dogStatsDMetricClient.shutdown(); - } - - @Test - @DisplayName("there should be no exception if we attempt to emit metrics while publish is false") - void testPublishTrueNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - dogStatsDMetricClient.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); - } - - @Test - @DisplayName("there 
should be no exception if we attempt to emit metrics while publish is true") - void testPublishFalseNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - dogStatsDMetricClient.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); - } - - @Test - @DisplayName("there should be no exception if we attempt to emit metrics without initializing") - void testNoInitializeNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - dogStatsDMetricClient.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/DogStatsDMetricSingletonTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/DogStatsDMetricSingletonTest.java deleted file mode 100644 index 0983ad10ddad..000000000000 --- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/DogStatsDMetricSingletonTest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.metrics.lib; - -import java.util.Collections; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -class DogStatsDMetricSingletonTest { - - @AfterEach - void tearDown() { - DogStatsDMetricSingleton.flush(); - } - - @Test - @DisplayName("there should be no exception if we attempt to emit metrics while publish is false") - void testPublishTrueNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - DogStatsDMetricSingleton.initialize(MetricEmittingApps.WORKER, - new DatadogClientConfiguration("localhost", "1000", false, Collections.emptyList())); - DogStatsDMetricSingleton.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); - } - - @Test - @DisplayName("there should be no exception if we attempt to emit metrics while publish is true") - void testPublishFalseNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - DogStatsDMetricSingleton.initialize(MetricEmittingApps.WORKER, - new DatadogClientConfiguration("localhost", "1000", true, Collections.emptyList())); - DogStatsDMetricSingleton.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); - } - - @Test - @DisplayName("there should be no exception if we attempt to emit metrics without initializing") - void testNoInitializeNoEmitError() { - Assertions.assertDoesNotThrow(() -> { - DogStatsDMetricSingleton.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - }); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricClientFactoryTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricClientFactoryTest.java deleted file mode 100644 index a6ae2b4f8e44..000000000000 --- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricClientFactoryTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.metrics.lib; - -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertNull; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -class MetricClientFactoryTest { - - @AfterEach - void tearDown() { - MetricClientFactory.flush(); - } - - @Test - @DisplayName("Should not throw error if calling get without calling create;") - void testMetricClientFactoryGetMetricOnlyDoNotThrow() { - final MetricClient metricClient = MetricClientFactory.getMetricClient(); - assertThat(metricClient, instanceOf(NotImplementedMetricClient.class)); - } - - @Test - @DisplayName("Should not throw error if MetricClientFactory creates a metric client on the first call;") - void testMetricClientFactoryCreateSuccess() { - Assertions.assertDoesNotThrow(() -> { - MetricClientFactory.initialize(MetricEmittingApps.METRICS_REPORTER); - }); - } - - @Test - @DisplayName("Should not return null if metric client not specified;") - void testMicroMeterRegistryRuturnsNullForEmptyClientConfig() { - assertNull(MetricClientFactory.getMeterRegistry()); - } - -} diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java deleted file mode 100644 index 0724f4491fb7..000000000000 --- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/MetricsQueriesTest.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.metrics.lib; - -import static io.airbyte.db.instance.configs.jooq.generated.Keys.ACTOR_CATALOG_FETCH_EVENT__ACTOR_CATALOG_FETCH_EVENT_ACTOR_ID_FKEY; -import static io.airbyte.db.instance.configs.jooq.generated.Keys.ACTOR__ACTOR_WORKSPACE_ID_FKEY; -import static io.airbyte.db.instance.configs.jooq.generated.Keys.CONNECTION__CONNECTION_DESTINATION_ID_FKEY; -import static io.airbyte.db.instance.configs.jooq.generated.Keys.CONNECTION__CONNECTION_SOURCE_ID_FKEY; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_CATALOG_FETCH_EVENT; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.db.Database; -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; -import io.airbyte.db.instance.configs.jooq.generated.enums.NamespaceDefinitionType; -import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; -import io.airbyte.db.instance.test.TestDatabaseProviders; -import io.airbyte.test.utils.DatabaseConnectionHelper; -import java.io.IOException; -import java.sql.SQLException; -import java.util.List; -import java.util.UUID; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import 
org.testcontainers.containers.PostgreSQLContainer; - -public class MetricsQueriesTest { - - private static final String USER = "user"; - private static final String PASS = "hunter2"; - private static final String SRC = "src"; - private static final String DEST = "dst"; - private static final String DISPLAY_NAME = "should not error out or return any result if not applicable"; - private static final String CONN = "conn"; - - private static final UUID SRC_DEF_ID = UUID.randomUUID(); - private static final UUID DST_DEF_ID = UUID.randomUUID(); - - private static Database configDb; - - @BeforeAll - static void setUpAll() throws IOException, SQLException, DatabaseInitializationException { - final PostgreSQLContainer container = new PostgreSQLContainer<>("postgres:13-alpine") - .withUsername(USER) - .withPassword(PASS); - container.start(); - - final DataSource dataSource = DatabaseConnectionHelper.createDataSource(container); - final DSLContext dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); - final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(dataSource, dslContext); - configDb = databaseProviders.createNewConfigsDatabase(); - databaseProviders.createNewJobsDatabase(); - - // create src and dst def - configDb.transaction(ctx -> ctx - .insertInto(ACTOR_DEFINITION, ACTOR_DEFINITION.ID, ACTOR_DEFINITION.NAME, ACTOR_DEFINITION.DOCKER_REPOSITORY, - ACTOR_DEFINITION.DOCKER_IMAGE_TAG, ACTOR_DEFINITION.SPEC, ACTOR_DEFINITION.ACTOR_TYPE, ACTOR_DEFINITION.RELEASE_STAGE) - .values(SRC_DEF_ID, "srcDef", "repository", "tag", JSONB.valueOf("{}"), ActorType.source, ReleaseStage.beta) - .values(DST_DEF_ID, "dstDef", "repository", "tag", JSONB.valueOf("{}"), ActorType.destination, ReleaseStage.generally_available) - .values(UUID.randomUUID(), "dstDef", "repository", "tag", JSONB.valueOf("{}"), ActorType.destination, ReleaseStage.alpha).execute()); - - // drop constraints to simplify test set up - configDb.transaction(ctx -> 
ctx.alterTable(ACTOR).dropForeignKey(ACTOR__ACTOR_WORKSPACE_ID_FKEY.constraint()).execute()); - configDb.transaction(ctx -> ctx.alterTable(CONNECTION).dropForeignKey(CONNECTION__CONNECTION_DESTINATION_ID_FKEY.constraint()).execute()); - configDb.transaction(ctx -> ctx.alterTable(CONNECTION).dropForeignKey(CONNECTION__CONNECTION_SOURCE_ID_FKEY.constraint()).execute()); - configDb.transaction(ctx -> ctx.alterTable(ACTOR_CATALOG_FETCH_EVENT) - .dropForeignKey(ACTOR_CATALOG_FETCH_EVENT__ACTOR_CATALOG_FETCH_EVENT_ACTOR_ID_FKEY.constraint()).execute()); - configDb.transaction(ctx -> ctx.alterTable(WORKSPACE).alter(WORKSPACE.SLUG).dropNotNull().execute()); - configDb.transaction(ctx -> ctx.alterTable(WORKSPACE).alter(WORKSPACE.INITIAL_SETUP_COMPLETE).dropNotNull().execute()); - } - - @Nested - class srcIdAndDestIdToReleaseStages { - - @AfterEach - void tearDown() throws SQLException { - configDb.transaction(ctx -> ctx.truncate(ACTOR).execute()); - configDb.transaction(ctx -> ctx.truncate(JOBS).execute()); - } - - @Test - @DisplayName("should return the right release stages") - void shouldReturnReleaseStages() throws SQLException { - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - - // create src and dst - configDb.transaction( - ctx -> ctx.insertInto(ACTOR, ACTOR.ID, ACTOR.WORKSPACE_ID, ACTOR.ACTOR_DEFINITION_ID, ACTOR.NAME, ACTOR.CONFIGURATION, ACTOR.ACTOR_TYPE) - .values(srcId, UUID.randomUUID(), SRC_DEF_ID, SRC, JSONB.valueOf("{}"), ActorType.source) - .values(dstId, UUID.randomUUID(), DST_DEF_ID, DEST, JSONB.valueOf("{}"), ActorType.destination) - .execute()); - final var res = configDb.query(ctx -> MetricQueries.srcIdAndDestIdToReleaseStages(ctx, srcId, dstId)); - assertEquals(List.of(ReleaseStage.beta, ReleaseStage.generally_available), res); - } - - @Test - @DisplayName(DISPLAY_NAME) - void shouldReturnNothingIfNotApplicable() throws SQLException { - final var res = configDb.query(ctx -> MetricQueries.srcIdAndDestIdToReleaseStages(ctx, 
UUID.randomUUID(), UUID.randomUUID())); - assertEquals(0, res.size()); - } - - } - - @Nested - class jobIdToReleaseStages { - - @AfterEach - void tearDown() throws SQLException { - configDb.transaction(ctx -> ctx.truncate(ACTOR).execute()); - configDb.transaction(ctx -> ctx.truncate(JOBS).execute()); - } - - @Test - @DisplayName("should return the right release stages") - void shouldReturnReleaseStages() throws SQLException { - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - // create src and dst - configDb.transaction( - ctx -> ctx.insertInto(ACTOR, ACTOR.ID, ACTOR.WORKSPACE_ID, ACTOR.ACTOR_DEFINITION_ID, ACTOR.NAME, ACTOR.CONFIGURATION, ACTOR.ACTOR_TYPE) - .values(srcId, UUID.randomUUID(), SRC_DEF_ID, SRC, JSONB.valueOf("{}"), ActorType.source) - .values(dstId, UUID.randomUUID(), DST_DEF_ID, DEST, JSONB.valueOf("{}"), ActorType.destination) - .execute()); - final var connId = UUID.randomUUID(); - // create connection - configDb.transaction( - ctx -> ctx - .insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL) - .values(connId, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true) - .execute()); - // create job - final var jobId = 1L; - configDb.transaction( - ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE).values(jobId, connId.toString()).execute()); - - final var res = configDb.query(ctx -> MetricQueries.jobIdToReleaseStages(ctx, jobId)); - assertEquals(List.of(ReleaseStage.beta, ReleaseStage.generally_available), res); - } - - @Test - @DisplayName(DISPLAY_NAME) - void shouldReturnNothingIfNotApplicable() throws SQLException { - final var missingJobId = 100000L; - final var res = configDb.query(ctx -> MetricQueries.jobIdToReleaseStages(ctx, missingJobId)); - assertEquals(0, res.size()); - } - - } - -} diff --git 
a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/OpenTelemetryMetricClientTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/OpenTelemetryMetricClientTest.java deleted file mode 100644 index 83350f2c66a5..000000000000 --- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/OpenTelemetryMetricClientTest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.lib; - -import static io.opentelemetry.semconv.resource.attributes.ResourceAttributes.SERVICE_NAME; -import static org.assertj.core.api.AssertionsForClassTypes.assertThat; - -import com.google.common.collect.Iterables; -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; -import io.opentelemetry.sdk.trace.SdkTracerProvider; -import java.util.List; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -class OpenTelemetryMetricClientTest { - - OpenTelemetryMetricClient openTelemetryMetricClient; - private final static String TAG = "tag1"; - - private final static MetricEmittingApp METRIC_EMITTING_APP = MetricEmittingApps.WORKER; - private InMemoryMetricExporter metricExporter; - private SdkMeterProvider metricProvider; - - @BeforeEach - void setUp() { - openTelemetryMetricClient = new OpenTelemetryMetricClient(); - - final Resource resource = Resource.getDefault().toBuilder().put(SERVICE_NAME, METRIC_EMITTING_APP.getApplicationName()).build(); - metricExporter = InMemoryMetricExporter.create(); - final SdkTracerProvider sdkTracerProvider = SdkTracerProvider.builder() - .setResource(resource) - .build(); - 
openTelemetryMetricClient.initialize(METRIC_EMITTING_APP, metricExporter, sdkTracerProvider, resource); - - metricProvider = openTelemetryMetricClient.getSdkMeterProvider(); - } - - @AfterEach - void tearDown() { - openTelemetryMetricClient.shutdown(); - } - - @Test - @DisplayName("Should send out count metric with correct metric name, description and value") - void testCountSuccess() { - openTelemetryMetricClient.count(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - - metricProvider.forceFlush(); - final List metricDataList = metricExporter.getFinishedMetricItems(); - final MetricData data = Iterables.getOnlyElement(metricDataList); - - assertThat(data.getName()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricName()); - assertThat(data.getDescription()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricDescription()); - assertThat(data.getLongSumData().getPoints().stream().anyMatch(longPointData -> longPointData.getValue() == 1L)); - } - - @Test - @DisplayName("Tags should be passed into metrics") - void testCountWithTagSuccess() { - openTelemetryMetricClient.count(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1, new MetricAttribute(TAG, TAG)); - - metricProvider.forceFlush(); - final List metricDataList = metricExporter.getFinishedMetricItems(); - final MetricData data = Iterables.getOnlyElement(metricDataList); - - assertThat(data.getName()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricName()); - assertThat(data.getDescription()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricDescription()); - assertThat(data.getLongSumData().getPoints().stream() - .anyMatch( - longPointData -> longPointData.getValue() == 1L && TAG.equals(longPointData.getAttributes().get(AttributeKey.stringKey(TAG))))); - } - - @Test - @DisplayName("Should send out gauge metric with correct metric name, description and value") - void 
testGaugeSuccess() throws Exception { - openTelemetryMetricClient.gauge(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 1); - - metricProvider.forceFlush(); - final List metricDataList = metricExporter.getFinishedMetricItems(); - final MetricData data = Iterables.getOnlyElement(metricDataList); - - assertThat(data.getName()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricName()); - assertThat(data.getDescription()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricDescription()); - assertThat(data.getDoubleGaugeData().getPoints().stream().anyMatch(doublePointData -> doublePointData.getValue() == 1.0)); - } - - @Test - @DisplayName("Should send out histogram metric with correct metric name, description and value") - void testHistogramSuccess() { - openTelemetryMetricClient.distribution(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 10); - openTelemetryMetricClient.distribution(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS, 30); - - metricProvider.forceFlush(); - final List metricDataList = metricExporter.getFinishedMetricItems(); - final MetricData data = Iterables.getOnlyElement(metricDataList); - - assertThat(data.getName()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricName()); - assertThat(data.getDescription()).isEqualTo(OssMetricsRegistry.KUBE_POD_PROCESS_CREATE_TIME_MILLISECS.getMetricDescription()); - assertThat(data.getHistogramData().getPoints().stream().anyMatch(histogramPointData -> histogramPointData.getMax() == 30.0)); - assertThat(data.getHistogramData().getPoints().stream().anyMatch(histogramPointData -> histogramPointData.getMin() == 10.0)); - } - -} diff --git a/airbyte-metrics/readme.md b/airbyte-metrics/readme.md deleted file mode 100644 index 332acbb701f3..000000000000 --- a/airbyte-metrics/readme.md +++ /dev/null @@ -1,3 +0,0 @@ -# airbyte-metrics - -Responsible for logic related to pushing monitoring and performance 
metrics to external aggregates. This is only used if explicitly turned on by the user. diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile deleted file mode 100644 index 0cfbeddb00af..000000000000 --- a/airbyte-metrics/reporter/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:1.0 -FROM ${JDK_IMAGE} AS metrics-reporter - -ARG VERSION=0.40.32 - -ENV APPLICATION airbyte-metrics-reporter -ENV VERSION ${VERSION} - -WORKDIR /app - -ADD bin/${APPLICATION}-${VERSION}.tar /app - - -# wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-${VERSION}/bin/${APPLICATION}"] diff --git a/airbyte-metrics/reporter/README.md b/airbyte-metrics/reporter/README.md deleted file mode 100644 index 745736905506..000000000000 --- a/airbyte-metrics/reporter/README.md +++ /dev/null @@ -1,5 +0,0 @@ -## Airbyte Metrics Reporter - -The Reporter attempts to keep code clean by centralising metric calculation and submissions. - -This is primarily intended for Airbyte Cloud, though OSS users are certainly welcome to make use of this. 
diff --git a/airbyte-metrics/reporter/build.gradle b/airbyte-metrics/reporter/build.gradle deleted file mode 100644 index 22885a77de9f..000000000000 --- a/airbyte-metrics/reporter/build.gradle +++ /dev/null @@ -1,42 +0,0 @@ -plugins { - id 'application' -} - -configurations { - jdbc -} - -dependencies { - annotationProcessor platform(libs.micronaut.bom) - annotationProcessor libs.bundles.micronaut.annotation.processor - - implementation platform(libs.micronaut.bom) - implementation libs.bundles.micronaut - - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-db:jooq') - implementation project(':airbyte-db:db-lib') - implementation project(':airbyte-metrics:metrics-lib') - - implementation(libs.jooq) { - force = true - } - - testAnnotationProcessor platform(libs.micronaut.bom) - testAnnotationProcessor libs.bundles.micronaut.test.annotation.processor - - testImplementation project(':airbyte-test-utils') - testImplementation libs.bundles.micronaut.test - testImplementation libs.postgresql - testImplementation libs.platform.testcontainers.postgresql -} - -application { - applicationName = "airbyte-metrics-reporter" - mainClass = 'io.airbyte.metrics.reporter.Application' - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -tasks.named("buildDockerImage") { - dependsOn copyGeneratedTar -} diff --git a/airbyte-metrics/reporter/gradle.properties b/airbyte-metrics/reporter/gradle.properties deleted file mode 100644 index ce3ec4d9f7a5..000000000000 --- a/airbyte-metrics/reporter/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=metrics-reporter diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Application.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Application.java deleted file mode 100644 index 76ff91e9f1bc..000000000000 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Application.java +++ 
/dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.reporter; - -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.MetricEmittingApps; -import io.micronaut.runtime.Micronaut; - -/** - * Metric Reporter application. - *

- * Responsible for emitting metric information on a periodic basis. - */ -public class Application { - - public static void main(final String[] args) { - MetricClientFactory.initialize(MetricEmittingApps.METRICS_REPORTER); - Micronaut.run(Application.class, args); - } - -} diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Emitter.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Emitter.java deleted file mode 100644 index c375603995be..000000000000 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Emitter.java +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.reporter; - -import io.airbyte.metrics.lib.MetricAttribute; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.metrics.lib.MetricTags; -import io.airbyte.metrics.lib.OssMetricsRegistry; -import jakarta.inject.Singleton; -import java.lang.invoke.MethodHandles; -import java.time.Duration; -import java.util.concurrent.Callable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Singleton -final class NumPendingJobs extends Emitter { - - public NumPendingJobs(final MetricClient client, final MetricRepository db) { - super(client, () -> { - db.numberOfPendingJobsByGeography().forEach((geography, count) -> client.gauge( - OssMetricsRegistry.NUM_PENDING_JOBS, - count, - new MetricAttribute(MetricTags.GEOGRAPHY, geography))); - - return null; - }); - } - -} - -@Singleton -final class NumRunningJobs extends Emitter { - - public NumRunningJobs(final MetricClient client, final MetricRepository db) { - super(client, () -> { - db.numberOfRunningJobsByTaskQueue().forEach((attemptQueue, count) -> client.gauge( - OssMetricsRegistry.NUM_RUNNING_JOBS, - count, - new MetricAttribute(MetricTags.ATTEMPT_QUEUE, attemptQueue))); - return null; - }); - } - -} - -@Singleton -final class NumOrphanRunningJobs extends Emitter { - - NumOrphanRunningJobs(final 
MetricClient client, final MetricRepository db) { - super(client, () -> { - final var orphaned = db.numberOfOrphanRunningJobs(); - client.gauge(OssMetricsRegistry.NUM_ORPHAN_RUNNING_JOBS, orphaned); - return null; - }); - } - -} - -@Singleton -final class OldestRunningJob extends Emitter { - - OldestRunningJob(final MetricClient client, final MetricRepository db) { - super(client, () -> { - db.oldestRunningJobAgeSecsByTaskQueue().forEach((attemptQueue, count) -> client.gauge( - OssMetricsRegistry.OLDEST_RUNNING_JOB_AGE_SECS, - count, - new MetricAttribute(MetricTags.ATTEMPT_QUEUE, attemptQueue))); - return null; - }); - } - -} - -@Singleton -final class OldestPendingJob extends Emitter { - - OldestPendingJob(final MetricClient client, final MetricRepository db) { - super(client, () -> { - db.oldestPendingJobAgeSecsByGeography().forEach((geographyType, count) -> client.gauge( - OssMetricsRegistry.OLDEST_PENDING_JOB_AGE_SECS, - count, - new MetricAttribute(MetricTags.GEOGRAPHY, geographyType))); - return null; - }); - } - -} - -@Singleton -final class NumActiveConnectionsPerWorkspace extends Emitter { - - NumActiveConnectionsPerWorkspace(final MetricClient client, final MetricRepository db) { - super(client, () -> { - final var workspaceConns = db.numberOfActiveConnPerWorkspace(); - for (final long numCons : workspaceConns) { - client.distribution(OssMetricsRegistry.NUM_ACTIVE_CONN_PER_WORKSPACE, numCons); - } - return null; - }); - } - -} - -@Singleton -final class NumAbnormalScheduledSyncs extends Emitter { - - NumAbnormalScheduledSyncs(final MetricClient client, final MetricRepository db) { - super(client, () -> { - final var count = db.numberOfJobsNotRunningOnScheduleInLastDay(); - client.gauge(OssMetricsRegistry.NUM_ABNORMAL_SCHEDULED_SYNCS_IN_LAST_DAY, count); - return null; - }); - } - - @Override - public Duration getDuration() { - return Duration.ofHours(1); - } - -} - -@Singleton -final class NumUnusuallyLongSyncs extends Emitter { - - 
NumUnusuallyLongSyncs(final MetricClient client, final MetricRepository db) { - super(client, () -> { - final var count = db.numberOfJobsRunningUnusuallyLong(); - client.gauge(OssMetricsRegistry.NUM_UNUSUALLY_LONG_SYNCS, count); - return null; - }); - } - - @Override - public Duration getDuration() { - return Duration.ofMinutes(15); - } - -} - -@Singleton -final class TotalScheduledSyncs extends Emitter { - - TotalScheduledSyncs(final MetricClient client, final MetricRepository db) { - super(client, () -> { - final var count = db.numScheduledActiveConnectionsInLastDay(); - client.gauge(OssMetricsRegistry.NUM_TOTAL_SCHEDULED_SYNCS_IN_LAST_DAY, count); - return null; - }); - } - - @Override - public Duration getDuration() { - return Duration.ofHours(1); - } - -} - -@Singleton -final class TotalJobRuntimeByTerminalState extends Emitter { - - public TotalJobRuntimeByTerminalState(final MetricClient client, final MetricRepository db) { - super(client, () -> { - db.overallJobRuntimeForTerminalJobsInLastHour() - .forEach((jobStatus, time) -> client.distribution( - OssMetricsRegistry.OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS, - time, - new MetricAttribute(MetricTags.JOB_STATUS, jobStatus.getLiteral()))); - return null; - }); - } - - @Override - public Duration getDuration() { - return Duration.ofHours(1); - } - -} - -/** - * Abstract base class for all emitted metrics. - *

- * As this is a sealed class, all implementations of it are contained within this same file. - */ -sealed class Emitter { - - protected static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - protected final MetricClient client; - protected final Callable callable; - - Emitter(final MetricClient client, final Callable callable) { - this.client = client; - this.callable = callable; - } - - /** - * Emit the metrics by calling the callable. - *

- * Any exception thrown by the callable will be logged. - * - * @TODO: replace log message with a published error-event of some kind. - */ - public void Emit() { - try { - callable.call(); - client.count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } catch (final Exception e) { - log.error("Exception querying database for metric: ", e); - } - } - - /** - * How often this metric should report, defaults to 15s if not overwritten. - * - * @return Duration of how often this metric should report. - */ - public Duration getDuration() { - return Duration.ofSeconds(15); - } - -} diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/EventListeners.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/EventListeners.java deleted file mode 100644 index 60375e1f7732..000000000000 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/EventListeners.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.reporter; - -import io.micronaut.runtime.event.ApplicationShutdownEvent; -import io.micronaut.runtime.event.ApplicationStartupEvent; -import io.micronaut.runtime.event.annotation.EventListener; -import jakarta.inject.Singleton; -import java.lang.invoke.MethodHandles; -import java.util.List; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * EventListeners registers event listeners for the startup and shutdown events from Micronaut. 
- */ -@Singleton -class EventListeners { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final List emitters; - private final ScheduledExecutorService executor; - - EventListeners(final List emitters) { - this.emitters = emitters; - this.executor = Executors.newScheduledThreadPool(emitters.size()); - } - - /** - * Manually registers all the emitters to run on startup. - * - * @param event unused but required in order to listen to the startup event. - */ - @EventListener - public void startEmitters(final ApplicationStartupEvent event) { - emitters.forEach(emitter -> executor.scheduleAtFixedRate(emitter::Emit, 0, emitter.getDuration().getSeconds(), TimeUnit.SECONDS)); - log.info("registered {} emitters", emitters.size()); - } - - /** - * Attempts to cleanly shutdown the running emitters - * - * @param event unused but required in order to listen to the shutdown event. - */ - @EventListener - public void stopEmitters(final ApplicationShutdownEvent event) { - log.info("shutting down emitters"); - executor.shutdown(); - } - -} diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/MetricRepository.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/MetricRepository.java deleted file mode 100644 index 7fee4766a701..000000000000 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/MetricRepository.java +++ /dev/null @@ -1,299 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.metrics.reporter; - -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.ATTEMPTS; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; -import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.count; -import static org.jooq.impl.DSL.name; -import static org.jooq.impl.SQLDataType.VARCHAR; - -import io.airbyte.db.instance.configs.jooq.generated.enums.StatusType; -import io.airbyte.db.instance.jobs.jooq.generated.enums.AttemptStatus; -import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus; -import jakarta.inject.Singleton; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.impl.DSL; - -@Singleton -class MetricRepository { - - private final DSLContext ctx; - - // We have to report gauge metric with value 0 if they are not showing up in the DB, - // otherwise datadog will use previous reported value. - // Another option we didn't use here is to build this into SQL query - it will lead SQL much less - // readable while not decreasing any complexity. 
- private final static List REGISTERED_ATTEMPT_QUEUE = List.of("SYNC", "AWS_PARIS_SYNC", "null"); - private final static List REGISTERED_GEOGRAPHY = List.of("US", "AUTO", "EU"); - - MetricRepository(final DSLContext ctx) { - this.ctx = ctx; - } - - Map numberOfPendingJobsByGeography() { - String geographyResultAlias = "geography"; - String countResultAlias = "result"; - var result = ctx.select(CONNECTION.GEOGRAPHY.cast(String.class).as(geographyResultAlias), count(asterisk()).as(countResultAlias)) - .from(JOBS) - .join(CONNECTION) - .on(CONNECTION.ID.cast(VARCHAR(255)).eq(JOBS.SCOPE)) - .where(JOBS.STATUS.eq(JobStatus.pending)) - .groupBy(CONNECTION.GEOGRAPHY); - Field geographyResultField = DSL.field(name(geographyResultAlias), String.class); - Field countResultField = DSL.field(name(countResultAlias), Integer.class); - Map queriedMap = result.fetchMap(geographyResultField, countResultField); - for (final String potentialGeography : REGISTERED_GEOGRAPHY) { - if (!queriedMap.containsKey(potentialGeography)) { - queriedMap.put(potentialGeography, 0); - } - } - return queriedMap; - } - - Map numberOfRunningJobsByTaskQueue() { - String countFieldName = "count"; - var result = ctx.select(ATTEMPTS.PROCESSING_TASK_QUEUE, count(asterisk()).as(countFieldName)) - .from(JOBS) - .join(CONNECTION) - .on(CONNECTION.ID.cast(VARCHAR(255)).eq(JOBS.SCOPE)) - .join(ATTEMPTS) - .on(ATTEMPTS.JOB_ID.eq(JOBS.ID)) - .where(JOBS.STATUS.eq(JobStatus.running).and(CONNECTION.STATUS.eq(StatusType.active))) - .and(ATTEMPTS.STATUS.eq(AttemptStatus.running)) - .groupBy(ATTEMPTS.PROCESSING_TASK_QUEUE); - - Field countResultField = DSL.field(name(countFieldName), Integer.class); - Map queriedMap = result.fetchMap(ATTEMPTS.PROCESSING_TASK_QUEUE, countResultField); - for (final String potentialAttemptQueue : REGISTERED_ATTEMPT_QUEUE) { - if (!queriedMap.containsKey(potentialAttemptQueue)) { - queriedMap.put(potentialAttemptQueue, 0); - } - } - return queriedMap; - } - - // This is a rare case and 
not likely to be related to data planes; So we will monitor them as a - // whole. - int numberOfOrphanRunningJobs() { - return ctx.selectCount() - .from(JOBS) - .join(CONNECTION) - .on(CONNECTION.ID.cast(VARCHAR(255)).eq(JOBS.SCOPE)) - .where(JOBS.STATUS.eq(JobStatus.running).and(CONNECTION.STATUS.ne(StatusType.active))) - .fetchOne(0, int.class); - } - - Map oldestPendingJobAgeSecsByGeography() { - final var query = - """ - SELECT cast(connection.geography as varchar) AS geography, MAX(EXTRACT(EPOCH FROM (current_timestamp - jobs.created_at))) AS run_duration_seconds - FROM jobs - JOIN connection - ON jobs.scope::uuid = connection.id - WHERE jobs.status = 'pending' - GROUP BY geography; - """; - final var result = ctx.fetch(query); - Field geographyResultField = DSL.field(name("geography"), String.class); - Field runDurationSecondsField = DSL.field(name("run_duration_seconds"), Double.class); - Map queriedMap = result.intoMap(geographyResultField, runDurationSecondsField); - for (final String potentialGeography : REGISTERED_GEOGRAPHY) { - if (!queriedMap.containsKey(potentialGeography)) { - queriedMap.put(potentialGeography, 0.0); - } - } - return queriedMap; - } - - Map oldestRunningJobAgeSecsByTaskQueue() { - final var query = - """ - SELECT attempts.processing_task_queue AS task_queue, MAX(EXTRACT(EPOCH FROM (current_timestamp - jobs.created_at))) AS run_duration_seconds - FROM jobs - JOIN attempts - ON jobs.id = attempts.job_id - WHERE jobs.status = 'running' AND attempts.status = 'running' - GROUP BY task_queue; - """; - final var result = ctx.fetch(query); - Field taskQueueResultField = DSL.field(name("task_queue"), String.class); - Field runDurationSecondsField = DSL.field(name("run_duration_seconds"), Double.class); - Map queriedMap = result.intoMap(taskQueueResultField, runDurationSecondsField); - for (final String potentialAttemptQueue : REGISTERED_ATTEMPT_QUEUE) { - if (!queriedMap.containsKey(potentialAttemptQueue)) { - 
queriedMap.put(potentialAttemptQueue, 0.0); - } - } - return queriedMap; - } - - List numberOfActiveConnPerWorkspace() { - final var query = """ - SELECT workspace_id, count(c.id) as num_conn - FROM actor - INNER JOIN workspace ws ON actor.workspace_id = ws.id - INNER JOIN connection c ON actor.id = c.source_id - WHERE ws.tombstone = false - AND actor.tombstone = false AND actor.actor_type = 'source' - AND c.status = 'active' - GROUP BY workspace_id; - """; - return ctx.fetch(query).getValues("num_conn", long.class); - } - - long numScheduledActiveConnectionsInLastDay() { - final var queryForTotalConnections = """ - select count(1) as connection_count - from connection c - where - c.updated_at < now() - interval '24 hours 1 minutes' - and cast(c.schedule::jsonb->'timeUnit' as text) IN ('"hours"', '"minutes"') - and c.status = 'active' - """; - - return ctx.fetchOne(queryForTotalConnections).get("connection_count", long.class); - } - - long numberOfJobsNotRunningOnScheduleInLastDay() { - // This query finds all sync jobs ran in last 24 hours and count how many times they have run. - // Comparing this to the expected number of runs (24 hours divide by configured cadence in hours), - // if it runs below that expected number it will be considered as abnormal instance. - // For example, if it's configured to run every 6 hours but in last 24 hours it only has 3 runs, - // it will be considered as 1 abnormal instance. 
- final var queryForAbnormalSyncInHoursInLastDay = """ - select count(1) as cnt - from ( - select - c.id, - count(*) as cnt - from connection c - left join jobs j on j.scope::uuid = c.id - where - c.schedule is not null - and c.schedule != 'null' - and j.created_at > now() - interval '24 hours 1 minutes' - and c.status = 'active' - and j.config_type = 'sync' - and c.updated_at < now() - interval '24 hours 1 minutes' - and cast(c.schedule::jsonb->'timeUnit' as text) = '"hours"' - group by 1 - having count(*) < 24 / cast(c.schedule::jsonb->'units' as integer) - ) as abnormal_jobs - """; - - // Similar to the query above, this finds if the connection cadence's timeUnit is minutes. - // thus we use 1440 (=24 hours x 60 minutes) to divide the configured cadence. - final var queryForAbnormalSyncInMinutesInLastDay = """ - select count(1) as cnt - from ( - select - c.id, - count(*) as cnt - from - connection c - left join Jobs j on - j.scope::uuid = c.id - where - c.schedule is not null - and c.schedule != 'null' - and j.created_at > now() - interval '24 hours 1 minutes' - and c.status = 'active' - and j.config_type = 'sync' - and c.updated_at < now() - interval '24 hours 1 minutes' - and cast(c.schedule::jsonb->'timeUnit' as text) = '"minutes"' - group by 1 - having count(*) < 1440 / cast(c.schedule::jsonb->'units' as integer) - ) as abnormal_jobs - """; - return ctx.fetchOne(queryForAbnormalSyncInHoursInLastDay).get("cnt", long.class) - + ctx.fetchOne(queryForAbnormalSyncInMinutesInLastDay).get("cnt", long.class); - } - - long numberOfJobsRunningUnusuallyLong() { - // Definition of unusually long means runtime is more than 2x historic avg run time or 15 - // minutes more than avg run time, whichever is greater. - // It will skip jobs with fewer than 4 runs in last week to make sure the historic avg run is - // meaningful and consistent. - final var query = - """ - -- pick average running time and last sync running time in attempts table. 
- select - current_running_attempts.connection_id, - current_running_attempts.running_time, - historic_avg_running_attempts.avg_run_sec - from - ( - -- Sub-query-1: query the currently running attempt's running time. - ( - select - jobs.scope as connection_id, - extract(epoch from age(NOW(), attempts.created_at)) as running_time - from - jobs - join attempts on - jobs.id = attempts.job_id - where - jobs.status = 'running' - and attempts.status = 'running' - and jobs.config_type = 'sync' ) - as current_running_attempts - join - -- Sub-query-2: query historic attempts' average running time within last week. - ( - select - jobs.scope as connection_id, - avg(extract(epoch from age(attempts.updated_at, attempts.created_at))) as avg_run_sec - from - jobs - join attempts on - jobs.id = attempts.job_id - where - -- 168 hours is 1 week: we look for all attempts in last week to calculate its average running time. - attempts.updated_at >= NOW() - interval '168 HOUR' - and jobs.status = 'succeeded' - and attempts.status = 'succeeded' - and jobs.config_type = 'sync' - group by - connection_id - having - count(*) > 4 - ) as historic_avg_running_attempts - on - current_running_attempts.connection_id = historic_avg_running_attempts.connection_id) - where - -- Find if currently running time takes 2x more time than average running time, - -- and it's 15 minutes (900 seconds) more than average running time so it won't alert on noises for quick sync jobs. 
- current_running_attempts.running_time > greatest(historic_avg_running_attempts.avg_run_sec * 2, historic_avg_running_attempts.avg_run_sec + 900) - """; - final var queryResults = ctx.fetch(query); - return queryResults.getValues("connection_id").size(); - } - - Map overallJobRuntimeForTerminalJobsInLastHour() { - final var query = """ - SELECT status, extract(epoch from age(updated_at, created_at)) AS sec FROM jobs - WHERE updated_at >= NOW() - INTERVAL '1 HOUR' - AND jobs.status IN ('failed', 'succeeded', 'cancelled'); - """; - final var queryResults = ctx.fetch(query); - final var statuses = queryResults.getValues("status", JobStatus.class); - final var times = queryResults.getValues("sec", double.class); - - final var results = new HashMap(); - for (int i = 0; i < statuses.size(); i++) { - results.put(statuses.get(i), times.get(i)); - } - - return results; - } - -} diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterFactory.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterFactory.java deleted file mode 100644 index 4e3c0e2413c0..000000000000 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterFactory.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.reporter; - -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.micronaut.context.annotation.Factory; -import jakarta.inject.Singleton; - -/** - * Micronaut factory for creating the appropriate singletons utilized by the metric reporter - * service. 
- */ -@Factory -class ReporterFactory { - - @Singleton - public MetricClient metricClient() { - return MetricClientFactory.getMetricClient(); - } - -} diff --git a/airbyte-metrics/reporter/src/main/resources/application.yml b/airbyte-metrics/reporter/src/main/resources/application.yml deleted file mode 100644 index 49dd8cb8d7ae..000000000000 --- a/airbyte-metrics/reporter/src/main/resources/application.yml +++ /dev/null @@ -1,38 +0,0 @@ -micronaut: - application: - name: airbyte-metrics-reporter - security: - intercept-url-map: - - pattern: /** - httpMethod: GET - access: - - isAnonymous() - server: - port: 9000 - -datasources: - config: - connection-test-query: SELECT 1 - connection-timeout: 30000 - idle-timeout: 600000 - maximum-pool-size: 10 - url: ${DATABASE_URL} - driverClassName: org.postgresql.Driver - username: ${DATABASE_USER} - password: ${DATABASE_PASSWORD} - -jooq: - datasources: - config: - jackson-converter-enabled: true - sql-dialect: POSTGRES - -endpoints: - all: - enabled: true - -logger: - levels: - io.airbyte.bootloader: DEBUG -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG diff --git a/airbyte-metrics/reporter/src/main/resources/micronaut-banner.txt b/airbyte-metrics/reporter/src/main/resources/micronaut-banner.txt deleted file mode 100644 index 633f73326c1a..000000000000 --- a/airbyte-metrics/reporter/src/main/resources/micronaut-banner.txt +++ /dev/null @@ -1,8 +0,0 @@ - - ___ _ __ __ - / | (_)____/ /_ __ __/ /____ - / /| | / / ___/ __ \/ / / / __/ _ \ - / ___ |/ / / / /_/ / /_/ / /_/ __/ -/_/ |_/_/_/ /_.___/\__, /\__/\___/ - /____/ - : airbyte-metrics-reporter : \ No newline at end of file diff --git a/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/EmitterTest.java b/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/EmitterTest.java deleted file mode 100644 index 2c0bb854166a..000000000000 --- 
a/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/EmitterTest.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.metrics.reporter; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus; -import io.airbyte.metrics.lib.MetricAttribute; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.metrics.lib.MetricTags; -import io.airbyte.metrics.lib.OssMetricsRegistry; -import java.time.Duration; -import java.util.List; -import java.util.Map; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class EmitterTest { - - private MetricClient client; - private MetricRepository repo; - - private static final String SYNC_QUEUE = "SYNC"; - private static final String AWS_QUEUE = "AWS"; - - private static final String EU_REGION = "EU"; - private static final String AUTO_REGION = "AUTO"; - - @BeforeEach - void setUp() { - client = mock(MetricClient.class); - repo = mock(MetricRepository.class); - } - - @Test - void TestNumPendingJobs() { - final var value = Map.of(AUTO_REGION, 101, EU_REGION, 20); - when(repo.numberOfPendingJobsByGeography()).thenReturn(value); - - final var emitter = new NumPendingJobs(client, repo); - emitter.Emit(); - - assertEquals(Duration.ofSeconds(15), emitter.getDuration()); - verify(repo).numberOfPendingJobsByGeography(); - verify(client).gauge(OssMetricsRegistry.NUM_PENDING_JOBS, 101, - new MetricAttribute(MetricTags.GEOGRAPHY, AUTO_REGION)); - verify(client).gauge(OssMetricsRegistry.NUM_PENDING_JOBS, 20, - new MetricAttribute(MetricTags.GEOGRAPHY, EU_REGION)); - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - - @Test - void TestNumRunningJobs() { - final var value = Map.of(SYNC_QUEUE, 
101, AWS_QUEUE, 20); - when(repo.numberOfRunningJobsByTaskQueue()).thenReturn(value); - - final var emitter = new NumRunningJobs(client, repo); - emitter.Emit(); - - assertEquals(Duration.ofSeconds(15), emitter.getDuration()); - verify(repo).numberOfRunningJobsByTaskQueue(); - verify(client).gauge(OssMetricsRegistry.NUM_RUNNING_JOBS, 101, - new MetricAttribute(MetricTags.ATTEMPT_QUEUE, SYNC_QUEUE)); - verify(client).gauge(OssMetricsRegistry.NUM_RUNNING_JOBS, 20, - new MetricAttribute(MetricTags.ATTEMPT_QUEUE, AWS_QUEUE)); - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - - @Test - void TestNumOrphanRunningJobs() { - final var value = 101; - when(repo.numberOfOrphanRunningJobs()).thenReturn(value); - - final var emitter = new NumOrphanRunningJobs(client, repo); - emitter.Emit(); - - assertEquals(Duration.ofSeconds(15), emitter.getDuration()); - verify(repo).numberOfOrphanRunningJobs(); - verify(client).gauge(OssMetricsRegistry.NUM_ORPHAN_RUNNING_JOBS, value); - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - - @Test - void TestOldestRunningJob() { - final var value = Map.of(SYNC_QUEUE, 101.0, AWS_QUEUE, 20.0); - when(repo.oldestRunningJobAgeSecsByTaskQueue()).thenReturn(value); - - final var emitter = new OldestRunningJob(client, repo); - emitter.Emit(); - - assertEquals(Duration.ofSeconds(15), emitter.getDuration()); - verify(repo).oldestRunningJobAgeSecsByTaskQueue(); - verify(client).gauge(OssMetricsRegistry.OLDEST_RUNNING_JOB_AGE_SECS, 101, - new MetricAttribute(MetricTags.ATTEMPT_QUEUE, SYNC_QUEUE)); - verify(client).gauge(OssMetricsRegistry.OLDEST_RUNNING_JOB_AGE_SECS, 20, - new MetricAttribute(MetricTags.ATTEMPT_QUEUE, AWS_QUEUE)); - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - - @Test - void TestOldestPendingJob() { - final var value = Map.of(AUTO_REGION, 101.0, EU_REGION, 20.0); - 
when(repo.oldestPendingJobAgeSecsByGeography()).thenReturn(value); - - final var emitter = new OldestPendingJob(client, repo); - emitter.Emit(); - - assertEquals(Duration.ofSeconds(15), emitter.getDuration()); - verify(repo).oldestPendingJobAgeSecsByGeography(); - verify(client).gauge(OssMetricsRegistry.OLDEST_PENDING_JOB_AGE_SECS, 101, - new MetricAttribute(MetricTags.GEOGRAPHY, AUTO_REGION)); - verify(client).gauge(OssMetricsRegistry.OLDEST_PENDING_JOB_AGE_SECS, 20, - new MetricAttribute(MetricTags.GEOGRAPHY, EU_REGION)); - - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - - @Test - void TestNumActiveConnectionsPerWorkspace() { - final var values = List.of(101L, 202L); - when(repo.numberOfActiveConnPerWorkspace()).thenReturn(values); - - final var emitter = new NumActiveConnectionsPerWorkspace(client, repo); - emitter.Emit(); - - assertEquals(Duration.ofSeconds(15), emitter.getDuration()); - verify(repo).numberOfActiveConnPerWorkspace(); - for (final var value : values) { - verify(client).distribution(OssMetricsRegistry.NUM_ACTIVE_CONN_PER_WORKSPACE, value); - } - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - - @Test - void TestNumAbnormalScheduledSyncs() { - final var value = 101; - when(repo.numberOfJobsNotRunningOnScheduleInLastDay()).thenReturn((long) value); - - final var emitter = new NumAbnormalScheduledSyncs(client, repo); - emitter.Emit(); - - assertEquals(Duration.ofHours(1), emitter.getDuration()); - verify(repo).numberOfJobsNotRunningOnScheduleInLastDay(); - verify(client).gauge(OssMetricsRegistry.NUM_ABNORMAL_SCHEDULED_SYNCS_IN_LAST_DAY, value); - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - - @Test - void TestTotalScheduledSyncs() { - final var value = 101; - when(repo.numScheduledActiveConnectionsInLastDay()).thenReturn((long) value); - - final var emitter = new TotalScheduledSyncs(client, repo); - emitter.Emit(); - - 
assertEquals(Duration.ofHours(1), emitter.getDuration()); - verify(repo).numScheduledActiveConnectionsInLastDay(); - verify(client).gauge(OssMetricsRegistry.NUM_TOTAL_SCHEDULED_SYNCS_IN_LAST_DAY, value); - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - - @Test - void TestTotalJobRuntimeByTerminalState() { - final var values = Map.of(JobStatus.cancelled, 101.0, JobStatus.succeeded, 202.0, - JobStatus.failed, 303.0); - when(repo.overallJobRuntimeForTerminalJobsInLastHour()).thenReturn(values); - - final var emitter = new TotalJobRuntimeByTerminalState(client, repo); - emitter.Emit(); - - assertEquals(Duration.ofHours(1), emitter.getDuration()); - verify(repo).overallJobRuntimeForTerminalJobsInLastHour(); - values.forEach((jobStatus, time) -> { - verify(client).distribution( - OssMetricsRegistry.OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS, time, - new MetricAttribute(MetricTags.JOB_STATUS, jobStatus.getLiteral())); - }); - verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); - } - -} diff --git a/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/MetricRepositoryTest.java b/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/MetricRepositoryTest.java deleted file mode 100644 index 85dee029d5a2..000000000000 --- a/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/MetricRepositoryTest.java +++ /dev/null @@ -1,680 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.metrics.reporter; - -import static io.airbyte.db.instance.configs.jooq.generated.Keys.ACTOR_CATALOG_FETCH_EVENT__ACTOR_CATALOG_FETCH_EVENT_ACTOR_ID_FKEY; -import static io.airbyte.db.instance.configs.jooq.generated.Keys.ACTOR__ACTOR_WORKSPACE_ID_FKEY; -import static io.airbyte.db.instance.configs.jooq.generated.Keys.CONNECTION__CONNECTION_DESTINATION_ID_FKEY; -import static io.airbyte.db.instance.configs.jooq.generated.Keys.CONNECTION__CONNECTION_SOURCE_ID_FKEY; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_CATALOG_FETCH_EVENT; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.ATTEMPTS; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; -import io.airbyte.db.instance.configs.jooq.generated.enums.GeographyType; -import io.airbyte.db.instance.configs.jooq.generated.enums.NamespaceDefinitionType; -import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; -import io.airbyte.db.instance.configs.jooq.generated.enums.StatusType; -import io.airbyte.db.instance.jobs.jooq.generated.enums.AttemptStatus; -import io.airbyte.db.instance.jobs.jooq.generated.enums.JobConfigType; -import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus; -import io.airbyte.db.instance.test.TestDatabaseProviders; -import io.airbyte.test.utils.DatabaseConnectionHelper; -import 
java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.time.temporal.ChronoUnit; -import java.util.Map; -import java.util.UUID; -import org.jooq.DSLContext; -import org.jooq.JSONB; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.PostgreSQLContainer; - -class MetricRepositoryTest { - - private static final String SRC = "src"; - private static final String DEST = "dst"; - private static final String CONN = "conn"; - private static final String SYNC_QUEUE = "SYNC"; - private static final String AWS_SYNC_QUEUE = "AWS_PARIS_SYNC"; - private static final String AUTO_REGION = "AUTO"; - private static final String EU_REGION = "EU"; - - private static final UUID SRC_DEF_ID = UUID.randomUUID(); - private static final UUID DST_DEF_ID = UUID.randomUUID(); - private static MetricRepository db; - private static DSLContext ctx; - - @BeforeAll - public static void setUpAll() throws DatabaseInitializationException, IOException { - final var psqlContainer = new PostgreSQLContainer<>("postgres:13-alpine") - .withUsername("user") - .withPassword("hunter2"); - psqlContainer.start(); - - final var dataSource = DatabaseConnectionHelper.createDataSource(psqlContainer); - ctx = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); - final var dbProviders = new TestDatabaseProviders(dataSource, ctx); - dbProviders.createNewConfigsDatabase(); - dbProviders.createNewJobsDatabase(); - - ctx.insertInto(ACTOR_DEFINITION, ACTOR_DEFINITION.ID, ACTOR_DEFINITION.NAME, ACTOR_DEFINITION.DOCKER_REPOSITORY, - ACTOR_DEFINITION.DOCKER_IMAGE_TAG, ACTOR_DEFINITION.SPEC, ACTOR_DEFINITION.ACTOR_TYPE, ACTOR_DEFINITION.RELEASE_STAGE) - .values(SRC_DEF_ID, "srcDef", "repository", "tag", JSONB.valueOf("{}"), 
ActorType.source, ReleaseStage.beta) - .values(DST_DEF_ID, "dstDef", "repository", "tag", JSONB.valueOf("{}"), ActorType.destination, ReleaseStage.generally_available) - .values(UUID.randomUUID(), "dstDef", "repository", "tag", JSONB.valueOf("{}"), ActorType.destination, ReleaseStage.alpha) - .execute(); - - // drop constraints to simplify test set up - ctx.alterTable(ACTOR).dropForeignKey(ACTOR__ACTOR_WORKSPACE_ID_FKEY.constraint()).execute(); - ctx.alterTable(CONNECTION).dropForeignKey(CONNECTION__CONNECTION_DESTINATION_ID_FKEY.constraint()).execute(); - ctx.alterTable(CONNECTION).dropForeignKey(CONNECTION__CONNECTION_SOURCE_ID_FKEY.constraint()).execute(); - ctx.alterTable(ACTOR_CATALOG_FETCH_EVENT) - .dropForeignKey(ACTOR_CATALOG_FETCH_EVENT__ACTOR_CATALOG_FETCH_EVENT_ACTOR_ID_FKEY.constraint()).execute(); - ctx.alterTable(WORKSPACE).alter(WORKSPACE.SLUG).dropNotNull().execute(); - ctx.alterTable(WORKSPACE).alter(WORKSPACE.INITIAL_SETUP_COMPLETE).dropNotNull().execute(); - - db = new MetricRepository(ctx); - } - - @BeforeEach - void setUp() { - ctx.truncate(ACTOR).execute(); - ctx.truncate(CONNECTION).cascade().execute(); - ctx.truncate(JOBS).cascade().execute(); - ctx.truncate(ATTEMPTS).cascade().execute(); - ctx.truncate(WORKSPACE).cascade().execute(); - } - - @AfterEach - void tearDown() { - - } - - @Nested - class NumJobs { - - @Test - void shouldReturnReleaseStages() { - ctx.insertInto(ATTEMPTS, ATTEMPTS.ID, ATTEMPTS.JOB_ID, ATTEMPTS.STATUS, ATTEMPTS.PROCESSING_TASK_QUEUE) - .values(10L, 1L, AttemptStatus.running, SYNC_QUEUE).values(20L, 2L, AttemptStatus.running, SYNC_QUEUE) - .values(30L, 3L, AttemptStatus.running, SYNC_QUEUE).values(40L, 4L, AttemptStatus.running, AWS_SYNC_QUEUE) - .values(50L, 5L, AttemptStatus.running, SYNC_QUEUE) - .execute(); - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - final var activeConnectionId = UUID.randomUUID(); - final var inactiveConnectionId = UUID.randomUUID(); - 
ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.STATUS, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, - CONNECTION.DESTINATION_ID, CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL) - .values(activeConnectionId, StatusType.active, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true) - .values(inactiveConnectionId, StatusType.inactive, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true) - .execute(); - - // non-pending jobs - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(1L, activeConnectionId.toString(), JobStatus.pending) - .execute(); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(2L, activeConnectionId.toString(), JobStatus.failed) - .values(3L, activeConnectionId.toString(), JobStatus.running) - .values(4L, activeConnectionId.toString(), JobStatus.running) - .values(5L, inactiveConnectionId.toString(), JobStatus.running) - .execute(); - - assertEquals(1, db.numberOfRunningJobsByTaskQueue().get(SYNC_QUEUE)); - assertEquals(1, db.numberOfRunningJobsByTaskQueue().get(AWS_SYNC_QUEUE)); - // To test we send 0 for 'null' to overwrite previous bug. 
- assertEquals(0, db.numberOfRunningJobsByTaskQueue().get("null")); - assertEquals(1, db.numberOfOrphanRunningJobs()); - } - - @Test - void runningJobsShouldReturnZero() throws SQLException { - // non-pending jobs - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(1L, "", JobStatus.pending).execute(); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(2L, "", JobStatus.failed).execute(); - - final var result = db.numberOfRunningJobsByTaskQueue(); - assertEquals(result.get(SYNC_QUEUE), 0); - assertEquals(result.get(AWS_SYNC_QUEUE), 0); - } - - @Test - void pendingJobsShouldReturnCorrectCount() throws SQLException { - // non-pending jobs - final var connectionUuid = UUID.randomUUID(); - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL, CONNECTION.STATUS, CONNECTION.GEOGRAPHY) - .values(connectionUuid, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.active, - GeographyType.valueOf(EU_REGION)) - .execute(); - - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(1L, connectionUuid.toString(), JobStatus.pending) - .values(2L, connectionUuid.toString(), JobStatus.failed) - .values(3L, connectionUuid.toString(), JobStatus.pending) - .values(4L, connectionUuid.toString(), JobStatus.running) - .execute(); - - final var res = db.numberOfPendingJobsByGeography(); - assertEquals(2, res.get(EU_REGION)); - assertEquals(0, res.get(AUTO_REGION)); - } - - @Test - void pendingJobsShouldReturnZero() throws SQLException { - final var connectionUuid = UUID.randomUUID(); - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, 
CONNECTION.CATALOG, CONNECTION.MANUAL, CONNECTION.STATUS, CONNECTION.GEOGRAPHY) - .values(connectionUuid, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.active, - GeographyType.valueOf(EU_REGION)) - .execute(); - - // non-pending jobs - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(1L, connectionUuid.toString(), JobStatus.running) - .values(2L, connectionUuid.toString(), JobStatus.failed) - .execute(); - - final var result = db.numberOfPendingJobsByGeography(); - assertEquals(result.get(AUTO_REGION), 0); - assertEquals(result.get(EU_REGION), 0); - } - - } - - @Nested - class OldestPendingJob { - - @Test - void shouldReturnOnlyPendingSeconds() throws SQLException { - final var expAgeSecs = 1000; - final var oldestCreateAt = OffsetDateTime.now().minus(expAgeSecs, ChronoUnit.SECONDS); - final var connectionUuid = UUID.randomUUID(); - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL, CONNECTION.STATUS, CONNECTION.GEOGRAPHY) - .values(connectionUuid, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.active, - GeographyType.valueOf(EU_REGION)) - .execute(); - - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT) - // oldest pending job - .values(1L, connectionUuid.toString(), JobStatus.pending, oldestCreateAt) - // second-oldest pending job - .values(2L, connectionUuid.toString(), JobStatus.pending, OffsetDateTime.now()) - .execute(); - // non-pending jobs - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(3L, connectionUuid.toString(), JobStatus.running) - .values(4L, connectionUuid.toString(), JobStatus.failed) - .execute(); - - Double result = db.oldestPendingJobAgeSecsByGeography().get(EU_REGION); - // expected age is 1000 
seconds, but allow for +/- 1 second to account for timing/rounding errors - assertTrue(999 < result && result < 1001); - } - - @Test - void shouldReturnNothingIfNotApplicable() { - final var connectionUuid = UUID.randomUUID(); - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL, CONNECTION.STATUS, CONNECTION.GEOGRAPHY) - .values(connectionUuid, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.active, GeographyType.EU) - .execute(); - - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(1L, connectionUuid.toString(), JobStatus.succeeded) - .values(2L, connectionUuid.toString(), JobStatus.running) - .values(3L, connectionUuid.toString(), JobStatus.failed).execute(); - - final var result = db.oldestPendingJobAgeSecsByGeography(); - assertEquals(result.get(EU_REGION), 0.0); - assertEquals(result.get(AUTO_REGION), 0.0); - } - - } - - @Nested - class OldestRunningJob { - - @Test - void shouldReturnOnlyRunningSeconds() { - final var expAgeSecs = 10000; - final var oldestCreateAt = OffsetDateTime.now().minus(expAgeSecs, ChronoUnit.SECONDS); - ctx.insertInto(ATTEMPTS, ATTEMPTS.ID, ATTEMPTS.JOB_ID, ATTEMPTS.STATUS, ATTEMPTS.PROCESSING_TASK_QUEUE) - .values(10L, 1L, AttemptStatus.running, SYNC_QUEUE).values(20L, 2L, AttemptStatus.running, SYNC_QUEUE) - .execute(); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT) - // oldest pending job - .values(1L, "", JobStatus.running, oldestCreateAt) - // second-oldest pending job - .values(2L, "", JobStatus.running, OffsetDateTime.now()) - .execute(); - - // non-pending jobs - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(3L, "", JobStatus.pending) - .values(4L, "", JobStatus.failed) - .execute(); - - final var result = 
db.oldestRunningJobAgeSecsByTaskQueue(); - // expected age is 1000 seconds, but allow for +/- 1 second to account for timing/rounding errors - assertTrue(9999 < result.get(SYNC_QUEUE) && result.get(SYNC_QUEUE) < 10001L); - assertEquals(result.get(AWS_SYNC_QUEUE), 0.0); - } - - @Test - void shouldReturnNothingIfNotApplicable() { - ctx.insertInto(ATTEMPTS, ATTEMPTS.ID, ATTEMPTS.JOB_ID, ATTEMPTS.PROCESSING_TASK_QUEUE).values(10L, 1L, SYNC_QUEUE).values(20L, 2L, SYNC_QUEUE) - .values(30L, 3L, SYNC_QUEUE).execute(); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(1L, "", JobStatus.succeeded) - .values(2L, "", JobStatus.pending) - .values(3L, "", JobStatus.failed) - .execute(); - - final var result = db.oldestRunningJobAgeSecsByTaskQueue(); - assertEquals(result.get(SYNC_QUEUE), 0.0); - assertEquals(result.get(AWS_SYNC_QUEUE), 0.0); - } - - } - - @Nested - class NumActiveConnsPerWorkspace { - - @Test - void shouldReturnNumConnectionsBasic() { - final var workspaceId = UUID.randomUUID(); - ctx.insertInto(WORKSPACE, WORKSPACE.ID, WORKSPACE.NAME, WORKSPACE.TOMBSTONE) - .values(workspaceId, "test-0", false) - .execute(); - - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - ctx.insertInto(ACTOR, ACTOR.ID, ACTOR.WORKSPACE_ID, ACTOR.ACTOR_DEFINITION_ID, ACTOR.NAME, ACTOR.CONFIGURATION, ACTOR.ACTOR_TYPE, - ACTOR.TOMBSTONE) - .values(srcId, workspaceId, SRC_DEF_ID, SRC, JSONB.valueOf("{}"), ActorType.source, false) - .values(dstId, workspaceId, DST_DEF_ID, DEST, JSONB.valueOf("{}"), ActorType.destination, false) - .execute(); - - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL, CONNECTION.STATUS) - .values(UUID.randomUUID(), NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.active) - .values(UUID.randomUUID(), NamespaceDefinitionType.source, srcId, dstId, CONN, 
JSONB.valueOf("{}"), true, StatusType.active) - .execute(); - - final var res = db.numberOfActiveConnPerWorkspace(); - assertEquals(1, res.size()); - assertEquals(2, res.get(0)); - } - - @Test - @DisplayName("should ignore deleted connections") - void shouldIgnoreNonRunningConnections() { - final var workspaceId = UUID.randomUUID(); - ctx.insertInto(WORKSPACE, WORKSPACE.ID, WORKSPACE.NAME, WORKSPACE.TOMBSTONE) - .values(workspaceId, "test-0", false) - .execute(); - - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - ctx.insertInto(ACTOR, ACTOR.ID, ACTOR.WORKSPACE_ID, ACTOR.ACTOR_DEFINITION_ID, ACTOR.NAME, ACTOR.CONFIGURATION, ACTOR.ACTOR_TYPE, - ACTOR.TOMBSTONE) - .values(srcId, workspaceId, SRC_DEF_ID, SRC, JSONB.valueOf("{}"), ActorType.source, false) - .values(dstId, workspaceId, DST_DEF_ID, DEST, JSONB.valueOf("{}"), ActorType.destination, false) - .execute(); - - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL, CONNECTION.STATUS) - .values(UUID.randomUUID(), NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.active) - .values(UUID.randomUUID(), NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.active) - .values(UUID.randomUUID(), NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.deprecated) - .values(UUID.randomUUID(), NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.inactive) - .execute(); - - final var res = db.numberOfActiveConnPerWorkspace(); - assertEquals(1, res.size()); - assertEquals(2, res.get(0)); - } - - @Test - @DisplayName("should ignore deleted connections") - void shouldIgnoreDeletedWorkspaces() { - final var workspaceId = UUID.randomUUID(); - ctx.insertInto(WORKSPACE, WORKSPACE.ID, WORKSPACE.NAME, WORKSPACE.TOMBSTONE) - 
.values(workspaceId, "test-0", true) - .execute(); - - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - ctx.insertInto(ACTOR, ACTOR.ID, ACTOR.WORKSPACE_ID, ACTOR.ACTOR_DEFINITION_ID, ACTOR.NAME, ACTOR.CONFIGURATION, ACTOR.ACTOR_TYPE, - ACTOR.TOMBSTONE) - .values(srcId, workspaceId, SRC_DEF_ID, SRC, JSONB.valueOf("{}"), ActorType.source, false) - .values(dstId, workspaceId, DST_DEF_ID, DEST, JSONB.valueOf("{}"), ActorType.destination, false) - .execute(); - - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.MANUAL, CONNECTION.STATUS) - .values(UUID.randomUUID(), NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), true, StatusType.active) - .execute(); - - final var res = db.numberOfActiveConnPerWorkspace(); - assertEquals(0, res.size()); - } - - @Test - void shouldReturnNothingIfNotApplicable() { - final var res = db.numberOfActiveConnPerWorkspace(); - assertEquals(0, res.size()); - } - - } - - @Nested - class OverallJobRuntimeForTerminalJobsInLastHour { - - @Test - void shouldIgnoreNonTerminalJobs() throws SQLException { - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS) - .values(1L, "", JobStatus.running) - .values(2L, "", JobStatus.incomplete) - .values(3L, "", JobStatus.pending) - .execute(); - - final var res = db.overallJobRuntimeForTerminalJobsInLastHour(); - assertEquals(0, res.size()); - } - - @Test - void shouldIgnoreJobsOlderThan1Hour() { - final var updateAt = OffsetDateTime.now().minus(2, ChronoUnit.HOURS); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.UPDATED_AT).values(1L, "", JobStatus.succeeded, updateAt).execute(); - - final var res = db.overallJobRuntimeForTerminalJobsInLastHour(); - assertEquals(0, res.size()); - } - - @Test - @DisplayName("should return correct duration for terminal jobs") - void shouldReturnTerminalJobs() { - final var updateAt = 
OffsetDateTime.now(); - final var expAgeSecs = 10000; - final var createAt = updateAt.minus(expAgeSecs, ChronoUnit.SECONDS); - - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) - .values(1L, "", JobStatus.succeeded, createAt, updateAt) - .execute(); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) - .values(2L, "", JobStatus.failed, createAt, updateAt) - .execute(); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) - .values(3L, "", JobStatus.cancelled, createAt, updateAt) - .execute(); - - final var res = db.overallJobRuntimeForTerminalJobsInLastHour(); - assertEquals(3, res.size()); - - final var exp = Map.of( - JobStatus.succeeded, expAgeSecs * 1.0, - JobStatus.cancelled, expAgeSecs * 1.0, - JobStatus.failed, expAgeSecs * 1.0); - assertEquals(exp, res); - } - - @Test - void shouldReturnTerminalJobsComplex() { - final var updateAtNow = OffsetDateTime.now(); - final var expAgeSecs = 10000; - final var createAt = updateAtNow.minus(expAgeSecs, ChronoUnit.SECONDS); - - // terminal jobs in last hour - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) - .values(1L, "", JobStatus.succeeded, createAt, updateAtNow) - .execute(); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) - .values(2L, "", JobStatus.failed, createAt, updateAtNow) - .execute(); - - // old terminal jobs - final var updateAtOld = OffsetDateTime.now().minus(2, ChronoUnit.HOURS); - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) - .values(3L, "", JobStatus.cancelled, createAt, updateAtOld) - .execute(); - - // non-terminal jobs - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT) - .values(4L, "", JobStatus.running, createAt) - .execute(); - - final var res = db.overallJobRuntimeForTerminalJobsInLastHour(); - assertEquals(2, res.size()); - - 
final var exp = Map.of( - JobStatus.succeeded, expAgeSecs * 1.0, - JobStatus.failed, expAgeSecs * 1.0); - assertEquals(exp, res); - } - - @Test - void shouldReturnNothingIfNotApplicable() { - final var res = db.overallJobRuntimeForTerminalJobsInLastHour(); - assertEquals(0, res.size()); - } - - } - - @Nested - class AbnormalJobsInLastDay { - - @Test - void shouldCountInJobsWithMissingRun() throws SQLException { - final var updateAt = OffsetDateTime.now().minus(300, ChronoUnit.HOURS); - final var connectionId = UUID.randomUUID(); - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - final var syncConfigType = JobConfigType.sync; - - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.SCHEDULE, CONNECTION.MANUAL, CONNECTION.STATUS, CONNECTION.CREATED_AT, - CONNECTION.UPDATED_AT) - .values(connectionId, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), - JSONB.valueOf("{\"units\": 6, \"timeUnit\": \"hours\"}"), false, StatusType.active, updateAt, updateAt) - .execute(); - - // Jobs running in prior day will not be counted - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT, JOBS.CONFIG_TYPE) - .values(100L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(28, ChronoUnit.HOURS), updateAt, syncConfigType) - .values(1L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(20, ChronoUnit.HOURS), updateAt, syncConfigType) - .values(2L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(10, ChronoUnit.HOURS), updateAt, syncConfigType) - .values(3L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(5, ChronoUnit.HOURS), updateAt, syncConfigType) - .execute(); - - final var totalConnectionResult = db.numScheduledActiveConnectionsInLastDay(); - assertEquals(1, totalConnectionResult); 
- - final var abnormalConnectionResult = db.numberOfJobsNotRunningOnScheduleInLastDay(); - assertEquals(1, abnormalConnectionResult); - } - - @Test - void shouldNotCountNormalJobsInAbnormalMetric() { - final var updateAt = OffsetDateTime.now().minus(300, ChronoUnit.HOURS); - final var inactiveConnectionId = UUID.randomUUID(); - final var activeConnectionId = UUID.randomUUID(); - final var srcId = UUID.randomUUID(); - final var dstId = UUID.randomUUID(); - final var syncConfigType = JobConfigType.sync; - - ctx.insertInto(CONNECTION, CONNECTION.ID, CONNECTION.NAMESPACE_DEFINITION, CONNECTION.SOURCE_ID, CONNECTION.DESTINATION_ID, - CONNECTION.NAME, CONNECTION.CATALOG, CONNECTION.SCHEDULE, CONNECTION.MANUAL, CONNECTION.STATUS, CONNECTION.CREATED_AT, - CONNECTION.UPDATED_AT) - .values(inactiveConnectionId, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), - JSONB.valueOf("{\"units\": 12, \"timeUnit\": \"hours\"}"), false, StatusType.inactive, updateAt, updateAt) - .values(activeConnectionId, NamespaceDefinitionType.source, srcId, dstId, CONN, JSONB.valueOf("{}"), - JSONB.valueOf("{\"units\": 12, \"timeUnit\": \"hours\"}"), false, StatusType.active, updateAt, updateAt) - .execute(); - - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT, JOBS.CONFIG_TYPE) - .values(1L, activeConnectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(20, ChronoUnit.HOURS), updateAt, - syncConfigType) - .values(2L, activeConnectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(10, ChronoUnit.HOURS), updateAt, - syncConfigType) - .execute(); - - final var totalConnectionResult = db.numScheduledActiveConnectionsInLastDay(); - assertEquals(1, totalConnectionResult); - - final var abnormalConnectionResult = db.numberOfJobsNotRunningOnScheduleInLastDay(); - assertEquals(0, abnormalConnectionResult); - } - - } - - @Nested - class UnusuallyLongJobs { - - @Test - void shouldCountInJobsWithUnusuallyLongTime() 
throws SQLException { - final var connectionId = UUID.randomUUID(); - final var syncConfigType = JobConfigType.sync; - - // Current job has been running for 12 hours while the previous 5 jobs runs 2 hours. Avg will be 2 - // hours. - // Thus latest job will be counted as an unusually long-running job. - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT, JOBS.CONFIG_TYPE) - .values(100L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(28, ChronoUnit.HOURS), - OffsetDateTime.now().minus(26, ChronoUnit.HOURS), syncConfigType) - .values(1L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(20, ChronoUnit.HOURS), - OffsetDateTime.now().minus(18, ChronoUnit.HOURS), syncConfigType) - .values(2L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(18, ChronoUnit.HOURS), - OffsetDateTime.now().minus(16, ChronoUnit.HOURS), syncConfigType) - .values(3L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(16, ChronoUnit.HOURS), - OffsetDateTime.now().minus(14, ChronoUnit.HOURS), syncConfigType) - .values(4L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(14, ChronoUnit.HOURS), - OffsetDateTime.now().minus(12, ChronoUnit.HOURS), syncConfigType) - .values(5L, connectionId.toString(), JobStatus.running, OffsetDateTime.now().minus(12, ChronoUnit.HOURS), - OffsetDateTime.now().minus(12, ChronoUnit.HOURS), syncConfigType) - .execute(); - - ctx.insertInto(ATTEMPTS, ATTEMPTS.ID, ATTEMPTS.JOB_ID, ATTEMPTS.STATUS, ATTEMPTS.CREATED_AT, ATTEMPTS.UPDATED_AT) - .values(100L, 100L, AttemptStatus.succeeded, OffsetDateTime.now().minus(28, ChronoUnit.HOURS), - OffsetDateTime.now().minus(26, ChronoUnit.HOURS)) - .values(1L, 1L, AttemptStatus.succeeded, OffsetDateTime.now().minus(20, ChronoUnit.HOURS), - OffsetDateTime.now().minus(18, ChronoUnit.HOURS)) - .values(2L, 2L, AttemptStatus.succeeded, OffsetDateTime.now().minus(18, ChronoUnit.HOURS), 
- OffsetDateTime.now().minus(16, ChronoUnit.HOURS)) - .values(3L, 3L, AttemptStatus.succeeded, OffsetDateTime.now().minus(16, ChronoUnit.HOURS), - OffsetDateTime.now().minus(14, ChronoUnit.HOURS)) - .values(4L, 4L, AttemptStatus.succeeded, OffsetDateTime.now().minus(14, ChronoUnit.HOURS), - OffsetDateTime.now().minus(12, ChronoUnit.HOURS)) - .values(5L, 5L, AttemptStatus.running, OffsetDateTime.now().minus(12, ChronoUnit.HOURS), - OffsetDateTime.now().minus(12, ChronoUnit.HOURS)) - .execute(); - - final var numOfJubsRunningUnusallyLong = db.numberOfJobsRunningUnusuallyLong(); - assertEquals(1, numOfJubsRunningUnusallyLong); - } - - @Test - void shouldNotCountInJobsWithinFifteenMinutes() throws SQLException { - final var connectionId = UUID.randomUUID(); - final var syncConfigType = JobConfigType.sync; - - // Latest job runs 14 minutes while the previous 5 jobs runs average about 3 minutes. - // Despite it has been more than 2x than avg it's still within 15 minutes threshold, thus this - // shouldn't be - // counted in. 
- ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT, JOBS.CONFIG_TYPE) - .values(100L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(28, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(26, ChronoUnit.MINUTES), syncConfigType) - .values(1L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(20, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(18, ChronoUnit.MINUTES), syncConfigType) - .values(2L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(18, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(16, ChronoUnit.MINUTES), syncConfigType) - .values(3L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(16, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(14, ChronoUnit.MINUTES), syncConfigType) - .values(4L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(14, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(2, ChronoUnit.MINUTES), syncConfigType) - .values(5L, connectionId.toString(), JobStatus.running, OffsetDateTime.now().minus(14, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(2, ChronoUnit.MINUTES), syncConfigType) - .execute(); - - ctx.insertInto(ATTEMPTS, ATTEMPTS.ID, ATTEMPTS.JOB_ID, ATTEMPTS.STATUS, ATTEMPTS.CREATED_AT, ATTEMPTS.UPDATED_AT) - .values(100L, 100L, AttemptStatus.succeeded, OffsetDateTime.now().minus(28, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(26, ChronoUnit.MINUTES)) - .values(1L, 1L, AttemptStatus.succeeded, OffsetDateTime.now().minus(20, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(18, ChronoUnit.MINUTES)) - .values(2L, 2L, AttemptStatus.succeeded, OffsetDateTime.now().minus(18, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(16, ChronoUnit.MINUTES)) - .values(3L, 3L, AttemptStatus.succeeded, OffsetDateTime.now().minus(26, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(14, ChronoUnit.MINUTES)) - .values(4L, 4L, AttemptStatus.succeeded, 
OffsetDateTime.now().minus(18, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(17, ChronoUnit.MINUTES)) - .values(5L, 5L, AttemptStatus.running, OffsetDateTime.now().minus(14, ChronoUnit.MINUTES), - OffsetDateTime.now().minus(14, ChronoUnit.MINUTES)) - .execute(); - - final var numOfJubsRunningUnusallyLong = db.numberOfJobsRunningUnusuallyLong(); - assertEquals(0, numOfJubsRunningUnusallyLong); - } - - @Test - void shouldSkipInsufficientJobRuns() throws SQLException { - final var connectionId = UUID.randomUUID(); - final var syncConfigType = JobConfigType.sync; - - // Require at least 5 runs in last week to get meaningful average runtime. - ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT, JOBS.CONFIG_TYPE) - .values(100L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(28, ChronoUnit.HOURS), - OffsetDateTime.now().minus(26, ChronoUnit.HOURS), syncConfigType) - .values(1L, connectionId.toString(), JobStatus.succeeded, OffsetDateTime.now().minus(20, ChronoUnit.HOURS), - OffsetDateTime.now().minus(18, ChronoUnit.HOURS), syncConfigType) - .values(2L, connectionId.toString(), JobStatus.running, OffsetDateTime.now().minus(18, ChronoUnit.HOURS), - OffsetDateTime.now().minus(1, ChronoUnit.HOURS), syncConfigType) - .execute(); - - ctx.insertInto(ATTEMPTS, ATTEMPTS.ID, ATTEMPTS.JOB_ID, ATTEMPTS.STATUS, ATTEMPTS.CREATED_AT, ATTEMPTS.UPDATED_AT) - .values(100L, 100L, AttemptStatus.succeeded, OffsetDateTime.now().minus(28, ChronoUnit.HOURS), - OffsetDateTime.now().minus(26, ChronoUnit.HOURS)) - .values(1L, 1L, AttemptStatus.succeeded, OffsetDateTime.now().minus(20, ChronoUnit.HOURS), - OffsetDateTime.now().minus(18, ChronoUnit.HOURS)) - .values(2L, 2L, AttemptStatus.running, OffsetDateTime.now().minus(18, ChronoUnit.HOURS), - OffsetDateTime.now().minus(1, ChronoUnit.HOURS)) - .execute(); - - final var numOfJubsRunningUnusallyLong = db.numberOfJobsRunningUnusuallyLong(); - assertEquals(0, 
numOfJubsRunningUnusallyLong); - } - - } - -} diff --git a/airbyte-oauth/build.gradle b/airbyte-oauth/build.gradle deleted file mode 100644 index e13237785b56..000000000000 --- a/airbyte-oauth/build.gradle +++ /dev/null @@ -1,14 +0,0 @@ -plugins { - id "java-library" - id 'airbyte-integration-test-java' -} - -dependencies { - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') - implementation project(':airbyte-json-validation') - implementation libs.airbyte.protocol - testImplementation project(':airbyte-oauth') -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-oauth/readme.md b/airbyte-oauth/readme.md deleted file mode 100644 index aef6d28357c3..000000000000 --- a/airbyte-oauth/readme.md +++ /dev/null @@ -1,7 +0,0 @@ -# airbyte-oauth - -Library for request handling for OAuth Connectors. While Connectors define many OAuth attributes in their spec, the request sequence is executed in the `airbyte-server`. This module contains that logic. - -## Key Files -* `OAuthFlowImplementation.java` - interface that a source has to implement in order to do OAuth with Airbyte. -* `OAuthImplementationFactory.java` - catalog of the sources for which we support OAuth. diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuth2Flow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuth2Flow.java deleted file mode 100644 index 7cc984731356..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuth2Flow.java +++ /dev/null @@ -1,334 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.protocol.models.OAuthConfigSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.lang.reflect.Type; -import java.net.URI; -import java.net.URLEncoder; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Function; -import java.util.function.Supplier; -import org.apache.commons.lang3.RandomStringUtils; - -/** - * Abstract Class factoring common behavior for oAuth 2.0 flow implementations - */ -public abstract class BaseOAuth2Flow extends BaseOAuthFlow { - - /** - * Simple enum of content type strings and their respective encoding functions used for POSTing the - * access token request - */ - public enum TOKEN_REQUEST_CONTENT_TYPE { - - URL_ENCODED("application/x-www-form-urlencoded", BaseOAuth2Flow::toUrlEncodedString), - JSON("application/json", BaseOAuth2Flow::toJson); - - String contentType; - Function, String> converter; - - public String getContentType() { - return contentType; - } - - public Function, String> getConverter() { - return converter; - } - - TOKEN_REQUEST_CONTENT_TYPE(final String contentType, final Function, String> converter) { - this.contentType = contentType; - this.converter = converter; - } - - } - - protected final HttpClient httpClient; - protected final TOKEN_REQUEST_CONTENT_TYPE tokenReqContentType; - private final Supplier 
stateSupplier; - - public BaseOAuth2Flow(final ConfigRepository configRepository, final HttpClient httpClient) { - this(configRepository, httpClient, BaseOAuth2Flow::generateRandomState); - } - - public BaseOAuth2Flow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - this(configRepository, httpClient, stateSupplier, TOKEN_REQUEST_CONTENT_TYPE.URL_ENCODED); - } - - public BaseOAuth2Flow(final ConfigRepository configRepository, - final HttpClient httpClient, - final Supplier stateSupplier, - final TOKEN_REQUEST_CONTENT_TYPE tokenReqContentType) { - super(configRepository); - this.httpClient = httpClient; - this.stateSupplier = stateSupplier; - this.tokenReqContentType = tokenReqContentType; - } - - @Override - public String getSourceConsentUrl(final UUID workspaceId, - final UUID sourceDefinitionId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final OAuthConfigSpecification oAuthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException { - validateInputOAuthConfiguration(oAuthConfigSpecification, inputOAuthConfiguration); - final JsonNode oAuthParamConfig = getSourceOAuthParamConfig(workspaceId, sourceDefinitionId); - return formatConsentUrl(sourceDefinitionId, getClientIdUnsafe(oAuthParamConfig), redirectUrl, inputOAuthConfiguration); - } - - @Override - public String getDestinationConsentUrl(final UUID workspaceId, - final UUID destinationDefinitionId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final OAuthConfigSpecification oAuthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException { - validateInputOAuthConfiguration(oAuthConfigSpecification, inputOAuthConfiguration); - final JsonNode oAuthParamConfig = getDestinationOAuthParamConfig(workspaceId, destinationDefinitionId); - return formatConsentUrl(destinationDefinitionId, getClientIdUnsafe(oAuthParamConfig), redirectUrl, 
inputOAuthConfiguration); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. - * - * @param definitionId The configured definition ID of this client - * @param clientId The configured client ID - * @param redirectUrl the redirect URL - * @param inputOAuthConfiguration any configuration property from connector necessary for this OAuth - * Flow - */ - protected abstract String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException; - - private static String generateRandomState() { - return RandomStringUtils.randomAlphanumeric(7); - } - - /** - * Generate a string to use as state in the OAuth process. - */ - protected String getState() { - return stateSupplier.get(); - } - - @Override - @Deprecated - public Map completeSourceOAuth(final UUID workspaceId, - final UUID sourceDefinitionId, - final Map queryParams, - final String redirectUrl) - throws IOException, ConfigNotFoundException { - final JsonNode oAuthParamConfig = getSourceOAuthParamConfig(workspaceId, sourceDefinitionId); - return formatOAuthOutput( - oAuthParamConfig, - completeOAuthFlow( - getClientIdUnsafe(oAuthParamConfig), - getClientSecretUnsafe(oAuthParamConfig), - extractCodeParameter(queryParams), - redirectUrl, - Jsons.emptyObject(), - oAuthParamConfig), - getDefaultOAuthOutputPath()); - } - - @Override - @Deprecated - public Map completeDestinationOAuth(final UUID workspaceId, - final UUID destinationDefinitionId, - final Map queryParams, - final String redirectUrl) - throws IOException, ConfigNotFoundException { - final JsonNode oAuthParamConfig = getDestinationOAuthParamConfig(workspaceId, destinationDefinitionId); - return formatOAuthOutput( - oAuthParamConfig, - completeOAuthFlow( - getClientIdUnsafe(oAuthParamConfig), - 
getClientSecretUnsafe(oAuthParamConfig), - extractCodeParameter(queryParams), - redirectUrl, - Jsons.emptyObject(), - oAuthParamConfig), - getDefaultOAuthOutputPath()); - } - - @Override - public Map completeSourceOAuth(final UUID workspaceId, - final UUID sourceDefinitionId, - final Map queryParams, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final OAuthConfigSpecification oAuthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException { - validateInputOAuthConfiguration(oAuthConfigSpecification, inputOAuthConfiguration); - final JsonNode oAuthParamConfig = getSourceOAuthParamConfig(workspaceId, sourceDefinitionId); - return formatOAuthOutput( - oAuthParamConfig, - completeOAuthFlow( - getClientIdUnsafe(oAuthParamConfig), - getClientSecretUnsafe(oAuthParamConfig), - extractCodeParameter(queryParams), - redirectUrl, - inputOAuthConfiguration, - oAuthParamConfig), - oAuthConfigSpecification); - } - - @Override - public Map completeDestinationOAuth(final UUID workspaceId, - final UUID destinationDefinitionId, - final Map queryParams, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final OAuthConfigSpecification oAuthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException { - validateInputOAuthConfiguration(oAuthConfigSpecification, inputOAuthConfiguration); - final JsonNode oAuthParamConfig = getDestinationOAuthParamConfig(workspaceId, destinationDefinitionId); - return formatOAuthOutput( - oAuthParamConfig, - completeOAuthFlow( - getClientIdUnsafe(oAuthParamConfig), - getClientSecretUnsafe(oAuthParamConfig), - extractCodeParameter(queryParams), - redirectUrl, - inputOAuthConfiguration, - oAuthParamConfig), - oAuthConfigSpecification); - } - - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode 
oAuthParamConfig) - throws IOException { - final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); - final HttpRequest request = HttpRequest.newBuilder() - .POST(HttpRequest.BodyPublishers - .ofString(tokenReqContentType.converter.apply(getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) - .uri(URI.create(accessTokenUrl)) - .header("Content-Type", tokenReqContentType.contentType) - .header("Accept", "application/json") - .build(); - // TODO: Handle error response to report better messages - try { - final HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); - } catch (final InterruptedException e) { - throw new IOException("Failed to complete OAuth flow", e); - } - } - - /** - * Query parameters to provide the access token url with. - */ - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("code", authCode) - .build(); - } - - /** - * Once the user is redirected after getting their consent, the API should redirect them to a - * specific redirection URL along with query parameters. This function should parse and extract the - * code from these query parameters in order to continue the OAuth Flow. - */ - protected String extractCodeParameter(final Map queryParams) throws IOException { - if (queryParams.containsKey("code")) { - return (String) queryParams.get("code"); - } else { - throw new IOException("Undefined 'code' from consent redirected url."); - } - } - - /** - * Returns the URL where to retrieve the access token from. 
- */ - protected abstract String getAccessTokenUrl(final JsonNode inputOAuthConfiguration); - - /** - * Extract all OAuth outputs from distant API response and store them in a flat map. - */ - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - if (data.has("refresh_token")) { - result.put("refresh_token", data.get("refresh_token").asText()); - } else { - throw new IOException(String.format("Missing 'refresh_token' in query params from %s", accessTokenUrl)); - } - return result; - } - - @Override - @Deprecated - public List getDefaultOAuthOutputPath() { - return List.of("credentials"); - } - - protected static void validateInputOAuthConfiguration(final OAuthConfigSpecification oauthConfigSpecification, - final JsonNode inputOAuthConfiguration) - throws JsonValidationException { - if (oauthConfigSpecification != null && oauthConfigSpecification.getOauthUserInputFromConnectorConfigSpecification() != null) { - final JsonSchemaValidator validator = new JsonSchemaValidator(); - validator.ensure(oauthConfigSpecification.getOauthUserInputFromConnectorConfigSpecification(), inputOAuthConfiguration); - } - } - - private static String urlEncode(final String s) { - try { - return URLEncoder.encode(s, StandardCharsets.UTF_8); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - private static String toUrlEncodedString(final Map body) { - final StringBuilder result = new StringBuilder(); - for (final var entry : body.entrySet()) { - if (result.length() > 0) { - result.append("&"); - } - result.append(entry.getKey()).append("=").append(urlEncode(entry.getValue())); - } - return result.toString(); - } - - protected static String toJson(final Map body) { - final Gson gson = new Gson(); - final Type gsonType = new TypeToken>() {}.getType(); - return gson.toJson(body, gsonType); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuthFlow.java 
b/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuthFlow.java deleted file mode 100644 index 878ad73c002d..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuthFlow.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.protocol.models.OAuthConfigSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.function.BiConsumer; - -/** - * Abstract Class implementing common base methods for managing oAuth config (instance-wide) and - * oAuth specifications - */ -public abstract class BaseOAuthFlow implements OAuthFlowImplementation { - - public static final String PROPERTIES = "properties"; - private final ConfigRepository configRepository; - - public BaseOAuthFlow(final ConfigRepository configRepository) { - this.configRepository = configRepository; - } - - protected JsonNode getSourceOAuthParamConfig(final UUID workspaceId, final UUID sourceDefinitionId) throws IOException, ConfigNotFoundException { - try { - final Optional param = MoreOAuthParameters.getSourceOAuthParameter( - configRepository.listSourceOAuthParam().stream(), workspaceId, sourceDefinitionId); - if (param.isPresent()) { - // 
TODO: if we write a flyway migration to flatten persisted configs in db, we don't need to flatten - // here see https://github.com/airbytehq/airbyte/issues/7624 - return MoreOAuthParameters.flattenOAuthConfig(param.get().getConfiguration()); - } else { - throw new ConfigNotFoundException(ConfigSchema.SOURCE_OAUTH_PARAM, "Undefined OAuth Parameter."); - } - } catch (final JsonValidationException e) { - throw new IOException("Failed to load OAuth Parameters", e); - } - } - - protected JsonNode getDestinationOAuthParamConfig(final UUID workspaceId, final UUID destinationDefinitionId) - throws IOException, ConfigNotFoundException { - try { - final Optional param = MoreOAuthParameters.getDestinationOAuthParameter( - configRepository.listDestinationOAuthParam().stream(), workspaceId, destinationDefinitionId); - if (param.isPresent()) { - // TODO: if we write a migration to flatten persisted configs in db, we don't need to flatten - // here see https://github.com/airbytehq/airbyte/issues/7624 - return MoreOAuthParameters.flattenOAuthConfig(param.get().getConfiguration()); - } else { - throw new ConfigNotFoundException(ConfigSchema.DESTINATION_OAUTH_PARAM, "Undefined OAuth Parameter."); - } - } catch (final JsonValidationException e) { - throw new IOException("Failed to load OAuth Parameters", e); - } - } - - /** - * Throws an exception if the client ID cannot be extracted. Subclasses should override this to - * parse the config differently. - * - * @return The configured Client ID used for this oauth flow - */ - protected String getClientIdUnsafe(final JsonNode oauthConfig) { - return getConfigValueUnsafe(oauthConfig, "client_id"); - } - - /** - * Throws an exception if the client secret cannot be extracted. Subclasses should override this to - * parse the config differently. 
- * - * @return The configured client secret for this OAuthFlow - */ - protected String getClientSecretUnsafe(final JsonNode oauthConfig) { - return getConfigValueUnsafe(oauthConfig, "client_secret"); - } - - protected static String getConfigValueUnsafe(final JsonNode oauthConfig, final String fieldName) { - if (oauthConfig.get(fieldName) != null) { - return oauthConfig.get(fieldName).asText(); - } else { - throw new IllegalArgumentException(String.format("Undefined parameter '%s' necessary for the OAuth Flow.", fieldName)); - } - } - - /** - * completeOAuth calls should output a flat map of fields produced by the oauth flow to be forwarded - * back to the connector config. This @deprecated function is used when the connector's oauth - * specifications are unknown. So it ends up using hard-coded output path in the OAuth Flow - * implementation instead of relying on the connector's specification to determine where the outputs - * should be stored. - */ - @Deprecated - protected Map formatOAuthOutput(final JsonNode oAuthParamConfig, - final Map oauthOutput, - final List outputPath) { - Map result = new HashMap<>(oauthOutput); - for (final String key : Jsons.keys(oAuthParamConfig)) { - result.put(key, MoreOAuthParameters.SECRET_MASK); - } - for (final String node : outputPath) { - result = Map.of(node, result); - } - return result; - } - - /** - * completeOAuth calls should output a flat map of fields produced by the oauth flow to be forwarded - * back to the connector config. This function follows the connector's oauth specifications of which - * outputs are expected and filters them accordingly. 
- */ - protected Map formatOAuthOutput(final JsonNode oAuthParamConfig, - final Map completeOAuthFlow, - final OAuthConfigSpecification oAuthConfigSpecification) - throws JsonValidationException { - final JsonSchemaValidator validator = new JsonSchemaValidator(); - - final Map oAuthOutputs = formatOAuthOutput( - validator, - oAuthConfigSpecification.getCompleteOauthOutputSpecification(), - completeOAuthFlow.keySet(), - (resultMap, key) -> resultMap.put(key, completeOAuthFlow.get(key))); - - final Map oAuthServerOutputs = formatOAuthOutput( - validator, - oAuthConfigSpecification.getCompleteOauthServerOutputSpecification(), - Jsons.keys(oAuthParamConfig), - // TODO secrets should be masked with the correct type - // https://github.com/airbytehq/airbyte/issues/5990 - // In the short-term this is not world-ending as all secret fields are currently strings - (resultMap, key) -> resultMap.put(key, MoreOAuthParameters.SECRET_MASK)); - - return MoreMaps.merge(oAuthServerOutputs, oAuthOutputs); - } - - private static Map formatOAuthOutput(final JsonSchemaValidator validator, - final JsonNode outputSchema, - final Collection keys, - final BiConsumer, String> replacement) - throws JsonValidationException { - Map result = Map.of(); - if (outputSchema != null && outputSchema.has(PROPERTIES)) { - final Builder mapBuilder = ImmutableMap.builder(); - for (final String key : keys) { - if (outputSchema.get(PROPERTIES).has(key)) { - replacement.accept(mapBuilder, key); - } - } - result = mapBuilder.build(); - validator.ensure(outputSchema, Jsons.jsonNode(result)); - } - return result; - } - - /** - * This function should be redefined in each OAuthFlow implementation to isolate such "hardcoded" - * values. 
It is being @deprecated because the output path should not be "hard-coded" in the OAuth - * flow implementation classes anymore but will be specified as part of the OAuth Specification - * object - */ - @Deprecated - public abstract List getDefaultOAuthOutputPath(); - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/MoreOAuthParameters.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/MoreOAuthParameters.java deleted file mode 100644 index 3397bd2baca4..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/MoreOAuthParameters.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth; - -import static com.fasterxml.jackson.databind.node.JsonNodeType.OBJECT; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.base.Strings; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.SourceOAuthParameter; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MoreOAuthParameters { - - private static final Logger LOGGER = LoggerFactory.getLogger(Jsons.class); - public static final String SECRET_MASK = "******"; - - public static Optional getSourceOAuthParameter( - final Stream stream, - final UUID workspaceId, - final UUID sourceDefinitionId) { - return stream - .filter(p -> sourceDefinitionId.equals(p.getSourceDefinitionId())) - .filter(p -> p.getWorkspaceId() == null || workspaceId.equals(p.getWorkspaceId())) - // we prefer params specific to a workspace before global ones (ie workspace is null) - .min(Comparator.comparing(SourceOAuthParameter::getWorkspaceId, Comparator.nullsLast(Comparator.naturalOrder())) - 
.thenComparing(SourceOAuthParameter::getOauthParameterId)); - } - - public static Optional getDestinationOAuthParameter( - final Stream stream, - final UUID workspaceId, - final UUID destinationDefinitionId) { - return stream - .filter(p -> destinationDefinitionId.equals(p.getDestinationDefinitionId())) - .filter(p -> p.getWorkspaceId() == null || workspaceId.equals(p.getWorkspaceId())) - // we prefer params specific to a workspace before global ones (ie workspace is null) - .min(Comparator.comparing(DestinationOAuthParameter::getWorkspaceId, Comparator.nullsLast(Comparator.naturalOrder())) - .thenComparing(DestinationOAuthParameter::getOauthParameterId)); - } - - public static JsonNode flattenOAuthConfig(final JsonNode config) { - if (config.getNodeType() == OBJECT) { - return flattenOAuthConfig((ObjectNode) Jsons.emptyObject(), (ObjectNode) config); - } else { - throw new IllegalStateException("Config is not an Object config, unable to flatten"); - } - } - - private static ObjectNode flattenOAuthConfig(final ObjectNode flatConfig, final ObjectNode configToFlatten) { - final List keysToFlatten = new ArrayList<>(); - for (final String key : Jsons.keys(configToFlatten)) { - if (configToFlatten.get(key).getNodeType() == OBJECT) { - keysToFlatten.add(key); - } else if (!flatConfig.has(key)) { - flatConfig.set(key, configToFlatten.get(key)); - } else { - throw new IllegalStateException(String.format("OAuth Config's key '%s' already exists", key)); - } - } - keysToFlatten.forEach(key -> flattenOAuthConfig(flatConfig, (ObjectNode) configToFlatten.get(key))); - return flatConfig; - } - - public static JsonNode mergeJsons(final ObjectNode mainConfig, final ObjectNode fromConfig) { - for (final String key : Jsons.keys(fromConfig)) { - if (fromConfig.get(key).getNodeType() == OBJECT) { - // nested objects are merged rather than overwrite the contents of the equivalent object in config - if (mainConfig.get(key) == null) { - mergeJsons(mainConfig.putObject(key), (ObjectNode) 
fromConfig.get(key)); - } else if (mainConfig.get(key).getNodeType() == OBJECT) { - mergeJsons((ObjectNode) mainConfig.get(key), (ObjectNode) fromConfig.get(key)); - } else { - throw new IllegalStateException("Can't merge an object node into a non-object node!"); - } - } else { - if (!mainConfig.has(key) || isSecretMask(mainConfig.get(key).asText())) { - LOGGER.debug(String.format("injecting instance wide parameter %s into config", key)); - mainConfig.set(key, fromConfig.get(key)); - } - } - } - return mainConfig; - } - - private static boolean isSecretMask(final String input) { - return Strings.isNullOrEmpty(input.replaceAll("\\*", "")); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthFlowImplementation.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthFlowImplementation.java deleted file mode 100644 index 0c68040ec4dd..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthFlowImplementation.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.protocol.models.OAuthConfigSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Map; -import java.util.UUID; - -public interface OAuthFlowImplementation { - - String getSourceConsentUrl(UUID workspaceId, - UUID sourceDefinitionId, - String redirectUrl, - JsonNode inputOAuthConfiguration, - OAuthConfigSpecification oauthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException; - - String getDestinationConsentUrl(UUID workspaceId, - UUID destinationDefinitionId, - String redirectUrl, - JsonNode inputOAuthConfiguration, - OAuthConfigSpecification oauthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException; - - @Deprecated - Map completeSourceOAuth(UUID workspaceId, UUID sourceDefinitionId, Map queryParams, String redirectUrl) - throws IOException, ConfigNotFoundException; - - Map completeSourceOAuth(UUID workspaceId, - UUID sourceDefinitionId, - Map queryParams, - String redirectUrl, - JsonNode inputOAuthConfiguration, - OAuthConfigSpecification oauthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException; - - @Deprecated - Map completeDestinationOAuth(UUID workspaceId, UUID destinationDefinitionId, Map queryParams, String redirectUrl) - throws IOException, ConfigNotFoundException; - - Map completeDestinationOAuth(UUID workspaceId, - UUID destinationDefinitionId, - Map queryParams, - String redirectUrl, - JsonNode inputOAuthConfiguration, - OAuthConfigSpecification oAuthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException; - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java 
b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java deleted file mode 100644 index ae8a755cd057..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth; - -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.flows.*; -import io.airbyte.oauth.flows.facebook.*; -import io.airbyte.oauth.flows.google.*; -import java.net.http.HttpClient; -import java.util.Map; - -public class OAuthImplementationFactory { - - private final Map OAUTH_FLOW_MAPPING; - - public OAuthImplementationFactory(final ConfigRepository configRepository, final HttpClient httpClient) { - OAUTH_FLOW_MAPPING = ImmutableMap.builder() - .put("airbyte/source-amazon-ads", new AmazonAdsOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-amazon-seller-partner", new AmazonSellerPartnerOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-asana", new AsanaOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-facebook-marketing", new FacebookMarketingOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-facebook-pages", new FacebookPagesOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-github", new GithubOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-google-ads", new GoogleAdsOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-google-analytics-v4", new GoogleAnalyticsViewIdOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-google-analytics-data-api", new GoogleAnalyticsPropertyIdOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-google-search-console", new GoogleSearchConsoleOAuthFlow(configRepository, httpClient)) - 
.put("airbyte/source-google-sheets", new GoogleSheetsOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-harvest", new HarvestOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-hubspot", new HubspotOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-intercom", new IntercomOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-instagram", new InstagramOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-lever-hiring", new LeverOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-microsoft-teams", new MicrosoftTeamsOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-notion", new NotionOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-bing-ads", new MicrosoftBingAdsOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-pinterest", new PinterestOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-pipedrive", new PipeDriveOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-quickbooks", new QuickbooksOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-retently", new RetentlyOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-linkedin-ads", new LinkedinAdsOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-salesforce", new SalesforceOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-slack", new SlackOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-smartsheets", new SmartsheetsOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-snapchat-marketing", new SnapchatMarketingOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-square", new SquareOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-strava", new StravaOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-surveymonkey", new SurveymonkeyOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-trello", new TrelloOAuthFlow(configRepository)) - .put("airbyte/source-gitlab", new 
GitlabOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-youtube-analytics", new YouTubeAnalyticsOAuthFlow(configRepository, httpClient)) - // revert me - .put("airbyte/source-youtube-analytics-business", new YouTubeAnalyticsBusinessOAuthFlow(configRepository, httpClient)) - // - .put("airbyte/source-drift", new DriftOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-zendesk-chat", new ZendeskChatOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-zendesk-support", new ZendeskSupportOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-zendesk-talk", new ZendeskTalkOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-monday", new MondayOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-zendesk-sunshine", new ZendeskSunshineOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-mailchimp", new MailchimpOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-shopify", new ShopifyOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-tiktok-marketing", new TikTokMarketingOAuthFlow(configRepository, httpClient)) - .put("airbyte/destination-snowflake", new DestinationSnowflakeOAuthFlow(configRepository, httpClient)) - .put("airbyte/destination-google-sheets", new DestinationGoogleSheetsOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-snowflake", new SourceSnowflakeOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-okta", new OktaOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-paypal-transaction", new PayPalTransactionOAuthFlow(configRepository, httpClient)) - .put("airbyte/source-airtable", new AirtableOAuthFlow(configRepository, httpClient)) - .build(); - } - - public OAuthFlowImplementation create(final StandardSourceDefinition sourceDefinition) { - return create(sourceDefinition.getDockerRepository()); - } - - public OAuthFlowImplementation create(final StandardDestinationDefinition destinationDefinition) { - return 
create(destinationDefinition.getDockerRepository()); - } - - private OAuthFlowImplementation create(final String imageName) { - if (OAUTH_FLOW_MAPPING.containsKey(imageName)) { - return OAUTH_FLOW_MAPPING.get(imageName); - } else { - throw new IllegalStateException( - String.format("Requested OAuth implementation for %s, but it is not included in the oauth mapping.", imageName)); - } - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AirtableOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AirtableOAuthFlow.java deleted file mode 100644 index 112335ad713c..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AirtableOAuthFlow.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.time.Clock; -import java.time.Instant; -import java.util.Arrays; -import java.util.Base64; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://airtable.com/developers/web/api/oauth-reference#authorization-request-query - */ -public class AirtableOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://airtable.com/oauth2/v1/token"; - private final Clock clock; - private static final List SCOPES = 
Arrays.asList( - "data.records:read", - "data.recordComments:read", - "schema.bases:read"); - - public String getScopes() { - // More info and additional scopes could be found here: - // https://airtable.com/developers/web/api/scopes - // should be space-delimitered - return String.join(" ", SCOPES); - } - - public String getCodeVerifier() { - // Randomly generated string, min 43 - max 150 symbols - return "XmG5afcqXCamPk3jshWQXmG5afcqXCamPk3jshWQXmG5afcqXCamPk3jshWQXmG5afcqXCamPk3jshWQ"; - } - - public String getCodeChanlenge() { - // Base64(s256) from CODE_VERIFIER - return "jajoblvFNHmH8rSnW84xFEUKMGC8CYwR82phhRR6iCg"; - } - - @Override - public String getState() { - // State - return "WeHH_yy2irpl8UYAvv-my"; - } - - public AirtableOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - this.clock = Clock.systemUTC(); - } - - @VisibleForTesting - public AirtableOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier, Clock clock) { - super(configRepository, httpClient, stateSupplier); - this.clock = clock; - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost("airtable.com") - .setPath("oauth2/v1/authorize") - // required - .addParameter("redirect_uri", redirectUrl) - .addParameter("client_id", clientId) - .addParameter("response_type", "code") - .addParameter("scope", getScopes()) - .addParameter("code_challenge", getCodeChanlenge()) - .addParameter("code_challenge_method", "S256") - .addParameter("state", getState()); - - try { - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String 
getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("code", authCode) - .put("redirect_uri", redirectUrl) - .put("grant_type", "authorization_code") - .put("client_id", clientId) - .put("code_verifier", getCodeVerifier()) - .put("code_challenge_method", "S256") - .build(); - } - - @Override - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode oAuthParamConfig) - throws IOException { - final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); - final byte[] authorization = Base64.getEncoder() - .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); - final HttpRequest request = HttpRequest.newBuilder() - .POST(HttpRequest.BodyPublishers - .ofString(tokenReqContentType.getConverter().apply( - getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) - .uri(URI.create(accessTokenUrl)) - .header("Content-Type", tokenReqContentType.getContentType()) - .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8)) - .build(); - try { - final HttpResponse response = httpClient.send(request, - HttpResponse.BodyHandlers.ofString()); - - return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); - } catch (final InterruptedException e) { - throw new IOException("Failed to complete OAuth flow", e); - } - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - if (data.has("refresh_token")) { - result.put("refresh_token", data.get("refresh_token").asText()); - } else { - throw new 
IOException(String.format("Missing 'refresh_token' in query params from %s", accessTokenUrl)); - } - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - if (data.has("expires_in")) { - Instant expires_in = Instant.now(this.clock).plusSeconds(data.get("expires_in").asInt()); - result.put("token_expiry_date", expires_in.toString()); - } else { - throw new IOException(String.format("Missing 'expires_in' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AmazonAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AmazonAdsOAuthFlow.java deleted file mode 100644 index 3757a553e625..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AmazonAdsOAuthFlow.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class AmazonAdsOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://www.amazon.com/ap/oa"; - private static final String ACCESS_TOKEN_URL = "https://api.amazon.com/auth/o2/token"; - - public AmazonAdsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - public AmazonAdsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. 
- * - * @param definitionId The configured definition ID of this client - * @param clientId The configured client ID - * @param redirectUrl the redirect URL - */ - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("scope", "advertising::campaign_management") - .addParameter("response_type", "code") - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .build(); - } - - /** - * Returns the URL where to retrieve the access token from. - * - */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AmazonSellerPartnerOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AmazonSellerPartnerOAuthFlow.java deleted file mode 100644 index ae5216d538e8..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AmazonSellerPartnerOAuthFlow.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class AmazonSellerPartnerOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://sellercentral.amazon.com/apps/authorize/consent"; - private static final String ACCESS_TOKEN_URL = "https://api.amazon.com/auth/o2/token"; - - @Override - protected String getClientIdUnsafe(final JsonNode oauthConfig) { - return getConfigValueUnsafe(oauthConfig, "lwa_app_id"); - } - - @Override - protected String getClientSecretUnsafe(final JsonNode oauthConfig) { - return getConfigValueUnsafe(oauthConfig, "lwa_client_secret"); - } - - public AmazonSellerPartnerOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - public AmazonSellerPartnerOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. 
- * - * @param definitionId The configured definition ID of this client - * @param clientId The configured client ID - * @param redirectUrl the redirect URL - */ - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - // getting application_id value from user's config - final String application_id = getConfigValueUnsafe(inputOAuthConfiguration, "app_id"); - - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("application_id", application_id) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("version", "beta") - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String extractCodeParameter(final Map queryParams) throws IOException { - if (queryParams.containsKey("spapi_oauth_code")) { - return (String) queryParams.get("spapi_oauth_code"); - } else { - throw new IOException("Undefined 'spapi_oauth_code' from consent redirected url."); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .build(); - } - - /** - * Returns the URL where to retrieve the access token from. 
- * - */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - public List getDefaultOAuthOutputPath() { - return List.of(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AsanaOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AsanaOAuthFlow.java deleted file mode 100644 index 799802715976..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/AsanaOAuthFlow.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://developers.asana.com/docs/oauth - */ -public class AsanaOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://app.asana.com/-/oauth_authorize"; - private static final String ACCESS_TOKEN_URL = "https://app.asana.com/-/oauth_token"; - - public AsanaOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - AsanaOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new 
URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)) - .put("grant_type", "authorization_code") - .build(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/DestinationSnowflakeOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/DestinationSnowflakeOAuthFlow.java deleted file mode 100644 index 0f35026ce5a4..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/DestinationSnowflakeOAuthFlow.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.client.utils.URIBuilder; - -public class DestinationSnowflakeOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://%s/oauth/authorize"; - private static final String ACCESS_TOKEN_URL = "https://%s/oauth/token-request"; - - public DestinationSnowflakeOAuthFlow( - final ConfigRepository configRepository, - final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder( - String.format(AUTHORIZE_URL, extractAuthorizeUrl(inputOAuthConfiguration))) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("state", getState()) - .build().toString(); - - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return String.format(ACCESS_TOKEN_URL, extractTokenUrl(inputOAuthConfiguration)); - } - - @Override - protected String extractCodeParameter(final Map queryParams) throws IOException { - 
return super.extractCodeParameter(queryParams); - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("redirect_uri", redirectUrl) - .build(); - } - - // -------------------------------------------- - @Override - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode oAuthParamConfig) - throws IOException { - final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); - - final byte[] authorization = Base64.getEncoder() - .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); - final HttpRequest request = HttpRequest.newBuilder() - .POST(HttpRequest.BodyPublishers - .ofString(tokenReqContentType.getConverter().apply( - getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) - .uri(URI.create(accessTokenUrl)) - .header("Content-Type", tokenReqContentType.getContentType()) - .header("Accept", "application/json") - .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8)) - .build(); - try { - final HttpResponse response = httpClient.send(request, - HttpResponse.BodyHandlers.ofString()); - - return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); - } catch (final InterruptedException e) { - throw new IOException("Failed to complete OAuth flow", e); - } - } - - /** - * Extract all OAuth outputs from distant API response and store them in a flat map. 
- */ - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) - throws IOException { - final Map result = new HashMap<>(); - // access_token is valid for only 10 minutes - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", - accessTokenUrl)); - } - - if (data.has("refresh_token")) { - result.put("refresh_token", data.get("refresh_token").asText()); - } else { - throw new IOException(String.format("Missing 'refresh_token' in query params from %s", - accessTokenUrl)); - } - return result; - } - - private String extractAuthorizeUrl(final JsonNode inputOAuthConfiguration) { - final var url = inputOAuthConfiguration.get("host"); - return url == null ? StringUtils.EMPTY : url.asText(); - } - - private String extractTokenUrl(final JsonNode inputOAuthConfiguration) { - final var url = inputOAuthConfiguration.get("host"); - // var url = inputOAuthConfiguration.get("token_url"); - return url == null ? StringUtils.EMPTY : url.asText(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/DriftOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/DriftOAuthFlow.java deleted file mode 100644 index 9d6d59e05c94..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/DriftOAuthFlow.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://devdocs.drift.com/docs/authentication-and-scopes#1-direct-the-user-to-the-drift-oauth-url- - */ -public class DriftOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://driftapi.com/oauth2/token"; - private static final String CODE = "code"; - - public DriftOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - DriftOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, final String clientId, final String redirectUrl, final JsonNode inputOAuthConfiguration) - throws IOException { - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost("dev.drift.com") - .setPath("authorize") - .addParameter("response_type", CODE) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()); - try { - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String extractCodeParameter(final Map queryParams) throws IOException { - if (queryParams.containsKey(CODE)) { - 
return (String) queryParams.get(CODE); - } else { - throw new IOException("Undefined 'code' from consent redirected url."); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put(CODE, authCode) - .put("grant_type", "authorization_code") - .build(); - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/GithubOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/GithubOAuthFlow.java deleted file mode 100644 index 6aae34c46051..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/GithubOAuthFlow.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://docs.github.com/en/developers/apps/building-oauth-apps/authorizing-oauth-apps#web-application-flow - */ -public class GithubOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://github.com/login/oauth/authorize"; - private static final String ACCESS_TOKEN_URL = "https://github.com/login/oauth/access_token"; - // Setting "repo" scope would allow grant not only read but also write - // access to our application. Unfortunatelly we cannot follow least - // privelege principle here cause github has no option of granular access - // tune up. - // This is necessary to pull data from private repositories. 
- // https://docs.github.com/en/developers/apps/building-oauth-apps/scopes-for-oauth-apps#available-scopes - - private static final List SCOPES = Arrays.asList( - "repo", - "read:org", - "read:repo_hook", - "read:user", - "read:discussion", - "workflow"); - - public String getScopes() { - return String.join("%20", SCOPES); - } - - public GithubOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - GithubOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - // we add `scopes` and `state` after we've already built the url, to prevent url encoding for scopes - // https://docs.github.com/en/developers/apps/building-oauth-apps/scopes-for-oauth-apps#available-scopes - // we need to keep scopes in the format of: < scope1%20scope2:sub_scope%20scope3 > - .build().toString() + "&scope=" + getScopes() + "&state=" + getState(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - if (data.has("access_token")) { - return Map.of("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", ACCESS_TOKEN_URL)); - } - } - -} diff --git 
a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/GitlabOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/GitlabOAuthFlow.java deleted file mode 100644 index 4e3c0d96826b..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/GitlabOAuthFlow.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.time.Clock; -import java.time.Instant; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://docs.gitlab.com/ee/api/oauth2.html#authorization-code-flow - */ -public class GitlabOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://%s/oauth/token"; - private final Clock clock; - - public GitlabOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - this.clock = Clock.systemUTC(); - } - - public GitlabOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - this.clock = Clock.systemUTC(); - } - - public GitlabOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier, Clock clock) { - super(configRepository, httpClient, stateSupplier); - this.clock = clock; - } - - protected static String getDomain(JsonNode inputOAuthConfiguration) throws IOException { - final var domain = 
inputOAuthConfiguration.get("domain"); - if (domain == null) { - throw new IOException("Domain field is empty."); - } - return domain.asText(); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, final String clientId, final String redirectUrl, final JsonNode inputOAuthConfiguration) - throws IOException { - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost(getDomain(inputOAuthConfiguration)) - .setPath("oauth/authorize") - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("response_type", "code") - .addParameter("scope", "read_api"); - try { - return builder.build().toString(); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - final var domain = inputOAuthConfiguration.get("domain"); - return String.format(ACCESS_TOKEN_URL, domain == null ? 
"gitlab.com" : domain.asText()); - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .put("redirect_uri", redirectUrl) - .build(); - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - if (data.has("refresh_token")) { - result.put("refresh_token", data.get("refresh_token").asText()); - } else { - throw new IOException(String.format("Missing 'refresh_token' in query params from %s", accessTokenUrl)); - } - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - if (data.has("expires_in")) { - Instant expires_in = Instant.now(this.clock).plusSeconds(data.get("expires_in").asInt()); - result.put("token_expiry_date", expires_in.toString()); - } else { - throw new IOException(String.format("Missing 'expires_in' in query params from %s", accessTokenUrl)); - } - return result; - } - - @Override - @Deprecated - public Map completeSourceOAuth(final UUID workspaceId, - final UUID sourceDefinitionId, - final Map queryParams, - final String redirectUrl) - throws IOException, ConfigNotFoundException { - throw new IOException("Deprecated API not supported by this connector"); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HarvestOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HarvestOAuthFlow.java deleted file mode 100644 index b2b54b34679e..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HarvestOAuthFlow.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 
(c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class HarvestOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://id.getharvest.com/oauth2/authorize"; - private static final String ACCESS_TOKEN_URL = "https://id.getharvest.com/api/v2/oauth2/token"; - - public HarvestOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - public HarvestOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. 
- * - * @param definitionId The configured definition ID of this client - * @param clientId The configured client ID - * @param redirectUrl the redirect URL - */ - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("response_type", "code") - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .build(); - } - - /** - * Returns the URL where to retrieve the access token from. - * - */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java deleted file mode 100644 index 2252375715bc..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class HubspotOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://app.hubspot.com/oauth/authorize"; - - public HubspotOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - public HubspotOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier, TOKEN_REQUEST_CONTENT_TYPE.JSON); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - /* - * Not all accounts have access to all scopes so we're requesting them as optional. 
Hubspot still - * expects scopes to be defined, so the contacts scope is left as required as it is accessible by - * any marketing or CRM account according to - * https://legacydocs.hubspot.com/docs/methods/oauth2/initiate-oauth-integration#scopes - */ - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("scopes", getRequiredScopes()) - .addParameter("optional_scopes", getOptionalScopes()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .build(); - } - - private String getOptionalScopes() { - return String.join(" ", "content", - "crm.schemas.deals.read", - "crm.objects.owners.read", - "forms", - "tickets", - "e-commerce", - "crm.objects.companies.read", - "crm.lists.read", - "crm.objects.deals.read", - "crm.objects.contacts.read", - "crm.schemas.companies.read", - "files", - "forms-uploaded-files", - "files.ui_hidden.read", - "crm.objects.feedback_submissions.read", - "sales-email-read", - "automation"); - } - - private String getRequiredScopes() { - return "crm.schemas.contacts.read"; - } - - /** - * Returns the URL where to retrieve the access token from. 
- */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return "https://api.hubapi.com/oauth/v1/token"; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/IntercomOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/IntercomOAuthFlow.java deleted file mode 100644 index 8d10641944d5..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/IntercomOAuthFlow.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class IntercomOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://app.intercom.com/a/oauth/connect"; - private static final String ACCESS_TOKEN_URL = "https://api.intercom.io/auth/eagle/token"; - - public IntercomOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public IntercomOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - 
.addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) { - // Intercom does not have refresh token but calls it "long lived access token" instead: - // see https://developers.intercom.com/building-apps/docs/setting-up-oauth - Preconditions.checkArgument(data.has("access_token"), "Missing 'access_token' in query params from %s", ACCESS_TOKEN_URL); - return Map.of("access_token", data.get("access_token").asText()); - } - - @Override - public List getDefaultOAuthOutputPath() { - return List.of(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LeverOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LeverOAuthFlow.java deleted file mode 100644 index 1dff49afb7f3..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LeverOAuthFlow.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.URLDecoder; -import java.net.http.HttpClient; -import java.nio.charset.StandardCharsets; -import java.util.Locale; -import java.util.Map; -import java.util.UUID; -import org.apache.http.client.utils.URIBuilder; - -public class LeverOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "%s/authorize"; - private static final String ACCESS_TOKEN_URL = "%s/oauth/token"; - - private static final String SCOPES = String.join("+", "applications:read:admin", - "applications:read:admin", - "interviews:read:admin", - "notes:read:admin", - "offers:read:admin", - "opportunities:read:admin", - "referrals:read:admin", - "resumes:read:admin", - "users:read:admin", - "offline_access"); - - public LeverOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - private String getAudience(final JsonNode inputOAuthConfiguration) { - return String.format("%s/v1/", getBaseApiUrl(inputOAuthConfiguration)); - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("grant_type", "authorization_code") - .put("code", authCode) - .build(); - } - - /** - * Returns the URL where to retrieve the access token from. 
- */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return String.format(ACCESS_TOKEN_URL, getBaseAuthUrl(inputOAuthConfiguration)); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, final String clientId, final String redirectUrl, final JsonNode inputOAuthConfiguration) - throws IOException { - - try { - return URLDecoder.decode((new URIBuilder(String.format(AUTHORIZE_URL, getBaseAuthUrl(inputOAuthConfiguration))) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("response_type", "code") - .addParameter("scope", SCOPES) - .addParameter("audience", getAudience(inputOAuthConfiguration)) - .addParameter("prompt", "consent").build().toString()), StandardCharsets.UTF_8); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - private String getBaseAuthUrl(final JsonNode inputOAuthConfiguration) { - if (isProduction(inputOAuthConfiguration)) { - return "http1s://auth.lever.co"; - } else { - return "https://sandbox-lever.auth0.com"; - } - } - - private String getBaseApiUrl(final JsonNode inputOAuthConfiguration) { - if (isProduction(inputOAuthConfiguration)) { - return "https://api.lever.co/"; - } else { - return "https://api.sandbox.lever.co"; - } - } - - private boolean isProduction(final JsonNode inputOAuthConfiguration) { - final var environment = inputOAuthConfiguration.get("environment"); - return environment != null - && environment.asText().toLowerCase(Locale.ROOT).equals("production"); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java deleted file mode 100644 index b83f5493748d..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java +++ /dev/null @@ -1,70 +0,0 @@ -/* 
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class LinkedinAdsOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://www.linkedin.com/oauth/v2/authorization"; - private static final String ACCESS_TOKEN_URL = "https://www.linkedin.com/oauth/v2/accessToken"; - private static final String SCOPES = "r_ads_reporting r_emailaddress r_liteprofile r_ads r_basicprofile r_organization_social"; - - public LinkedinAdsOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public LinkedinAdsOAuthFlow(ConfigRepository configRepository, final HttpClient httpClient, Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(UUID definitionId, - String clientId, - String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("scope", SCOPES) - .addParameter("state", getState()) - .build().toString(); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - 
@Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)) - .put("grant_type", "authorization_code") - .build(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MailchimpOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MailchimpOAuthFlow.java deleted file mode 100644 index 9db60485fc99..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MailchimpOAuthFlow.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://mailchimp.com/developer/marketing/guides/access-user-data-oauth-2/ - */ -public class MailchimpOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://login.mailchimp.com/oauth2/token"; - private static final String AUTHORIZE_URL = "https://login.mailchimp.com/oauth2/authorize"; - - public MailchimpOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public MailchimpOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, 
stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("response_type", "code") - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .build().toString(); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("redirect_uri", redirectUrl) - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftBingAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftBingAdsOAuthFlow.java deleted file mode 100644 index 9be66ca0da2a..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftBingAdsOAuthFlow.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class MicrosoftBingAdsOAuthFlow extends BaseOAuth2Flow { - - private static final String fieldName = "tenant_id"; - - public MicrosoftBingAdsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public MicrosoftBingAdsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - private String getScopes() { - return "offline_access%20https://ads.microsoft.com/msads.manage"; - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - final String tenantId; - try { - tenantId = getConfigValueUnsafe(inputOAuthConfiguration, fieldName); - } catch (final IllegalArgumentException e) { - throw new IOException("Failed to get " + fieldName + " value from input configuration", e); - } - - try { - return new URIBuilder() - .setScheme("https") - .setHost("login.microsoftonline.com") - .setPath(tenantId + "/oauth2/v2.0/authorize") - .addParameter("client_id", clientId) - .addParameter("response_type", "code") - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_mode", "query") - .addParameter("state", getState()) - .build().toString() + "&scope=" + getScopes(); - } catch (final 
URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .put("client_id", clientId) - .put("code", authCode) - .put("redirect_uri", redirectUrl) - .put("grant_type", "authorization_code") - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - final String tenantId = getConfigValueUnsafe(inputOAuthConfiguration, fieldName); - return "https://login.microsoftonline.com/" + tenantId + "/oauth2/v2.0/token"; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftTeamsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftTeamsOAuthFlow.java deleted file mode 100644 index 458419556df3..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftTeamsOAuthFlow.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class MicrosoftTeamsOAuthFlow extends BaseOAuth2Flow { - - private static final String fieldName = "tenant_id"; - - public MicrosoftTeamsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - public MicrosoftTeamsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier, TOKEN_REQUEST_CONTENT_TYPE.JSON); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. 
- * - * @param definitionId The configured definition ID of this client - * @param clientId The configured client ID - * @param redirectUrl the redirect URL - */ - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - final String tenantId; - try { - tenantId = getConfigValueUnsafe(inputOAuthConfiguration, fieldName); - } catch (final IllegalArgumentException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - - try { - return new URIBuilder() - .setScheme("https") - .setHost("login.microsoftonline.com") - .setPath(tenantId + "/oauth2/v2.0/authorize") - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("scope", getScopes()) - .addParameter("response_type", "code") - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .build(); - } - - private String getScopes() { - return String.join(" ", "offline_access", - "Application.Read.All", - "Channel.ReadBasic.All", - "ChannelMember.Read.All", - "ChannelMember.ReadWrite.All", - "ChannelSettings.Read.All", - "ChannelSettings.ReadWrite.All", - "Directory.Read.All", - "Directory.ReadWrite.All", - "Files.Read.All", - "Files.ReadWrite.All", - "Group.Read.All", - "Group.ReadWrite.All", - "GroupMember.Read.All", - "Reports.Read.All", - "Sites.Read.All", - "Sites.ReadWrite.All", - "TeamsTab.Read.All", - 
"TeamsTab.ReadWrite.All", - "User.Read.All", - "User.ReadWrite.All"); - } - - @Override - @Deprecated - public Map completeSourceOAuth(final UUID workspaceId, - final UUID sourceDefinitionId, - final Map queryParams, - final String redirectUrl) - throws IOException { - throw new IOException("Using the deprecated OAuth methods is not supported. This OAuth flow depends on values defined in connector configs"); - } - - @Override - @Deprecated - public Map completeDestinationOAuth(final UUID workspaceId, - final UUID destinationDefinitionId, - final Map queryParams, - final String redirectUrl) - throws IOException { - throw new IOException("Using the deprecated OAuth methods is not supported. This OAuth flow depends on values defined in connector configs"); - } - - /** - * Returns the URL where to retrieve the access token from. - * - */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - final String tenantId = getConfigValueUnsafe(inputOAuthConfiguration, fieldName); - return "https://login.microsoftonline.com/" + tenantId + "/oauth2/v2.0/token"; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MondayOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MondayOAuthFlow.java deleted file mode 100644 index 4ae2a73c2bb2..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MondayOAuthFlow.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://developer.zendesk.com/documentation/live-chat/getting-started/auth/ - */ -public class MondayOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://auth.monday.com/oauth2/token"; - - public MondayOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public MondayOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - // getting subdomain value from user's config - final String subdomain = getConfigValueUnsafe(inputOAuthConfiguration, "subdomain"); - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost("auth.monday.com") - .setPath("oauth2/authorize") - // required - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("scope", "me:read boards:read workspaces:read users:read account:read updates:read assets:read tags:read teams:read") - .addParameter("state", getState()); - - try { - // applying optional parameter of subdomain, if there is any value - if (!subdomain.isEmpty()) 
{ - builder.addParameter("subdomain", subdomain); - } - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("code", authCode) - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("redirect_uri", redirectUrl) - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/NotionOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/NotionOAuthFlow.java deleted file mode 100644 index c86f88eeabe3..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/NotionOAuthFlow.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class NotionOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://api.notion.com/v1/oauth/authorize"; - private static final String ACCESS_TOKEN_URL = "https://api.notion.com/v1/oauth/token"; - - public NotionOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - public NotionOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. 
- * - * @param definitionId The configured definition ID of this client - * @param clientId The configured client ID - * @param redirectUrl the redirect URL - */ - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("response_type", "code") - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for Notion OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("redirect_uri", redirectUrl) - .build(); - } - - /** - * Returns the URL where to retrieve the access token from. 
- * - */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode oAuthParamConfig) - throws IOException { - final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); - final String authorization = Base64.getEncoder() - .encodeToString((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); - final HttpRequest request = HttpRequest.newBuilder() - .POST(HttpRequest.BodyPublishers - .ofString(tokenReqContentType.getConverter().apply( - getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) - .uri(URI.create(accessTokenUrl)) - .header("Content-Type", tokenReqContentType.getContentType()) - .header("Authorization", "Basic " + authorization) - .build(); - - try { - final HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); - } catch (final InterruptedException e) { - throw new IOException("Failed to complete Notion OAuth flow", e); - } - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/OktaOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/OktaOAuthFlow.java deleted file mode 100644 index 9ad81fa69c80..000000000000 --- 
a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/OktaOAuthFlow.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://developer.okta.com/docs/guides/implement-oauth-for-okta/main/ - */ -public class OktaOAuthFlow extends BaseOAuth2Flow { - - public OktaOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public OktaOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - // getting domain value from user's config - final String domain = getConfigValueUnsafe(inputOAuthConfiguration, "domain"); - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost(domain + ".okta.com") - .setPath("oauth2/v1/authorize") - // required - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("scope", "okta.users.read 
okta.logs.read okta.groups.read okta.roles.read offline_access") - .addParameter("response_type", "code") - .addParameter("state", getState()); - - try { - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("code", authCode) - .put("redirect_uri", redirectUrl) - .put("grant_type", "authorization_code") - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - // getting domain value from user's config - final String domain = getConfigValueUnsafe(inputOAuthConfiguration, "domain"); - return "https://" + domain + ".okta.com/oauth2/v1/token"; - } - - @Override - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode oAuthParamConfig) - throws IOException { - final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); - final byte[] authorization = Base64.getEncoder() - .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); - final HttpRequest request = HttpRequest.newBuilder() - .POST(HttpRequest.BodyPublishers - .ofString(tokenReqContentType.getConverter().apply( - getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) - .uri(URI.create(accessTokenUrl)) - .header("Content-Type", tokenReqContentType.getContentType()) - .header("Accept", "application/json") - .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8)) - .build(); - try { - final HttpResponse response = httpClient.send(request, - HttpResponse.BodyHandlers.ofString()); - - return extractOAuthOutput(Jsons.deserialize(response.body()), 
accessTokenUrl); - } catch (final InterruptedException e) { - throw new IOException("Failed to complete OAuth flow", e); - } - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PayPalTransactionOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PayPalTransactionOAuthFlow.java deleted file mode 100644 index d463b6131a32..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PayPalTransactionOAuthFlow.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class PayPalTransactionOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://www.paypal.com/connect"; - private static final String ACCESS_TOKEN_URL = "https://api-m.paypal.com/v1/oauth2/token"; - private static final String SCOPES = "openid email profile"; - - public PayPalTransactionOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public PayPalTransactionOAuthFlow(ConfigRepository configRepository, final HttpClient httpClient, Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(UUID 
definitionId, - String clientId, - String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("flowEntry", "static") - .addParameter("client_id", clientId) - .addParameter("response_type", "code") - .addParameter("scope", SCOPES) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .build().toString(); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode oAuthParamConfig) - throws IOException { - final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); - final String authorization = Base64.getEncoder() - .encodeToString((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); - final HttpRequest request = HttpRequest.newBuilder() - .POST(HttpRequest.BodyPublishers - .ofString(tokenReqContentType.getConverter().apply( - getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) - .uri(URI.create(accessTokenUrl)) - .header("Content-Type", tokenReqContentType.getContentType()) - .header("Authorization", "Basic " + authorization) - .build(); - - try { - final HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); - } catch (final InterruptedException e) { - throw new IOException("Failed to complete PayPal OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return 
ImmutableMap.builder() - .put("grant_type", "authorization_code") - .put("code", authCode) - .build(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PinterestOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PinterestOAuthFlow.java deleted file mode 100644 index 8098967adcd4..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PinterestOAuthFlow.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://developers.pinterest.com/docs/getting-started/authentication - */ -public class PinterestOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://api.pinterest.com/v5/oauth/token"; - - public PinterestOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public PinterestOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String 
redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost("pinterest.com") - .setPath("oauth") - // required - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("scope", "ads:read,boards:read,boards:read_secret,catalogs:read,pins:read,pins:read_secret,user_accounts:read") - .addParameter("state", getState()); - - try { - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode oAuthParamConfig) - throws IOException { - final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); - final String authorization = Base64.getEncoder() - .encodeToString((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); - final HttpRequest request = HttpRequest.newBuilder() - .POST(HttpRequest.BodyPublishers - .ofString(tokenReqContentType.getConverter().apply( - getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) - .uri(URI.create(accessTokenUrl)) - .header("Content-Type", tokenReqContentType.getContentType()) - .header("Authorization", "Basic " + authorization) - .build(); - - try { - final HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); - } catch (final InterruptedException e) { - throw new IOException("Failed to complete Pinterest OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return 
ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("redirect_uri", redirectUrl) - .put("scope", "read") - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - // getting out refresh_token - if (data.has("refresh_token")) { - result.put("refresh_token", data.get("refresh_token").asText()); - } else { - throw new IOException(String.format("Missing 'refresh_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PipeDriveOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PipeDriveOAuthFlow.java deleted file mode 100644 index e3f98ef8e77b..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/PipeDriveOAuthFlow.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://pipedrive.readme.io/docs/marketplace-oauth-authorization - */ -public class PipeDriveOAuthFlow extends BaseOAuth2Flow { - - final String AUTHORIZE_URL = "https://oauth.pipedrive.com/oauth/authorize"; - final String ACCESS_TOKEN_URL = "https://oauth.pipedrive.com/oauth/token"; - - public PipeDriveOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public PipeDriveOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, 
authCode, redirectUrl)) - .put("grant_type", "authorization_code") - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - public List getDefaultOAuthOutputPath() { - return List.of("authorization"); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/QuickbooksOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/QuickbooksOAuthFlow.java deleted file mode 100644 index 1fc0906b6627..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/QuickbooksOAuthFlow.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class QuickbooksOAuthFlow extends BaseOAuth2Flow { - - final String CONSENT_URL = "https://appcenter.intuit.com/app/connect/oauth2"; - final String TOKEN_URL = "https://oauth.platform.intuit.com/oauth2/v1/tokens/bearer"; - - public QuickbooksOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public QuickbooksOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - public String getScopes() { - return "com.intuit.quickbooks.accounting"; - } - - @Override - protected String formatConsentUrl(final UUID 
definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - - return (new URIBuilder(CONSENT_URL) - .addParameter("client_id", clientId) - .addParameter("scope", getScopes()) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("state", getState()) - .build()).toString(); - - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("redirect_uri", redirectUrl) - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("client_id", clientId) - .put("client_secret", clientSecret) - .build(); - } - - /** - * Returns the URL where to retrieve the access token from. - */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return TOKEN_URL; - } - - /** - * This function should be redefined in each OAuthFlow implementation to isolate such "hardcoded" - * values. - */ - @Override - public List getDefaultOAuthOutputPath() { - return List.of("credentials"); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/RetentlyOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/RetentlyOAuthFlow.java deleted file mode 100644 index f57bcf9e4c6c..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/RetentlyOAuthFlow.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class RetentlyOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://app.retently.com/api/oauth/authorize"; - private static final String ACCESS_TOKEN_URL = "https://app.retently.com/api/oauth/token"; - - public RetentlyOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - public RetentlyOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. 
- * - * @param definitionId The configured definition ID of this client - * @param clientId The configured client ID - * @param redirectUrl the redirect URL - */ - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("response_type", "code") - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .build(); - } - - /** - * Returns the URL where to retrieve the access token from. - * - */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SalesforceOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SalesforceOAuthFlow.java deleted file mode 100644 index 5fa02e9f9ca6..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SalesforceOAuthFlow.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://help.salesforce.com/s/articleView?language=en_US&id=sf.remoteaccess_oauth_web_server_flow.htm - */ -public class SalesforceOAuthFlow extends BaseOAuth2Flow { - // Clickable link for IDE - // https://help.salesforce.com/s/articleView?language=en_US&id=sf.remoteaccess_oauth_web_server_flow.htm - - private static final String AUTHORIZE_URL = "https://%s.salesforce.com/services/oauth2/authorize"; - private static final String ACCESS_TOKEN_URL = "https://%s.salesforce.com/services/oauth2/token"; - - public SalesforceOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - SalesforceOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(String.format(AUTHORIZE_URL, getEnvironment(inputOAuthConfiguration))) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to 
format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return String.format(ACCESS_TOKEN_URL, getEnvironment(inputOAuthConfiguration)); - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)) - .put("grant_type", "authorization_code") - .build(); - } - - @Override - public List getDefaultOAuthOutputPath() { - return List.of(); - } - - private String getEnvironment(JsonNode inputOAuthConfiguration) { - var isSandbox = inputOAuthConfiguration.get("is_sandbox"); - if (isSandbox == null) { - return "login"; - } - return (isSandbox.asBoolean() == true) ? "test" : "login"; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ShopifyOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ShopifyOAuthFlow.java deleted file mode 100644 index 78744342cb47..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ShopifyOAuthFlow.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class ShopifyOAuthFlow extends BaseOAuth2Flow { - - private static final List SCOPES = Arrays.asList( - "read_themes", - "read_orders", - "read_all_orders", - "read_assigned_fulfillment_orders", - "read_content", - "read_customers", - "read_discounts", - "read_draft_orders", - "read_fulfillments", - "read_locales", - "read_locations", - "read_price_rules", - "read_products", - "read_product_listings", - "read_shopify_payments_payouts"); - - public String getScopes() { - return String.join(",", SCOPES); - } - - public ShopifyOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public ShopifyOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - // getting shop value from user's config - final String shop = getConfigValueUnsafe(inputOAuthConfiguration, "shop"); - // building consent url - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost(shop + ".myshopify.com") - .setPath("admin/oauth/authorize") - .addParameter("client_id", clientId) - 
.addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("grant_options[]", "value") - .addParameter("scope", getScopes()); - - try { - return builder.build().toString(); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("code", authCode) - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - // getting shop value from user's config - final String shop = getConfigValueUnsafe(inputOAuthConfiguration, "shop"); - // building the access_token_url - return "https://" + shop + ".myshopify.com/admin/oauth/access_token"; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SlackOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SlackOAuthFlow.java deleted file mode 100644 index b3b2e140b4ed..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SlackOAuthFlow.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class SlackOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://slack.com/oauth/authorize"; - private static final String ACCESS_TOKEN_URL = "https://slack.com/api/oauth.access"; - - public SlackOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public SlackOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. - */ - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("scope", "read") - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - /** - * Returns the URL where to retrieve the access token from. 
- */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - if (data.has("access_token")) { - return Map.of("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", ACCESS_TOKEN_URL)); - } - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SmartsheetsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SmartsheetsOAuthFlow.java deleted file mode 100644 index 9c93f9ccc7b8..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SmartsheetsOAuthFlow.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.time.Clock; -import java.time.Instant; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class SmartsheetsOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://app.smartsheet.com/b/authorize"; - private static final String ACCESS_TOKEN_URL = "https://api.smartsheet.com/2.0/token"; - private final Clock clock; - - public SmartsheetsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - this.clock = Clock.systemUTC(); - } - - @VisibleForTesting - public SmartsheetsOAuthFlow(final 
ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - this.clock = Clock.systemUTC(); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("response_type", "code") - .addParameter("state", getState()) - .addParameter("scope", "READ_SHEETS") - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - if (data.has("refresh_token")) { - result.put("refresh_token", data.get("refresh_token").asText()); - } else { - throw new IOException(String.format("Missing 'refresh_token' in query params from %s", accessTokenUrl)); - } - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - if (data.has("expires_in")) { - Instant expires_in = Instant.now(this.clock).plusSeconds(data.get("expires_in").asInt()); - result.put("token_expiry_date", expires_in.toString()); - } else { - throw new IOException(String.format("Missing 'expires_in' in query params from %s", accessTokenUrl)); - } - return result; - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - 
.put("grant_type", "authorization_code") - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("code", authCode) - .build(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SnapchatMarketingOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SnapchatMarketingOAuthFlow.java deleted file mode 100644 index 13de36c90cdf..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SnapchatMarketingOAuthFlow.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://marketingapi.snapchat.com/docs/#authentication - */ -public class SnapchatMarketingOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://accounts.snapchat.com/login/oauth2/authorize"; - private static final String ACCESS_TOKEN_URL = "https://accounts.snapchat.com/login/oauth2/access_token"; - private static final String SCOPES = "snapchat-marketing-api"; - - public SnapchatMarketingOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - SnapchatMarketingOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID 
definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("scope", SCOPES) - .addParameter("state", getState()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)) - .put("grant_type", "authorization_code") - .build(); - } - - @Override - public List getDefaultOAuthOutputPath() { - return List.of(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java deleted file mode 100644 index 221cb1858fa9..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class SourceSnowflakeOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://%s/oauth/authorize"; - private static final String ACCESS_TOKEN_URL = "https://%s/oauth/token-request"; - - public SourceSnowflakeOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public SourceSnowflakeOAuthFlow(ConfigRepository configRepository, HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(UUID definitionId, - String clientId, - String redirectUrl, - JsonNode inputOAuthConfiguration) - throws IOException { - try { - String consentUrl = new URIBuilder(String.format(AUTHORIZE_URL, extractUrl(inputOAuthConfiguration))) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("state", getState()) - .build().toString(); - String providedRole = extractRole(inputOAuthConfiguration); - return providedRole.isEmpty() - ? 
consentUrl - : getConsentUrlWithScopeRole(consentUrl, providedRole); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - private static String getConsentUrlWithScopeRole(String consentUrl, String providedRole) throws URISyntaxException { - return new URIBuilder(consentUrl) - .addParameter("scope", "session:role:" + providedRole) - .build().toString(); - } - - @Override - protected String getAccessTokenUrl(JsonNode inputOAuthConfiguration) { - return String.format(ACCESS_TOKEN_URL, extractUrl(inputOAuthConfiguration)); - } - - @Override - protected String extractCodeParameter(Map queryParams) throws IOException { - return super.extractCodeParameter(queryParams); - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("redirect_uri", redirectUrl) - .build(); - } - - @Override - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode oAuthParamConfig) - throws IOException { - final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); - final byte[] authorization = Base64.getEncoder() - .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); - final HttpRequest request = HttpRequest.newBuilder() - .POST(HttpRequest.BodyPublishers - .ofString(tokenReqContentType.getConverter().apply( - getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) - .uri(URI.create(accessTokenUrl)) - .header("Content-Type", tokenReqContentType.getContentType()) - .header("Accept", "application/json") - .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8)) - .build(); - try { - final 
HttpResponse response = httpClient.send(request, - HttpResponse.BodyHandlers.ofString()); - - return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); - } catch (final InterruptedException e) { - throw new IOException("Failed to complete OAuth flow", e); - } - } - - @Override - protected Map extractOAuthOutput(JsonNode data, String accessTokenUrl) - throws IOException { - final Map result = new HashMap<>(); - // access_token is valid for only 10 minutes - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", - accessTokenUrl)); - } - - if (data.has("refresh_token")) { - result.put("refresh_token", data.get("refresh_token").asText()); - } else { - throw new IOException(String.format("Missing 'refresh_token' in query params from %s", - accessTokenUrl)); - } - if (data.has("username")) { - result.put("username", data.get("username").asText()); - } else { - throw new IOException(String.format("Missing 'username' in query params from %s", - accessTokenUrl)); - } - return result; - } - - private String extractUrl(JsonNode inputOAuthConfiguration) { - var url = inputOAuthConfiguration.get("host"); - return url == null ? "snowflakecomputing.com" : url.asText(); - } - - private String extractRole(JsonNode inputOAuthConfiguration) { - var role = inputOAuthConfiguration.get("role"); - return role == null ? "" : role.asText(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SquareOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SquareOAuthFlow.java deleted file mode 100644 index 398e7bf0636a..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SquareOAuthFlow.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.URLDecoder; -import java.net.http.HttpClient; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import org.apache.http.client.utils.URIBuilder; - -public class SquareOAuthFlow extends BaseOAuth2Flow { - - private static final List SCOPES = Arrays.asList( - "CUSTOMERS_READ", - "EMPLOYEES_READ", - "ITEMS_READ", - "MERCHANT_PROFILE_READ", - "ORDERS_READ", - "PAYMENTS_READ", - "TIMECARDS_READ" - // OAuth Permissions: - // https://developer.squareup.com/docs/oauth-api/square-permissions - // https://developer.squareup.com/reference/square/enums/OAuthPermission - // "DISPUTES_READ", - // "GIFTCARDS_READ", - // "INVENTORY_READ", - // "INVOICES_READ", - // "TIMECARDS_SETTINGS_READ", - // "LOYALTY_READ", - // "ONLINE_STORE_SITE_READ", - // "ONLINE_STORE_SNIPPETS_READ", - // "SUBSCRIPTIONS_READ", - ); - private static final String AUTHORIZE_URL = "https://connect.squareup.com/oauth2/authorize"; - private static final String ACCESS_TOKEN_URL = "https://connect.squareup.com/oauth2/token"; - - public SquareOAuthFlow(ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public SquareOAuthFlow(ConfigRepository configRepository, - HttpClient httpClient, - Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final 
JsonNode inputOAuthConfiguration) - throws IOException { - try { - // Need to have decoded format, otherwise square fails saying that scope is incorrect - return URLDecoder.decode(new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("scope", String.join("+", SCOPES)) - .addParameter("session", "False") - .addParameter("state", getState()) - .build().toString(), StandardCharsets.UTF_8); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - String scopes = SCOPES.stream() - .map(name -> ('"' + name + '"')) - .collect(Collectors.joining(",")); - scopes = '[' + scopes + ']'; - - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .put("scopes", scopes) - .build(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/StravaOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/StravaOAuthFlow.java deleted file mode 100644 index 5a93e7030bbe..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/StravaOAuthFlow.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -public class StravaOAuthFlow extends BaseOAuth2Flow { - - private static final String AUTHORIZE_URL = "https://www.strava.com/oauth/authorize"; - private static final String ACCESS_TOKEN_URL = "https://www.strava.com/oauth/token"; - - public StravaOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - public StravaOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier, TOKEN_REQUEST_CONTENT_TYPE.JSON); - } - - /** - * Depending on the OAuth flow implementation, the URL to grant user's consent may differ, - * especially in the query parameters to be provided. This function should generate such consent URL - * accordingly. 
- * - * @param definitionId The configured definition ID of this client - * @param clientId The configured client ID - * @param redirectUrl the redirect URL - */ - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - try { - return new URIBuilder(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("scope", getScopes()) - .addParameter("response_type", "code") - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("client_id", clientId) - .put("redirect_uri", redirectUrl) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .build(); - } - - private String getScopes() { - return "activity:read_all"; - } - - /** - * Returns the URL where to retrieve the access token from. - * - */ - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SurveymonkeyOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SurveymonkeyOAuthFlow.java deleted file mode 100644 index cf087ff2b114..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SurveymonkeyOAuthFlow.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * https://developer.surveymonkey.com/api/v3/?#authentication - */ -public class SurveymonkeyOAuthFlow extends BaseOAuth2Flow { - - /** - * https://developer.surveymonkey.com/api/v3/#access-url - */ - private static final String API_ACCESS_URL_USA = "https://api.surveymonkey.com/"; - private static final String API_ACCESS_URL_EU = "https://api.eu.surveymonkey.com/"; - private static final String API_ACCESS_URL_CA = "https://api.surveymonkey.ca/"; - - private static final String EUROPE = "Europe"; - private static final String CANADA = "Canada"; - private static final String USA = "USA"; - - private static final String AUTHORIZE_URL = "oauth/authorize"; - private static final String ACCESS_TOKEN_URL = "oauth/token"; - - public SurveymonkeyOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - SurveymonkeyOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - protected String getBaseURLByOrigin(final JsonNode inputOAuthConfiguration) throws Error { - final String origin = getConfigValueUnsafe(inputOAuthConfiguration, "origin"); - if (EUROPE.equals(origin)) { - return API_ACCESS_URL_EU; - } else if (CANADA.equals(origin)) { - return API_ACCESS_URL_CA; - } else if 
(USA.equals(origin)) { - return API_ACCESS_URL_USA; - } else { - throw new Error("Unknown Origin: " + origin); - } - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - final String baseUrl = getBaseURLByOrigin(inputOAuthConfiguration); - return new URIBuilder(baseUrl) - .setPath(AUTHORIZE_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("state", getState()) - .build().toString(); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - final String baseURL = getBaseURLByOrigin(inputOAuthConfiguration); - return baseURL + ACCESS_TOKEN_URL; - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)) - .put("grant_type", "authorization_code") - .build(); - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) { - Preconditions.checkArgument(data.has("access_token"), "Missing 'access_token' in query params from %s", ACCESS_TOKEN_URL); - return Map.of("access_token", data.get("access_token").asText()); - } - - @Override - public List getDefaultOAuthOutputPath() { - return List.of(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/TikTokMarketingOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/TikTokMarketingOAuthFlow.java deleted file mode 100644 index cad1b8b4e674..000000000000 --- 
a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/TikTokMarketingOAuthFlow.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://ads.tiktok.com/marketing_api/docs?id=1701890912382977&is_new_connect=0&is_new_user=0&rid=sta6fe2yww - */ -public class TikTokMarketingOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://ads.tiktok.com/open_api/v1.2/oauth2/access_token/"; - - @Override - protected String getClientIdUnsafe(final JsonNode oauthConfig) { - return getConfigValueUnsafe(oauthConfig, "app_id"); - } - - @Override - protected String getClientSecretUnsafe(final JsonNode oauthConfig) { - return getConfigValueUnsafe(oauthConfig, "secret"); - } - - public TikTokMarketingOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public TikTokMarketingOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String appId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost("ads.tiktok.com") - 
.setPath("marketing_api/auth") - // required - .addParameter("app_id", appId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()); - - try { - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(final String appId, - final String secret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - // required - .put("auth_code", authCode) - .put("app_id", appId) - .put("secret", secret) - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected String extractCodeParameter(final Map queryParams) throws IOException { - if (queryParams.containsKey("auth_code")) { - return (String) queryParams.get("auth_code"); - } else if (queryParams.containsKey("code")) { - return (String) queryParams.get("code"); - } else { - throw new IOException("Undefined 'auth_code'/'code' from consent redirected url."); - } - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if ((data.has("data")) && (data.get("data").has("access_token"))) { - result.put("access_token", data.get("data").get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/TrelloOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/TrelloOAuthFlow.java deleted file mode 100644 index 732a5626a1c2..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/TrelloOAuthFlow.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all 
rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.api.client.auth.oauth.OAuthAuthorizeTemporaryTokenUrl; -import com.google.api.client.auth.oauth.OAuthCredentialsResponse; -import com.google.api.client.auth.oauth.OAuthGetAccessToken; -import com.google.api.client.auth.oauth.OAuthGetTemporaryToken; -import com.google.api.client.auth.oauth.OAuthHmacSigner; -import com.google.api.client.http.HttpTransport; -import com.google.api.client.http.javanet.NetHttpTransport; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.protocol.models.OAuthConfigSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.UUID; - -/** - * Following docs from - * https://developer.atlassian.com/cloud/trello/guides/rest-api/authorization/#using-basic-oauth - */ -public class TrelloOAuthFlow extends BaseOAuthFlow { - - private static final String REQUEST_TOKEN_URL = "https://trello.com/1/OAuthGetRequestToken"; - private static final String AUTHENTICATE_URL = "https://trello.com/1/OAuthAuthorizeToken"; - private static final String ACCESS_TOKEN_URL = "https://trello.com/1/OAuthGetAccessToken"; - private static final String OAUTH_VERIFIER = "oauth_verifier"; - - // Airbyte webserver creates new TrelloOAuthFlow class instance for every API - // call. Since oAuth 1.0 workflow requires data from previous step to build - // correct signature. - // Use static signer instance to share token secret for oAuth flow between - // get_consent_url and complete_oauth API calls. 
- private static final OAuthHmacSigner signer = new OAuthHmacSigner(); - private final HttpTransport transport; - - public TrelloOAuthFlow(final ConfigRepository configRepository) { - super(configRepository); - transport = new NetHttpTransport(); - } - - @VisibleForTesting - public TrelloOAuthFlow(final ConfigRepository configRepository, final HttpTransport transport) { - super(configRepository); - this.transport = transport; - } - - @Override - public String getSourceConsentUrl(final UUID workspaceId, - final UUID sourceDefinitionId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final OAuthConfigSpecification oauthConfigSpecification) - throws IOException, ConfigNotFoundException { - final JsonNode oAuthParamConfig = getSourceOAuthParamConfig(workspaceId, sourceDefinitionId); - return getConsentUrl(oAuthParamConfig, redirectUrl); - } - - @Override - public String getDestinationConsentUrl(final UUID workspaceId, - final UUID destinationDefinitionId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final OAuthConfigSpecification oauthConfigSpecification) - throws IOException, ConfigNotFoundException { - final JsonNode oAuthParamConfig = getDestinationOAuthParamConfig(workspaceId, destinationDefinitionId); - return getConsentUrl(oAuthParamConfig, redirectUrl); - } - - private String getConsentUrl(final JsonNode oAuthParamConfig, final String redirectUrl) throws IOException { - final String clientKey = getClientIdUnsafe(oAuthParamConfig); - final String clientSecret = getClientSecretUnsafe(oAuthParamConfig); - final OAuthGetTemporaryToken oAuthGetTemporaryToken = new OAuthGetTemporaryToken(REQUEST_TOKEN_URL); - signer.clientSharedSecret = clientSecret; - signer.tokenSharedSecret = null; - oAuthGetTemporaryToken.signer = signer; - oAuthGetTemporaryToken.callback = redirectUrl; - oAuthGetTemporaryToken.transport = transport; - oAuthGetTemporaryToken.consumerKey = clientKey; - final OAuthCredentialsResponse 
temporaryTokenResponse = oAuthGetTemporaryToken.execute(); - - final OAuthAuthorizeTemporaryTokenUrl oAuthAuthorizeTemporaryTokenUrl = new OAuthAuthorizeTemporaryTokenUrl(AUTHENTICATE_URL); - oAuthAuthorizeTemporaryTokenUrl.temporaryToken = temporaryTokenResponse.token; - signer.tokenSharedSecret = temporaryTokenResponse.tokenSecret; - return oAuthAuthorizeTemporaryTokenUrl.build(); - } - - @Override - @Deprecated - public Map completeSourceOAuth(final UUID workspaceId, - final UUID sourceDefinitionId, - final Map queryParams, - final String redirectUrl) - throws IOException, ConfigNotFoundException { - final JsonNode oAuthParamConfig = getSourceOAuthParamConfig(workspaceId, sourceDefinitionId); - return formatOAuthOutput(oAuthParamConfig, internalCompleteOAuth(oAuthParamConfig, queryParams), getDefaultOAuthOutputPath()); - } - - @Override - @Deprecated - public Map completeDestinationOAuth(final UUID workspaceId, - final UUID destinationDefinitionId, - final Map queryParams, - final String redirectUrl) - throws IOException, ConfigNotFoundException { - final JsonNode oAuthParamConfig = getDestinationOAuthParamConfig(workspaceId, destinationDefinitionId); - return formatOAuthOutput(oAuthParamConfig, internalCompleteOAuth(oAuthParamConfig, queryParams), getDefaultOAuthOutputPath()); - } - - @Override - public Map completeSourceOAuth(final UUID workspaceId, - final UUID sourceDefinitionId, - final Map queryParams, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final OAuthConfigSpecification oAuthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException { - final JsonNode oAuthParamConfig = getDestinationOAuthParamConfig(workspaceId, sourceDefinitionId); - return formatOAuthOutput(oAuthParamConfig, internalCompleteOAuth(oAuthParamConfig, queryParams), oAuthConfigSpecification); - } - - @Override - public Map completeDestinationOAuth(final UUID workspaceId, - final UUID destinationDefinitionId, - final Map 
queryParams, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final OAuthConfigSpecification oAuthConfigSpecification) - throws IOException, ConfigNotFoundException, JsonValidationException { - final JsonNode oAuthParamConfig = getDestinationOAuthParamConfig(workspaceId, destinationDefinitionId); - return formatOAuthOutput(oAuthParamConfig, internalCompleteOAuth(oAuthParamConfig, queryParams), oAuthConfigSpecification); - } - - private Map internalCompleteOAuth(final JsonNode oAuthParamConfig, final Map queryParams) - throws IOException { - final String clientKey = getClientIdUnsafe(oAuthParamConfig); - if (!queryParams.containsKey(OAUTH_VERIFIER) || !queryParams.containsKey("oauth_token")) { - throw new IOException( - "Undefined " + (!queryParams.containsKey(OAUTH_VERIFIER) ? OAUTH_VERIFIER : "oauth_token") + " from consent redirected url."); - } - final String temporaryToken = (String) queryParams.get("oauth_token"); - final String verificationCode = (String) queryParams.get(OAUTH_VERIFIER); - final OAuthGetAccessToken oAuthGetAccessToken = new OAuthGetAccessToken(ACCESS_TOKEN_URL); - oAuthGetAccessToken.signer = signer; - oAuthGetAccessToken.transport = transport; - oAuthGetAccessToken.temporaryToken = temporaryToken; - oAuthGetAccessToken.verifier = verificationCode; - oAuthGetAccessToken.consumerKey = clientKey; - final OAuthCredentialsResponse accessTokenResponse = oAuthGetAccessToken.execute(); - final String accessToken = accessTokenResponse.token; - return Map.of("token", accessToken, "key", clientKey); - } - - @Override - public List getDefaultOAuthOutputPath() { - return List.of(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskChatOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskChatOAuthFlow.java deleted file mode 100644 index 7834c988e0a6..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskChatOAuthFlow.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - 
* Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://developer.zendesk.com/documentation/live-chat/getting-started/auth/ - */ -public class ZendeskChatOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://www.zopim.com/oauth2/token"; - - public ZendeskChatOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public ZendeskChatOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - // getting subdomain value from user's config - final String subdomain = getConfigValueUnsafe(inputOAuthConfiguration, "subdomain"); - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost("www.zopim.com") - .setPath("oauth2/authorizations/new") - // required - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("scope", "read chat") - .addParameter("state", getState()); - - try { - // applying optional parameter of subdomain, if there is any value - if 
(!subdomain.isEmpty()) { - builder.addParameter("subdomain", subdomain); - } - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("redirect_uri", redirectUrl) - .put("scope", "read") - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - // getting out refresh_token - if (data.has("refresh_token")) { - result.put("refresh_token", data.get("refresh_token").asText()); - } else { - throw new IOException(String.format("Missing 'refresh_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskSunshineOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskSunshineOAuthFlow.java deleted file mode 100644 index 9be070ba17cf..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskSunshineOAuthFlow.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://developer.zendesk.com/api-reference/custom-data/introduction/#authentication - */ -public class ZendeskSunshineOAuthFlow extends BaseOAuth2Flow { - - public ZendeskSunshineOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public ZendeskSunshineOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - // getting subdomain value from user's config - final String subdomain = getConfigValueUnsafe(inputOAuthConfiguration, "subdomain"); - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost(String.format("%s.zendesk.com", subdomain)) - .setPath("oauth/authorizations/new") - // required - .addParameter("response_type", "code") - .addParameter("redirect_uri", redirectUrl) - .addParameter("client_id", clientId) - .addParameter("scope", "read") - .addParameter("state", getState()); - - try { - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override 
- protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("redirect_uri", redirectUrl) - .put("scope", "read") - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - // getting subdomain value from user's config - final String subdomain = getConfigValueUnsafe(inputOAuthConfiguration, "subdomain"); - - return String.format("https://%s.zendesk.com/oauth/tokens", subdomain); - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskSupportOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskSupportOAuthFlow.java deleted file mode 100644 index 352ec364f7ff..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskSupportOAuthFlow.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://support.zendesk.com/hc/en-us/articles/4408845965210-Using-OAuth-authentication-with-your-application - */ -public class ZendeskSupportOAuthFlow extends BaseOAuth2Flow { - - public ZendeskSupportOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public ZendeskSupportOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - // getting subdomain value from user's config - final String subdomain = getConfigValueUnsafe(inputOAuthConfiguration, "subdomain"); - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost(subdomain + ".zendesk.com") - .setPath("oauth/authorizations/new") - // required - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("scope", "read") - .addParameter("state", getState()); - - try { - return builder.build().toString(); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - 
protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("redirect_uri", redirectUrl) - .put("scope", "read") - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - // getting subdomain value from user's config - final String subdomain = getConfigValueUnsafe(inputOAuthConfiguration, "subdomain"); - return "https://" + subdomain + ".zendesk.com/oauth/tokens"; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskTalkOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskTalkOAuthFlow.java deleted file mode 100644 index c2dfffd8cc01..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ZendeskTalkOAuthFlow.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://support.zendesk.com/hc/en-us/articles/4408845965210-Using-OAuth-authentication-with-your-application - */ -public class ZendeskTalkOAuthFlow extends BaseOAuth2Flow { - - public ZendeskTalkOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - public ZendeskTalkOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - - // getting subdomain value from user's config - final String subdomain = getConfigValueUnsafe(inputOAuthConfiguration, "subdomain"); - - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost(subdomain + ".zendesk.com") - .setPath("oauth/authorizations/new") - // required - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("scope", "read") - .addParameter("state", getState()); - - try { - return builder.build().toString(); - } catch (URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - } - } - - @Override - 
protected Map getAccessTokenQueryParameters(String clientId, - String clientSecret, - String authCode, - String redirectUrl) { - return ImmutableMap.builder() - // required - .put("grant_type", "authorization_code") - .put("code", authCode) - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("redirect_uri", redirectUrl) - .put("scope", "read") - .build(); - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - // getting subdomain value from user's config - final String subdomain = getConfigValueUnsafe(inputOAuthConfiguration, "subdomain"); - return "https://" + subdomain + ".zendesk.com/oauth/tokens"; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = new HashMap<>(); - // getting out access_token - if (data.has("access_token")) { - result.put("access_token", data.get("access_token").asText()); - } else { - throw new IOException(String.format("Missing 'access_token' in query params from %s", accessTokenUrl)); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookMarketingOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookMarketingOAuthFlow.java deleted file mode 100644 index 980ec95682a9..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookMarketingOAuthFlow.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.facebook; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class FacebookMarketingOAuthFlow extends FacebookOAuthFlow { - - private static final String SCOPES = "ads_management,ads_read,read_insights,business_management"; - - public FacebookMarketingOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - FacebookMarketingOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScopes() { - return SCOPES; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookOAuthFlow.java deleted file mode 100644 index bdfff4542c64..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookOAuthFlow.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.facebook; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Preconditions; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from - * https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow - */ -public abstract class FacebookOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://graph.facebook.com/v12.0/oauth/access_token"; - private static final String AUTH_CODE_TOKEN_URL = "https://www.facebook.com/v12.0/dialog/oauth"; - private static final String ACCESS_TOKEN = "access_token"; - - public FacebookOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - FacebookOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - protected abstract String getScopes(); - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - try { - return new URIBuilder(AUTH_CODE_TOKEN_URL) - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("state", getState()) - .addParameter("scope", getScopes()) - .build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format Consent URL for OAuth flow", e); - 
} - } - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) { - // Facebook does not have refresh token but calls it "long lived access token" instead: - // see https://developers.facebook.com/docs/facebook-login/access-tokens/refreshing - Preconditions.checkArgument(data.has(ACCESS_TOKEN), "Missing 'access_token' in query params from %s", ACCESS_TOKEN_URL); - return Map.of(ACCESS_TOKEN, data.get(ACCESS_TOKEN).asText()); - } - - @Override - protected Map completeOAuthFlow(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl, - final JsonNode inputOAuthConfiguration, - final JsonNode oAuthParamConfig) - throws IOException { - // Access tokens generated via web login are short-lived tokens - // they arre valid for 1 hour and need to be exchanged for long-lived access token - // https://developers.facebook.com/docs/facebook-login/access-tokens (Short-Term Tokens and - // https://developers.facebook.com/docs/instagram-basic-display-api/overview#short-lived-access-tokens - // Long-Term Tokens section) - - final Map data = - super.completeOAuthFlow(clientId, clientSecret, authCode, redirectUrl, inputOAuthConfiguration, oAuthParamConfig); - Preconditions.checkArgument(data.containsKey(ACCESS_TOKEN)); - final String shortLivedAccessToken = (String) data.get(ACCESS_TOKEN); - final String longLivedAccessToken = getLongLivedAccessToken(clientId, clientSecret, shortLivedAccessToken); - return Map.of(ACCESS_TOKEN, longLivedAccessToken); - } - - protected URI createLongLivedTokenURI(final String clientId, final String clientSecret, final String shortLivedAccessToken) - throws URISyntaxException { - // Exchange Short-lived Access token for Long-lived one - // https://developers.facebook.com/docs/facebook-login/access-tokens/refreshing - // It's valid for 60 days 
and resreshed once per day if using in requests. - // If no requests are made, the token will expire after about 60 days and - // the person will have to go through the login flow again to get a new - // token. - return new URIBuilder(ACCESS_TOKEN_URL) - .addParameter("client_secret", clientSecret) - .addParameter("client_id", clientId) - .addParameter("grant_type", "fb_exchange_token") - .addParameter("fb_exchange_token", shortLivedAccessToken) - .build(); - } - - protected String getLongLivedAccessToken(final String clientId, final String clientSecret, final String shortLivedAccessToken) throws IOException { - try { - final URI uri = createLongLivedTokenURI(clientId, clientSecret, shortLivedAccessToken); - final HttpRequest request = HttpRequest.newBuilder() - .GET() - .uri(uri) - .build(); - final HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - final JsonNode responseJson = Jsons.deserialize(response.body()); - Preconditions.checkArgument(responseJson.hasNonNull(ACCESS_TOKEN), "%s response should have access_token", responseJson); - return responseJson.get(ACCESS_TOKEN).asText(); - } catch (final InterruptedException | URISyntaxException e) { - throw new IOException("Failed to complete OAuth flow", e); - } - } - - @Override - public List getDefaultOAuthOutputPath() { - return List.of(); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookPagesOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookPagesOAuthFlow.java deleted file mode 100644 index c379085f46d5..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/FacebookPagesOAuthFlow.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.facebook; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class FacebookPagesOAuthFlow extends FacebookOAuthFlow { - - private static final String SCOPES = "pages_manage_ads,pages_manage_metadata,pages_read_engagement,pages_read_user_content"; - - public FacebookPagesOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - FacebookPagesOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScopes() { - return SCOPES; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java deleted file mode 100644 index d688e7fe625b..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.facebook; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -// Instagram Graph API require Facebook API User token -public class InstagramOAuthFlow extends FacebookMarketingOAuthFlow { - - private static final String SCOPES = "ads_management,instagram_basic,instagram_manage_insights,pages_show_list,pages_read_engagement"; - - public InstagramOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - InstagramOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScopes() { - return SCOPES; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlow.java deleted file mode 100644 index f465a88c16b9..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlow.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class DestinationGoogleSheetsOAuthFlow extends GoogleOAuthFlow { - - @VisibleForTesting - static final String SCOPE_URL = "https://www.googleapis.com/auth/spreadsheets https://www.googleapis.com/auth/drive"; - - public DestinationGoogleSheetsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - DestinationGoogleSheetsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScope() { - return SCOPE_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlow.java deleted file mode 100644 index 52dba948462d..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlow.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class GoogleAdsOAuthFlow extends GoogleOAuthFlow { - - @VisibleForTesting - static final String SCOPE_URL = "https://www.googleapis.com/auth/adwords"; - - public GoogleAdsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - GoogleAdsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScope() { - return SCOPE_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAnalyticsPropertyIdOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAnalyticsPropertyIdOAuthFlow.java deleted file mode 100644 index 51cfb2afb6eb..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAnalyticsPropertyIdOAuthFlow.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class GoogleAnalyticsPropertyIdOAuthFlow extends GoogleOAuthFlow { - - public static final String SCOPE_URL = "https://www.googleapis.com/auth/analytics.readonly"; - - public GoogleAnalyticsPropertyIdOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - GoogleAnalyticsPropertyIdOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScope() { - return SCOPE_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAnalyticsViewIdOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAnalyticsViewIdOAuthFlow.java deleted file mode 100644 index 366634ba87e2..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleAnalyticsViewIdOAuthFlow.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class GoogleAnalyticsViewIdOAuthFlow extends GoogleOAuthFlow { - - public static final String SCOPE_URL = "https://www.googleapis.com/auth/analytics.readonly"; - - public GoogleAnalyticsViewIdOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - GoogleAnalyticsViewIdOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScope() { - return SCOPE_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleOAuthFlow.java deleted file mode 100644 index ef35bee5dd40..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleOAuthFlow.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuth2Flow; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.UUID; -import java.util.function.Supplier; -import org.apache.http.client.utils.URIBuilder; - -/** - * Following docs from https://developers.google.com/identity/protocols/oauth2/web-server - */ -public abstract class GoogleOAuthFlow extends BaseOAuth2Flow { - - private static final String ACCESS_TOKEN_URL = "https://oauth2.googleapis.com/token"; - - public GoogleOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - GoogleOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String formatConsentUrl(final UUID definitionId, - final String clientId, - final String redirectUrl, - final JsonNode inputOAuthConfiguration) - throws IOException { - final URIBuilder builder = new URIBuilder() - .setScheme("https") - .setHost("accounts.google.com") - .setPath("o/oauth2/v2/auth") - .addParameter("client_id", clientId) - .addParameter("redirect_uri", redirectUrl) - .addParameter("response_type", "code") - .addParameter("scope", getScope()) - // recommended - .addParameter("access_type", "offline") - .addParameter("state", getState()) - // optional - .addParameter("include_granted_scopes", "true") - // .addParameter("login_hint", "user_email") - .addParameter("prompt", "consent"); - try { - return builder.build().toString(); - } catch (final URISyntaxException e) { - throw new IOException("Failed to format 
Consent URL for OAuth flow", e); - } - } - - /** - * @return the scope for the specific google oauth implementation. - */ - protected abstract String getScope(); - - @Override - protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { - return ACCESS_TOKEN_URL; - } - - @Override - protected Map getAccessTokenQueryParameters(final String clientId, - final String clientSecret, - final String authCode, - final String redirectUrl) { - return ImmutableMap.builder() - .put("client_id", clientId) - .put("client_secret", clientSecret) - .put("code", authCode) - .put("grant_type", "authorization_code") - .put("redirect_uri", redirectUrl) - .build(); - } - - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) throws IOException { - final Map result = super.extractOAuthOutput(data, accessTokenUrl); - if (data.has("access_token")) { - // google also returns an access token the first time you complete oauth flow - result.put("access_token", data.get("access_token").asText()); - } - return result; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlow.java deleted file mode 100644 index 2c4acb6bff44..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlow.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.List; -import java.util.function.Supplier; - -public class GoogleSearchConsoleOAuthFlow extends GoogleOAuthFlow { - - @VisibleForTesting - static final String SCOPE_URL = "https://www.googleapis.com/auth/webmasters.readonly"; - - public GoogleSearchConsoleOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - GoogleSearchConsoleOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScope() { - return SCOPE_URL; - } - - @Override - @Deprecated - public List getDefaultOAuthOutputPath() { - return List.of("authorization"); - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlow.java deleted file mode 100644 index e0f661b8bfa0..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlow.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class GoogleSheetsOAuthFlow extends GoogleOAuthFlow { - - // space-delimited string for multiple scopes, see: - // https://datatracker.ietf.org/doc/html/rfc6749#section-3.3 - @VisibleForTesting - static final String SCOPE_URL = "https://www.googleapis.com/auth/spreadsheets.readonly https://www.googleapis.com/auth/drive.readonly"; - - public GoogleSheetsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - GoogleSheetsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScope() { - return SCOPE_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsBusinessOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsBusinessOAuthFlow.java deleted file mode 100644 index 3fdde87e7482..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsBusinessOAuthFlow.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class YouTubeAnalyticsBusinessOAuthFlow extends GoogleOAuthFlow { - - private static final String SCOPE_URL = - "https://www.googleapis.com/auth/yt-analytics.readonly https://www.googleapis.com/auth/yt-analytics-monetary.readonly"; - - public YouTubeAnalyticsBusinessOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - YouTubeAnalyticsBusinessOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScope() { - return SCOPE_URL; - } - -} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsOAuthFlow.java deleted file mode 100644 index d6ebd37bbb85..000000000000 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsOAuthFlow.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.config.persistence.ConfigRepository; -import java.net.http.HttpClient; -import java.util.function.Supplier; - -public class YouTubeAnalyticsOAuthFlow extends GoogleOAuthFlow { - - private static final String SCOPE_URL = "https://www.googleapis.com/auth/yt-analytics.readonly"; - - public YouTubeAnalyticsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { - super(configRepository, httpClient); - } - - @VisibleForTesting - YouTubeAnalyticsOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { - super(configRepository, httpClient, stateSupplier); - } - - @Override - protected String getScope() { - return SCOPE_URL; - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java deleted file mode 100644 index c08210be1a91..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.facebook; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.oauth.flows.OAuthFlowIntegrationTest; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class FacebookOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - protected static final Path CREDENTIALS_PATH = Path.of("secrets/facebook_marketing.json"); - protected static final String REDIRECT_URL = "http://localhost:9000/auth_flow"; - - @Override - protected Path getCredentialsPath() { - return CREDENTIALS_PATH; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new FacebookMarketingOAuthFlow(configRepository, httpClient); - } - - @Override - @BeforeEach - public void setup() throws IOException { - super.setup(); - } - - @Override - protected int getServerListeningPort() { - return 9000; - } - - @Test - public void testFullFacebookOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = 
Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build())))); - final String url = flow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - waitForResponse(20); - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("access_token")); - assertTrue(params.get("access_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java deleted file mode 100644 index db87552cc3e7..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class GithubOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - protected static final Path CREDENTIALS_PATH = Path.of("secrets/github.json"); - protected static final String REDIRECT_URL = "http://localhost:8000/auth_flow"; - protected static final int SERVER_LISTENING_PORT = 8000; - - @Override - protected Path getCredentialsPath() { - return CREDENTIALS_PATH; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new GithubOAuthFlow(configRepository, httpClient); - } - - @Override - protected int getServerListeningPort() { - return SERVER_LISTENING_PORT; - } - - @Override - @BeforeEach - public void setup() throws IOException { - super.setup(); - } - - @Test - public void testFullGithubOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = 
Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build())))); - final String url = flow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... - while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("access_token")); - assertTrue(params.get("access_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GitlabOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GitlabOAuthFlowIntegrationTest.java deleted file mode 100644 index a33ff437ffa8..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GitlabOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class GitlabOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - protected static final Path CREDENTIALS_PATH = Path.of("secrets/gitlab.json"); - protected static final String REDIRECT_URL = "http://localhost:8000/auth_flow"; - protected static final int SERVER_LISTENING_PORT = 8000; - - @Override - protected Path getCredentialsPath() { - return CREDENTIALS_PATH; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(ConfigRepository configRepository, HttpClient httpClient) { - return new GitlabOAuthFlow(configRepository, httpClient); - } - - @Override - protected int getServerListeningPort() { - return SERVER_LISTENING_PORT; - } - - @BeforeEach - public void setup() throws IOException { - super.setup(); - } - - @Test - public void testFullGitlabOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - 
when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build())))); - final String url = flow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... - while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("access_token")); - assertTrue(params.get("access_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java deleted file mode 100644 index 71c784acf110..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class IntercomOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - protected static final Path CREDENTIALS_PATH = Path.of("secrets/intercom.json"); - protected static final String REDIRECT_URL = "http://localhost:8000/code"; - protected static final int SERVER_LISTENING_PORT = 8000; - - @Override - protected Path getCredentialsPath() { - return CREDENTIALS_PATH; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new IntercomOAuthFlow(configRepository, httpClient); - } - - @Override - protected int getServerListeningPort() { - return SERVER_LISTENING_PORT; - } - - @Override - @BeforeEach - public void setup() throws IOException { - super.setup(); - } - - @Test - public void testFullIntercomOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = 
Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode( - Map.of("authorization", - ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build()))))); - - final String url = flow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... - while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("access_token")); - assertTrue(params.get("access_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java deleted file mode 100644 index 3426d9b3ad0e..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -public class LinkedinAdsOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - protected static final Path CREDENTIALS_PATH = Path.of("secrets/config_oauth.json"); - protected static final String REDIRECT_URL = "http://localhost:3000/auth_flow"; - - @Override - protected int getServerListeningPort() { - return 3000; - } - - @Override - protected Path getCredentialsPath() { - return CREDENTIALS_PATH; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new LinkedinAdsOAuthFlow(configRepository, httpClient); - } - - @SuppressWarnings({"BusyWait", "unchecked"}) - @Test - public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - 
.withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build()))))); - final String url = - getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... - while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("credentials")); - final Map credentials; - credentials = Collections.unmodifiableMap((Map) params.get("credentials")); - assertTrue(credentials.containsKey("refresh_token")); - assertTrue(credentials.get("refresh_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java deleted file mode 100644 index 2295f17f8d31..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -public class PipeDriveOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - @Override - protected Path getCredentialsPath() { - return Path.of("secrets/pipedrive.json"); - } - - @Override - protected String getRedirectUrl() { - return "http://localhost:3000/auth_flow"; - } - - @Override - protected int getServerListeningPort() { - return 3000; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new PipeDriveOAuthFlow(configRepository, httpClient); - } - - @Test - public void testFullPipeDriveOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(getCredentialsPath()); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - 
.withConfiguration(Jsons.jsonNode(Map.of("authorization", ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build()))))); - final String url = getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), - Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - waitForResponse(20); - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), getRedirectUrl()); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("authorization")); - final var creds = (Map) params.get("authorization"); - assertTrue(creds.get("refresh_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java deleted file mode 100644 index 951f996bba4f..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -public class QuickbooksOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - protected static final Path CREDENTIALS_PATH = Path.of("secrets/quickbooks.json"); - protected static final String REDIRECT_URL = "http://localhost:3000/auth_flow"; - - @Override - protected int getServerListeningPort() { - return 3000; - } - - @Override - protected Path getCredentialsPath() { - return CREDENTIALS_PATH; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new QuickbooksOAuthFlow(configRepository, httpClient); - } - - @SuppressWarnings({"BusyWait", "unchecked"}) - @Test - public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - 
.withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build()))))); - final String url = - getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... - while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("credentials")); - final Map credentials; - credentials = Collections.unmodifiableMap((Map) params.get("credentials")); - assertTrue(credentials.containsKey("refresh_token")); - assertTrue(credentials.get("refresh_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java deleted file mode 100644 index ca313268d937..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.sun.net.httpserver.HttpExchange; -import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpServer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.net.http.HttpClient; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SalesforceOAuthFlowIntegrationTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(SalesforceOAuthFlowIntegrationTest.class); - private static final String REDIRECT_URL = "http://localhost:8000/code"; - private static final Path CREDENTIALS_PATH = Path.of("secrets/salesforce.json"); - - private ConfigRepository configRepository; - private SalesforceOAuthFlow salesforceOAuthFlow; - private HttpServer server; - private ServerHandler serverHandler; - private HttpClient httpClient; - - @BeforeEach - public void setup() throws IOException { - if (!Files.exists(CREDENTIALS_PATH)) { - throw new IllegalStateException( - "Must provide path to a oauth credentials file."); - } - configRepository = mock(ConfigRepository.class); - httpClient = 
HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - salesforceOAuthFlow = new SalesforceOAuthFlow(configRepository, httpClient); - - server = HttpServer.create(new InetSocketAddress(8000), 0); - server.setExecutor(null); // creates a default executor - server.start(); - serverHandler = new ServerHandler("code"); - server.createContext("/code", serverHandler); - } - - @AfterEach - void tearDown() { - server.stop(1); - } - - @Test - public void testFullSalesforceOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - final String clientId = credentialsJson.get("client_id").asText(); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", clientId) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build())))); - final String url = salesforceOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... 
- while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = salesforceOAuthFlow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("refresh_token")); - assertTrue(params.get("refresh_token").toString().length() > 0); - } - - static class ServerHandler implements HttpHandler { - - final private String expectedParam; - private Map responseQuery; - private String paramValue; - private boolean succeeded; - - public ServerHandler(final String expectedParam) { - this.expectedParam = expectedParam; - this.paramValue = ""; - this.succeeded = false; - } - - public boolean isSucceeded() { - return succeeded; - } - - public String getParamValue() { - return paramValue; - } - - public Map getResponseQuery() { - return responseQuery; - } - - @Override - public void handle(final HttpExchange t) { - final String query = t.getRequestURI().getQuery(); - LOGGER.info("Received query: '{}'", query); - final Map data; - try { - data = deserialize(query); - final String response; - if (data != null && data.containsKey(expectedParam)) { - paramValue = data.get(expectedParam); - response = String.format("Successfully extracted %s:\n'%s'\nTest should be continuing the OAuth Flow to retrieve the refresh_token...", - expectedParam, paramValue); - responseQuery = data; - LOGGER.info(response); - t.sendResponseHeaders(200, response.length()); - succeeded = true; - } else { - response = String.format("Unable to parse query params from redirected url: %s", query); - t.sendResponseHeaders(500, response.length()); - } - final OutputStream os = t.getResponseBody(); - os.write(response.getBytes(StandardCharsets.UTF_8)); - os.close(); - } catch (final RuntimeException | IOException e) { - 
LOGGER.error("Failed to parse from body {}", query, e); - } - } - - private static Map deserialize(final String query) { - if (query == null) { - return null; - } - final Map result = new HashMap<>(); - for (final String param : query.split("&")) { - final String[] entry = param.split("=", 2); - if (entry.length > 1) { - result.put(entry[0], entry[1]); - } else { - result.put(entry[0], ""); - } - } - return result; - } - - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java deleted file mode 100644 index 10b99e7f4f03..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -public class SlackOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - @Override - protected Path getCredentialsPath() { - return Path.of("secrets/slack.json"); - } - - @Override - protected String getRedirectUrl() { - return "https://27b0-2804-14d-2a76-9a9a-fdbb-adee-9e5d-6c.ngrok.io/auth_flow"; - 
} - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new SlackOAuthFlow(configRepository, httpClient); - } - - @Test - public void testFullSlackOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(getCredentialsPath()); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString).get("credentials"); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build())))); - final String url = getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), - Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... 
- while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), getRedirectUrl()); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("credentials")); - assertTrue(((Map) params.get("credentials")).containsKey("access_token")); - assertTrue(((Map) params.get("credentials")).get("access_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java deleted file mode 100644 index cd45fd959cc3..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -public class SnapchatMarketingOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - @Override - protected Path getCredentialsPath() { - return Path.of("secrets/snapchat.json"); - } - - @Override - protected String getRedirectUrl() { - return "https://f215-195-114-147-152.ngrok.io/auth_flow"; - } - - @Override - protected int getServerListeningPort() { - return 3000; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new SnapchatMarketingOAuthFlow(configRepository, httpClient); - } - - @Test - public void testFullSnapchatMarketingOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(getCredentialsPath()); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - 
.withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build())))); - final String url = getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), - Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - waitForResponse(20); - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), getRedirectUrl()); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("refresh_token")); - assertTrue(params.get("refresh_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java deleted file mode 100644 index 27c42993bda9..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class SquareOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - protected static final Path CREDENTIALS_PATH = Path.of("secrets/square.json"); - protected static final String REDIRECT_URL = "http://localhost:8000/code"; - protected static final int SERVER_LISTENING_PORT = 8000; - - @Override - protected Path getCredentialsPath() { - return CREDENTIALS_PATH; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new SquareOAuthFlow(configRepository, httpClient); - } - - @Override - protected int getServerListeningPort() { - return SERVER_LISTENING_PORT; - } - - @Override - @BeforeEach - public void setup() throws IOException { - super.setup(); - } - - @Test - public void testFullSquareOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = 
Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode( - Map.of("authorization", - ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build()))))); - - final String url = flow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... - while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("access_token")); - assertTrue(params.get("access_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java deleted file mode 100644 index 9e87d0093fc9..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class SurveymonkeyOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - protected static final Path CREDENTIALS_PATH = Path.of("secrets/surveymonkey.json"); - protected static final String REDIRECT_URL = "http://localhost:3000/auth_flow"; - - @Override - protected Path getCredentialsPath() { - return CREDENTIALS_PATH; - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new SurveymonkeyOAuthFlow(configRepository, httpClient); - } - - @Override - @BeforeEach - public void setup() throws IOException { - super.setup(); - } - - @Override - protected int getServerListeningPort() { - return 3000; - } - - @Test - public void testFullSurveymonkeyOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - 
when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", credentialsJson.get("client_id").asText()) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build())))); - final String url = flow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - waitForResponse(20); - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("access_token")); - assertTrue(params.get("access_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java deleted file mode 100644 index 29702f801d00..000000000000 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.sun.net.httpserver.HttpExchange; -import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpServer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TrelloOAuthFlowIntegrationTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(TrelloOAuthFlowIntegrationTest.class); - private static final String REDIRECT_URL = "http://localhost:8000/code"; - private static final Path CREDENTIALS_PATH = Path.of("secrets/trello.json"); - - private ConfigRepository configRepository; - private TrelloOAuthFlow trelloOAuthFlow; - private HttpServer server; - private ServerHandler serverHandler; - - @BeforeEach - public void setup() throws IOException { - if (!Files.exists(CREDENTIALS_PATH)) { - throw new IllegalStateException( - "Must provide path to a oauth credentials file."); - } - configRepository = mock(ConfigRepository.class); - trelloOAuthFlow = new TrelloOAuthFlow(configRepository); - - server = HttpServer.create(new InetSocketAddress(8000), 0); - 
server.setExecutor(null); // creates a default executor - server.start(); - serverHandler = new ServerHandler("oauth_verifier"); - server.createContext("/code", serverHandler); - } - - @AfterEach - void tearDown() { - server.stop(1); - } - - @Test - public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - final String clientId = credentialsJson.get("client_id").asText(); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", clientId) - .put("client_secret", credentialsJson.get("client_secret").asText()) - .build())))); - final String url = trelloOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... 
- while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = trelloOAuthFlow.completeSourceOAuth(workspaceId, definitionId, - Map.of("oauth_verifier", serverHandler.getParamValue(), "oauth_token", serverHandler.getResponseQuery().get("oauth_token")), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("token")); - assertTrue(params.containsKey("key")); - assertTrue(params.get("token").toString().length() > 0); - } - - static class ServerHandler implements HttpHandler { - - final private String expectedParam; - private Map responseQuery; - private String paramValue; - private boolean succeeded; - - public ServerHandler(final String expectedParam) { - this.expectedParam = expectedParam; - this.paramValue = ""; - this.succeeded = false; - } - - public boolean isSucceeded() { - return succeeded; - } - - public String getParamValue() { - return paramValue; - } - - public Map getResponseQuery() { - return responseQuery; - } - - @Override - public void handle(final HttpExchange t) { - final String query = t.getRequestURI().getQuery(); - LOGGER.info("Received query: '{}'", query); - final Map data; - try { - data = deserialize(query); - final String response; - if (data != null && data.containsKey(expectedParam)) { - paramValue = data.get(expectedParam); - response = String.format("Successfully extracted %s:\n'%s'\nTest should be continuing the OAuth Flow to retrieve the refresh_token...", - expectedParam, paramValue); - responseQuery = data; - LOGGER.info(response); - t.sendResponseHeaders(200, response.length()); - succeeded = true; - } else { - response = String.format("Unable to parse query params from redirected url: %s", query); - t.sendResponseHeaders(500, response.length()); - } - final OutputStream os = t.getResponseBody(); - 
os.write(response.getBytes(StandardCharsets.UTF_8)); - os.close(); - } catch (final RuntimeException | IOException e) { - LOGGER.error("Failed to parse from body {}", query, e); - } - } - - private static Map deserialize(final String query) { - if (query == null) { - return null; - } - final Map result = new HashMap<>(); - for (final String param : query.split("&")) { - final String[] entry = param.split("="); - if (entry.length > 1) { - result.put(entry[0], entry[1]); - } else { - result.put(entry[0], ""); - } - } - return result; - } - - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java deleted file mode 100644 index 783c708d8726..000000000000 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -public class HubspotOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { - - @Override - protected Path getCredentialsPath() { - return Path.of("secrets/hubspot.json"); - } - - protected OAuthFlowImplementation getFlowObject(final ConfigRepository configRepository) { - return new HubspotOAuthFlow(configRepository, httpClient); - } - - @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { - return new HubspotOAuthFlow(configRepository, httpClient); - } - - @Test - public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 100; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(getCredentialsPath()); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - 
.withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) - .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) - .build())))); - final var flowObject = getFlowImplementation(configRepository, httpClient); - final String url = flowObject.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... - while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = flowObject.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("credentials")); - final Map credentials = (Map) params.get("credentials"); - assertTrue(credentials.containsKey("refresh_token")); - assertTrue(credentials.get("refresh_token").toString().length() > 0); - assertTrue(credentials.containsKey("access_token")); - assertTrue(credentials.get("access_token").toString().length() > 0); - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java deleted file mode 100644 index eb784e3effa5..000000000000 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.mockito.Mockito.mock; - -import com.sun.net.httpserver.HttpExchange; -import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpServer; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.OAuthFlowImplementation; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.net.http.HttpClient; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.AvoidReassigningParameters") -public abstract class OAuthFlowIntegrationTest { - - /** - * Convenience base class for OAuthFlow tests. Those tests right now are meant to be run manually, - * due to the consent flow in the browser - */ - protected static final Logger LOGGER = LoggerFactory.getLogger(OAuthFlowIntegrationTest.class); - protected static final String REDIRECT_URL = "http://localhost/auth_flow"; - protected static final int SERVER_LISTENING_PORT = 80; - - protected HttpClient httpClient; - protected ConfigRepository configRepository; - protected OAuthFlowImplementation flow; - protected HttpServer server; - protected ServerHandler serverHandler; - - protected Path getCredentialsPath() { - return Path.of("secrets/config.json"); - }; - - protected String getRedirectUrl() { - return REDIRECT_URL; - } - - protected abstract OAuthFlowImplementation getFlowImplementation(ConfigRepository configRepository, HttpClient httpClient); - - @BeforeEach - public void setup() throws IOException { - if (!Files.exists(getCredentialsPath())) { - throw new IllegalStateException( - "Must provide path to a oauth credentials file."); - } - configRepository = mock(ConfigRepository.class); - httpClient = 
HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - flow = this.getFlowImplementation(configRepository, httpClient); - - server = HttpServer.create(new InetSocketAddress(getServerListeningPort()), 0); - server.setExecutor(null); // creates a default executor - server.start(); - serverHandler = new ServerHandler("code"); - // Same endpoint as we use for airbyte instance - server.createContext(getCallBackServerPath(), serverHandler); - - } - - protected String getCallBackServerPath() { - return "/auth_flow"; - } - - protected int getServerListeningPort() { - return SERVER_LISTENING_PORT; - } - - @AfterEach - void tearDown() { - server.stop(1); - } - - protected void waitForResponse(int limit) throws InterruptedException { - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... - while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - } - - public static class ServerHandler implements HttpHandler { - - final private String expectedParam; - private String paramValue; - private boolean succeeded; - - public ServerHandler(final String expectedParam) { - this.expectedParam = expectedParam; - this.paramValue = ""; - this.succeeded = false; - } - - public boolean isSucceeded() { - return succeeded; - } - - public String getParamValue() { - return paramValue; - } - - @Override - public void handle(final HttpExchange t) { - final String query = t.getRequestURI().getQuery(); - LOGGER.info("Received query: '{}'", query); - final Map data; - try { - data = deserialize(query); - final String response; - if (data != null && data.containsKey(expectedParam)) { - paramValue = data.get(expectedParam); - response = String.format("Successfully extracted %s:\n'%s'\nTest should be continuing the OAuth Flow to retrieve the refresh_token...", - expectedParam, paramValue); - LOGGER.info(response); - t.sendResponseHeaders(200, response.length()); - succeeded = true; - } else { - 
response = String.format("Unable to parse query params from redirected url: %s", query); - t.sendResponseHeaders(500, response.length()); - } - final OutputStream os = t.getResponseBody(); - os.write(response.getBytes(StandardCharsets.UTF_8)); - os.close(); - } catch (final RuntimeException | IOException e) { - LOGGER.error("Failed to parse from body {}", query, e); - } - } - - private static Map deserialize(final String query) { - if (query == null) { - return null; - } - final Map result = new HashMap<>(); - for (final String param : query.split("&")) { - final String[] entry = param.split("=", 2); - if (entry.length > 1) { - result.put(entry[0], entry[1]); - } else { - result.put(entry[0], ""); - } - } - return result; - } - - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java deleted file mode 100644 index 9353c7a64c48..000000000000 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.sun.net.httpserver.HttpExchange; -import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpServer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.net.http.HttpClient; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GoogleAdsOAuthFlowIntegrationTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(GoogleAdsOAuthFlowIntegrationTest.class); - private static final String REDIRECT_URL = "http://localhost/code"; - private static final Path CREDENTIALS_PATH = Path.of("secrets/google_ads.json"); - - private ConfigRepository configRepository; - private GoogleAdsOAuthFlow googleAdsOAuthFlow; - private HttpServer server; - private ServerHandler serverHandler; - private HttpClient httpClient; - - @BeforeEach - public void setup() throws IOException { - if (!Files.exists(CREDENTIALS_PATH)) { - throw new IllegalStateException( - "Must provide path to a oauth credentials file."); - } - configRepository = mock(ConfigRepository.class); - httpClient = 
HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - googleAdsOAuthFlow = new GoogleAdsOAuthFlow(configRepository, httpClient); - - server = HttpServer.create(new InetSocketAddress(80), 0); - server.setExecutor(null); // creates a default executor - server.start(); - serverHandler = new ServerHandler("code"); - server.createContext("/code", serverHandler); - } - - @AfterEach - void tearDown() { - server.stop(1); - } - - @Test - public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH, StandardCharsets.UTF_8); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() - .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) - .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) - .build()))))); - final String url = googleAdsOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... 
- while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = googleAdsOAuthFlow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("credentials")); - final Map credentials = (Map) params.get("credentials"); - assertTrue(credentials.containsKey("refresh_token")); - assertTrue(credentials.get("refresh_token").toString().length() > 0); - assertTrue(credentials.containsKey("access_token")); - assertTrue(credentials.get("access_token").toString().length() > 0); - } - - static class ServerHandler implements HttpHandler { - - final private String expectedParam; - private String paramValue; - private boolean succeeded; - - public ServerHandler(final String expectedParam) { - this.expectedParam = expectedParam; - this.paramValue = ""; - this.succeeded = false; - } - - public boolean isSucceeded() { - return succeeded; - } - - public String getParamValue() { - return paramValue; - } - - @Override - public void handle(final HttpExchange t) { - final String query = t.getRequestURI().getQuery(); - LOGGER.info("Received query: '{}'", query); - final Map data; - try { - data = deserialize(query); - final String response; - if (data != null && data.containsKey(expectedParam)) { - paramValue = data.get(expectedParam); - response = String.format("Successfully extracted %s:\n'%s'\nTest should be continuing the OAuth Flow to retrieve the refresh_token...", - expectedParam, paramValue); - LOGGER.info(response); - t.sendResponseHeaders(200, response.length()); - succeeded = true; - } else { - response = String.format("Unable to parse query params from redirected url: %s", query); - t.sendResponseHeaders(500, response.length()); - } - final OutputStream os = t.getResponseBody(); - 
os.write(response.getBytes(StandardCharsets.UTF_8)); - os.close(); - } catch (final RuntimeException | IOException e) { - LOGGER.error("Failed to parse from body {}", query, e); - } - } - - private static Map deserialize(final String query) { - if (query == null) { - return null; - } - final Map result = new HashMap<>(); - for (final String param : query.split("&")) { - final String[] entry = param.split("="); - if (entry.length > 1) { - result.put(entry[0], entry[1]); - } else { - result.put(entry[0], ""); - } - } - return result; - } - - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java deleted file mode 100644 index 43522b546c97..000000000000 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.sun.net.httpserver.HttpExchange; -import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpServer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.net.http.HttpClient; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GoogleAnalyticsOAuthFlowIntegrationTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(GoogleAnalyticsOAuthFlowIntegrationTest.class); - private static final String REDIRECT_URL = "http://localhost/code"; - private static final Path CREDENTIALS_PATH = Path.of("secrets/google_analytics.json"); - - private ConfigRepository configRepository; - private GoogleAnalyticsViewIdOAuthFlow googleAnalyticsViewIdOAuthFlow; - private HttpServer server; - private ServerHandler serverHandler; - private HttpClient httpClient; - - @BeforeEach - public void setup() throws IOException { - if (!Files.exists(CREDENTIALS_PATH)) { - throw new IllegalStateException( - "Must provide path to a oauth credentials file."); - } - configRepository = mock(ConfigRepository.class); - httpClient 
= HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - googleAnalyticsViewIdOAuthFlow = new GoogleAnalyticsViewIdOAuthFlow(configRepository, httpClient); - - server = HttpServer.create(new InetSocketAddress(80), 0); - server.setExecutor(null); // creates a default executor - server.start(); - serverHandler = new ServerHandler("code"); - server.createContext("/code", serverHandler); - } - - @AfterEach - void tearDown() { - server.stop(1); - } - - @Test - public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() - .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) - .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) - .build()))))); - final String url = googleAnalyticsViewIdOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... 
- while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = googleAnalyticsViewIdOAuthFlow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("credentials")); - final Map credentials = (Map) params.get("credentials"); - assertTrue(credentials.containsKey("refresh_token")); - assertTrue(credentials.get("refresh_token").toString().length() > 0); - assertTrue(credentials.containsKey("access_token")); - assertTrue(credentials.get("access_token").toString().length() > 0); - } - - static class ServerHandler implements HttpHandler { - - final private String expectedParam; - private String paramValue; - private boolean succeeded; - - public ServerHandler(final String expectedParam) { - this.expectedParam = expectedParam; - this.paramValue = ""; - this.succeeded = false; - } - - public boolean isSucceeded() { - return succeeded; - } - - public String getParamValue() { - return paramValue; - } - - @Override - public void handle(final HttpExchange t) { - final String query = t.getRequestURI().getQuery(); - LOGGER.info("Received query: '{}'", query); - final Map data; - try { - data = deserialize(query); - final String response; - if (data != null && data.containsKey(expectedParam)) { - paramValue = data.get(expectedParam); - response = String.format("Successfully extracted %s:\n'%s'\nTest should be continuing the OAuth Flow to retrieve the refresh_token...", - expectedParam, paramValue); - LOGGER.info(response); - t.sendResponseHeaders(200, response.length()); - succeeded = true; - } else { - response = String.format("Unable to parse query params from redirected url: %s", query); - t.sendResponseHeaders(500, response.length()); - } - final OutputStream os = 
t.getResponseBody(); - os.write(response.getBytes(StandardCharsets.UTF_8)); - os.close(); - } catch (final RuntimeException | IOException e) { - LOGGER.error("Failed to parse from body {}", query, e); - } - } - - private static Map deserialize(final String query) { - if (query == null) { - return null; - } - final Map result = new HashMap<>(); - for (final String param : query.split("&")) { - final String[] entry = param.split("="); - if (entry.length > 1) { - result.put(entry[0], entry[1]); - } else { - result.put(entry[0], ""); - } - } - return result; - } - - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java deleted file mode 100644 index f9e0285d8707..000000000000 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.sun.net.httpserver.HttpExchange; -import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpServer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.net.http.HttpClient; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GoogleSearchConsoleOAuthFlowIntegrationTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(GoogleSearchConsoleOAuthFlowIntegrationTest.class); - private static final String REDIRECT_URL = "http://localhost/code"; - private static final Path CREDENTIALS_PATH = Path.of("secrets/google_search_console.json"); - - private ConfigRepository configRepository; - private GoogleSearchConsoleOAuthFlow googleSearchConsoleOAuthFlow; - private HttpServer server; - private ServerHandler serverHandler; - private HttpClient httpClient; - - @BeforeEach - public void setup() throws IOException { - if (!Files.exists(CREDENTIALS_PATH)) { - throw new IllegalStateException( - "Must provide path to a oauth credentials file."); - } - configRepository = mock(ConfigRepository.class); - 
httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - googleSearchConsoleOAuthFlow = new GoogleSearchConsoleOAuthFlow(configRepository, httpClient); - - server = HttpServer.create(new InetSocketAddress(80), 0); - server.setExecutor(null); // creates a default executor - server.start(); - serverHandler = new ServerHandler("code"); - server.createContext("/code", serverHandler); - } - - @AfterEach - void tearDown() { - server.stop(1); - } - - @Test - public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(Map.of("authorization", ImmutableMap.builder() - .put("client_id", credentialsJson.get("authorization").get("client_id").asText()) - .put("client_secret", credentialsJson.get("authorization").get("client_secret").asText()) - .build()))))); - final String url = googleSearchConsoleOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... 
- while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = googleSearchConsoleOAuthFlow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("authorization")); - final Map credentials = (Map) params.get("authorization"); - assertTrue(credentials.containsKey("refresh_token")); - assertTrue(credentials.get("refresh_token").toString().length() > 0); - assertTrue(credentials.containsKey("access_token")); - assertTrue(credentials.get("access_token").toString().length() > 0); - } - - static class ServerHandler implements HttpHandler { - - final private String expectedParam; - private String paramValue; - private boolean succeeded; - - public ServerHandler(final String expectedParam) { - this.expectedParam = expectedParam; - this.paramValue = ""; - this.succeeded = false; - } - - public boolean isSucceeded() { - return succeeded; - } - - public String getParamValue() { - return paramValue; - } - - @Override - public void handle(final HttpExchange t) { - final String query = t.getRequestURI().getQuery(); - LOGGER.info("Received query: '{}'", query); - final Map data; - try { - data = deserialize(query); - final String response; - if (data != null && data.containsKey(expectedParam)) { - paramValue = data.get(expectedParam); - response = String.format("Successfully extracted %s:\n'%s'\nTest should be continuing the OAuth Flow to retrieve the refresh_token...", - expectedParam, paramValue); - LOGGER.info(response); - t.sendResponseHeaders(200, response.length()); - succeeded = true; - } else { - response = String.format("Unable to parse query params from redirected url: %s", query); - t.sendResponseHeaders(500, response.length()); - } - final OutputStream os = 
t.getResponseBody(); - os.write(response.getBytes(StandardCharsets.UTF_8)); - os.close(); - } catch (final RuntimeException | IOException e) { - LOGGER.error("Failed to parse from body {}", query, e); - } - } - - private static Map deserialize(final String query) { - if (query == null) { - return null; - } - final Map result = new HashMap<>(); - for (final String param : query.split("&")) { - final String[] entry = param.split("="); - if (entry.length > 1) { - result.put(entry[0], entry[1]); - } else { - result.put(entry[0], ""); - } - } - return result; - } - - } - -} diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java deleted file mode 100644 index 89b156b50417..000000000000 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.sun.net.httpserver.HttpExchange; -import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpServer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.net.http.HttpClient; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GoogleSheetsOAuthFlowIntegrationTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(GoogleSheetsOAuthFlowIntegrationTest.class); - private static final String REDIRECT_URL = "http://localhost/code"; - private static final Path CREDENTIALS_PATH = Path.of("secrets/google_sheets.json"); - - private ConfigRepository configRepository; - private GoogleSheetsOAuthFlow googleSheetsOAuthFlow; - private HttpServer server; - private ServerHandler serverHandler; - private HttpClient httpClient; - - @BeforeEach - public void setup() throws IOException { - if (!Files.exists(CREDENTIALS_PATH)) { - throw new IllegalStateException( - "Must provide path to a oauth credentials file."); - } - configRepository = mock(ConfigRepository.class); - httpClient = 
HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - googleSheetsOAuthFlow = new GoogleSheetsOAuthFlow(configRepository, httpClient); - - server = HttpServer.create(new InetSocketAddress(80), 0); - server.setExecutor(null); // creates a default executor - server.start(); - serverHandler = new ServerHandler("code"); - server.createContext("/code", serverHandler); - } - - @AfterEach - void tearDown() { - server.stop(1); - } - - @Test - public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { - int limit = 20; - final UUID workspaceId = UUID.randomUUID(); - final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); - final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() - .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) - .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) - .build()))))); - final String url = googleSheetsOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - LOGGER.info("Waiting for user consent at: {}", url); - // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing - // access... 
- while (!serverHandler.isSucceeded() && limit > 0) { - Thread.sleep(1000); - limit -= 1; - } - assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); - final Map params = googleSheetsOAuthFlow.completeSourceOAuth(workspaceId, definitionId, - Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); - LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); - assertTrue(params.containsKey("credentials")); - final Map credentials = (Map) params.get("credentials"); - assertTrue(credentials.containsKey("refresh_token")); - assertTrue(credentials.get("refresh_token").toString().length() > 0); - assertTrue(credentials.containsKey("access_token")); - assertTrue(credentials.get("access_token").toString().length() > 0); - } - - static class ServerHandler implements HttpHandler { - - final private String expectedParam; - private String paramValue; - private boolean succeeded; - - public ServerHandler(final String expectedParam) { - this.expectedParam = expectedParam; - this.paramValue = ""; - this.succeeded = false; - } - - public boolean isSucceeded() { - return succeeded; - } - - public String getParamValue() { - return paramValue; - } - - @Override - public void handle(final HttpExchange t) { - final String query = t.getRequestURI().getQuery(); - LOGGER.info("Received query: '{}'", query); - final Map data; - try { - data = deserialize(query); - final String response; - if (data != null && data.containsKey(expectedParam)) { - paramValue = data.get(expectedParam); - response = String.format("Successfully extracted %s:\n'%s'\nTest should be continuing the OAuth Flow to retrieve the refresh_token...", - expectedParam, paramValue); - LOGGER.info(response); - t.sendResponseHeaders(200, response.length()); - succeeded = true; - } else { - response = String.format("Unable to parse query params from redirected url: %s", query); - t.sendResponseHeaders(500, response.length()); - } - final OutputStream os = t.getResponseBody(); - 
os.write(response.getBytes(StandardCharsets.UTF_8)); - os.close(); - } catch (final RuntimeException | IOException e) { - LOGGER.error("Failed to parse from body {}", query, e); - } - } - - private static Map deserialize(final String query) { - if (query == null) { - return null; - } - final Map result = new HashMap<>(); - for (final String param : query.split("&")) { - final String[] entry = param.split("="); - if (entry.length > 1) { - result.put(entry[0], entry[1]); - } else { - result.put(entry[0], ""); - } - } - return result; - } - - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/MoreOAuthParametersTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/MoreOAuthParametersTest.java deleted file mode 100644 index 005967b4ad10..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/MoreOAuthParametersTest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -class MoreOAuthParametersTest { - - private static final String FIELD = "field"; - private static final String OAUTH_CREDS = "oauth_credentials"; - - @Test - void testFlattenConfig() { - final JsonNode nestedConfig = Jsons.jsonNode(Map.of( - FIELD, "value1", - "top-level", Map.of( - "nested_field", "value2"))); - final JsonNode expectedConfig = Jsons.jsonNode(Map.of( - FIELD, "value1", - "nested_field", "value2")); - final JsonNode actualConfig = MoreOAuthParameters.flattenOAuthConfig(nestedConfig); - assertEquals(expectedConfig, actualConfig); - } - - @Test - 
void testFailureFlattenConfig() { - final JsonNode nestedConfig = Jsons.jsonNode(Map.of( - FIELD, "value1", - "top-level", Map.of( - "nested_field", "value2", - FIELD, "value3"))); - assertThrows(IllegalStateException.class, () -> MoreOAuthParameters.flattenOAuthConfig(nestedConfig)); - } - - @Test - void testInjectUnnestedNode() { - final ObjectNode oauthParams = (ObjectNode) Jsons.jsonNode(generateOAuthParameters()); - - final ObjectNode actual = generateJsonConfig(); - final ObjectNode expected = Jsons.clone(actual); - expected.setAll(oauthParams); - - MoreOAuthParameters.mergeJsons(actual, oauthParams); - - assertEquals(expected, actual); - } - - @Test - @DisplayName("A nested config should be inserted with the same nesting structure") - void testInjectNewNestedNode() { - final ObjectNode oauthParams = (ObjectNode) Jsons.jsonNode(generateOAuthParameters()); - final ObjectNode nestedConfig = (ObjectNode) Jsons.jsonNode(ImmutableMap.builder() - .put(OAUTH_CREDS, oauthParams) - .build()); - - // nested node does not exist in actual object - final ObjectNode actual = generateJsonConfig(); - final ObjectNode expected = Jsons.clone(actual); - expected.putObject(OAUTH_CREDS).setAll(oauthParams); - - MoreOAuthParameters.mergeJsons(actual, nestedConfig); - - assertEquals(expected, actual); - } - - @Test - @DisplayName("A nested node which partially exists in the main config should be merged into the main config, not overwrite the whole nested object") - void testInjectedPartiallyExistingNestedNode() { - final ObjectNode oauthParams = (ObjectNode) Jsons.jsonNode(generateOAuthParameters()); - final ObjectNode nestedConfig = (ObjectNode) Jsons.jsonNode(ImmutableMap.builder() - .put(OAUTH_CREDS, oauthParams) - .build()); - - // nested node partially exists in actual object - final ObjectNode actual = generateJsonConfig(); - actual.putObject(OAUTH_CREDS).put("irrelevant_field", "_"); - final ObjectNode expected = Jsons.clone(actual); - ((ObjectNode) 
expected.get(OAUTH_CREDS)).setAll(oauthParams); - - MoreOAuthParameters.mergeJsons(actual, nestedConfig); - - assertEquals(expected, actual); - } - - private ObjectNode generateJsonConfig() { - return (ObjectNode) Jsons.jsonNode(ImmutableMap.builder() - .put("apiSecret", "123") - .put("client", "testing") - .build()); - } - - private Map generateOAuthParameters() { - return ImmutableMap.builder() - .put("api_secret", "mysecret") - .put("api_client", UUID.randomUUID().toString()) - .build(); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/AmazonAdsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/AmazonAdsOAuthFlowTest.java deleted file mode 100644 index 4e6b9ec5a94c..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/AmazonAdsOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class AmazonAdsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new AmazonAdsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://www.amazon.com/ap/oa?client_id=test_client_id&scope=advertising%3A%3Acampaign_management&response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/AsanaOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/AsanaOAuthFlowTest.java deleted file mode 100644 index fe2accc120d5..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/AsanaOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class AsanaOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new AsanaOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://app.asana.com/-/oauth_authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/BaseOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/BaseOAuthFlowTest.java deleted file mode 100644 index 3455ae4c3cc4..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/BaseOAuthFlowTest.java +++ /dev/null @@ -1,451 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationOAuthParameter; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import io.airbyte.protocol.models.OAuthConfigSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.net.http.HttpResponse; -import java.util.List; -import java.util.Map; -import 
java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public abstract class BaseOAuthFlowTest { - - private static final String REDIRECT_URL = "https://airbyte.io"; - private static final String REFRESH_TOKEN = "refresh_token"; - private static final String CLIENT_ID = "client_id"; - private static final String TYPE = "type"; - private static final String CODE = "code"; - private static final String TEST_CODE = "test_code"; - private static final String EXPECTED_BUT_GOT = "Expected %s values but got\n\t%s\ninstead of\n\t%s"; - - private HttpClient httpClient; - private ConfigRepository configRepository; - private BaseOAuthFlow oauthFlow; - - private UUID workspaceId; - private UUID definitionId; - - protected HttpClient getHttpClient() { - return httpClient; - } - - protected ConfigRepository getConfigRepository() { - return configRepository; - } - - @BeforeEach - void setup() throws JsonValidationException, IOException { - httpClient = mock(HttpClient.class); - configRepository = mock(ConfigRepository.class); - oauthFlow = getOAuthFlow(); - - workspaceId = UUID.randomUUID(); - definitionId = UUID.randomUUID(); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(getOAuthParamConfig()))); - when(configRepository.listDestinationOAuthParam()).thenReturn(List.of(new DestinationOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withDestinationDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(getOAuthParamConfig()))); - } - - /** - * This should be implemented for the particular oauth flow implementation - * - * @return the oauth flow implementation to test - */ - protected abstract BaseOAuthFlow getOAuthFlow(); - - /** - * This should be implemented for the particular oauth flow implementation - * - * @return 
the expected consent URL - */ - protected abstract String getExpectedConsentUrl(); - - /** - * Redefine if the oauth flow implementation does not return `refresh_token`. (maybe for example - * using `access_token` like in the `GithubOAuthFlowTest` instead?) - * - * @return the full output expected to be returned by this oauth flow + all its instance wide - * variables - */ - protected Map getExpectedOutput() { - return Map.of( - REFRESH_TOKEN, "refresh_token_response", - CLIENT_ID, MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - /** - * Redefine if the oauth flow implementation does not return `refresh_token`. (maybe for example - * using `access_token` like in the `GithubOAuthFlowTest` instead?) - * - * @return the output specification used to identify what the oauth flow should be returning - */ - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of(REFRESH_TOKEN, Map.of(TYPE, "string"))); - } - - /** - * Redefine if the oauth flow implementation does not return `refresh_token`. (maybe for example - * using `access_token` like in the `GithubOAuthFlowTest` instead?) - * - * @return the filtered outputs once it is filtered by the output specifications - */ - protected Map getExpectedFilteredOutput() { - return Map.of( - REFRESH_TOKEN, "refresh_token_response", - CLIENT_ID, MoreOAuthParameters.SECRET_MASK); - } - - /** - * @return the output specification used to filter what the oauth flow should be returning - */ - protected JsonNode getCompleteOAuthServerOutputSpecification() { - return getJsonSchema(Map.of(CLIENT_ID, Map.of(TYPE, "string"))); - } - - /** - * Redefine to match the oauth implementation flow getDefaultOAuthOutputPath() - * - * @return the backward compatible path that is used in the deprecated oauth flows. 
- */ - protected List getExpectedOutputPath() { - return List.of("credentials"); - } - - /** - * @return if the OAuth implementation flow has a dependency on input values from connector config. - */ - protected boolean hasDependencyOnConnectorConfigValues() { - return !getInputOAuthConfiguration().isEmpty(); - } - - /** - * If the OAuth implementation flow has a dependency on input values from connector config, this - * method should be redefined. - * - * @return the input configuration sent to oauth flow (values from connector config) - */ - protected JsonNode getInputOAuthConfiguration() { - return Jsons.emptyObject(); - } - - /** - * If the OAuth implementation flow has a dependency on input values from connector config, this - * method should be redefined. - * - * @return the input configuration sent to oauth flow (values from connector config) - */ - protected JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of()); - } - - /** - * @return the instance wide config params for this oauth flow - */ - protected JsonNode getOAuthParamConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put(CLIENT_ID, "test_client_id") - .put("client_secret", "test_client_secret") - .build()); - } - - protected static JsonNode getJsonSchema(final Map properties) { - return Jsons.jsonNode(Map.of( - TYPE, "object", - "additionalProperties", "false", - "properties", properties)); - } - - protected OAuthConfigSpecification getoAuthConfigSpecification() { - return new OAuthConfigSpecification() - .withOauthUserInputFromConnectorConfigSpecification(getUserInputFromConnectorConfigSpecification()) - .withCompleteOauthOutputSpecification(getCompleteOAuthOutputSpecification()) - .withCompleteOauthServerOutputSpecification(getCompleteOAuthServerOutputSpecification()); - } - - private OAuthConfigSpecification getEmptyOAuthConfigSpecification() { - return new OAuthConfigSpecification() - .withCompleteOauthOutputSpecification(Jsons.emptyObject()) - 
.withCompleteOauthServerOutputSpecification(Jsons.emptyObject()); - } - - protected String getConstantState() { - return "state"; - } - - protected String getMockedResponse() { - final Map returnedCredentials = getExpectedOutput(); - return Jsons.serialize(returnedCredentials); - } - - protected OAuthConfigSpecification getOAuthConfigSpecification() { - return getoAuthConfigSpecification() - // change property types to induce json validation errors. - .withCompleteOauthServerOutputSpecification(getJsonSchema(Map.of(CLIENT_ID, Map.of(TYPE, "integer")))) - .withCompleteOauthOutputSpecification(getJsonSchema(Map.of(REFRESH_TOKEN, Map.of(TYPE, "integer")))); - } - - @Test - void testGetDefaultOutputPath() { - assertEquals(getExpectedOutputPath(), oauthFlow.getDefaultOAuthOutputPath()); - } - - @Test - void testValidateInputOAuthConfigurationFailure() { - final JsonNode invalidInputOAuthConfiguration = Jsons.jsonNode(Map.of("UnexpectedRandomField", 42)); - assertThrows(JsonValidationException.class, - () -> oauthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, invalidInputOAuthConfiguration, getoAuthConfigSpecification())); - assertThrows(JsonValidationException.class, () -> oauthFlow.getDestinationConsentUrl(workspaceId, definitionId, REDIRECT_URL, - invalidInputOAuthConfiguration, getoAuthConfigSpecification())); - assertThrows(JsonValidationException.class, () -> oauthFlow.completeSourceOAuth(workspaceId, definitionId, Map.of(), REDIRECT_URL, - invalidInputOAuthConfiguration, getoAuthConfigSpecification())); - assertThrows(JsonValidationException.class, () -> oauthFlow.completeDestinationOAuth(workspaceId, definitionId, Map.of(), REDIRECT_URL, - invalidInputOAuthConfiguration, getoAuthConfigSpecification())); - } - - @Test - void testGetConsentUrlEmptyOAuthParameters() throws JsonValidationException, IOException { - when(configRepository.listSourceOAuthParam()).thenReturn(List.of()); - 
when(configRepository.listDestinationOAuthParam()).thenReturn(List.of()); - assertThrows(ConfigNotFoundException.class, - () -> oauthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, getInputOAuthConfiguration(), getoAuthConfigSpecification())); - assertThrows(ConfigNotFoundException.class, - () -> oauthFlow.getDestinationConsentUrl(workspaceId, definitionId, REDIRECT_URL, getInputOAuthConfiguration(), - getoAuthConfigSpecification())); - } - - @Test - void testGetConsentUrlIncompleteOAuthParameters() throws IOException, JsonValidationException { - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.emptyObject()))); - when(configRepository.listDestinationOAuthParam()).thenReturn(List.of(new DestinationOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withDestinationDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.emptyObject()))); - assertThrows(IllegalArgumentException.class, - () -> oauthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, getInputOAuthConfiguration(), getoAuthConfigSpecification())); - assertThrows(IllegalArgumentException.class, - () -> oauthFlow.getDestinationConsentUrl(workspaceId, definitionId, REDIRECT_URL, getInputOAuthConfiguration(), - getoAuthConfigSpecification())); - } - - @Test - void testGetSourceConsentUrlEmptyOAuthSpec() throws IOException, ConfigNotFoundException, JsonValidationException { - if (hasDependencyOnConnectorConfigValues()) { - assertThrows(IOException.class, () -> oauthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null), - "OAuth Flow Implementations with dependencies on connector config can't be supported without OAuthConfigSpecifications"); - } else { - final String consentUrl = 
oauthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - assertEquals(getExpectedConsentUrl(), consentUrl); - } - } - - @Test - void testGetDestinationConsentUrlEmptyOAuthSpec() throws IOException, ConfigNotFoundException, JsonValidationException { - if (hasDependencyOnConnectorConfigValues()) { - assertThrows(IOException.class, () -> oauthFlow.getDestinationConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null), - "OAuth Flow Implementations with dependencies on connector config can't be supported without OAuthConfigSpecifications"); - } else { - final String consentUrl = oauthFlow.getDestinationConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - assertEquals(getExpectedConsentUrl(), consentUrl); - } - } - - @Test - void testGetSourceConsentUrl() throws IOException, ConfigNotFoundException, JsonValidationException { - final String consentUrl = - oauthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, getInputOAuthConfiguration(), getoAuthConfigSpecification()); - assertEquals(getExpectedConsentUrl(), consentUrl); - } - - @Test - void testGetDestinationConsentUrl() throws IOException, ConfigNotFoundException, JsonValidationException { - final String consentUrl = - oauthFlow.getDestinationConsentUrl(workspaceId, definitionId, REDIRECT_URL, getInputOAuthConfiguration(), getoAuthConfigSpecification()); - assertEquals(getExpectedConsentUrl(), consentUrl); - } - - @Test - void testCompleteOAuthMissingCode() { - final Map queryParams = Map.of(); - assertThrows(IOException.class, () -> oauthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL)); - } - - @Test - void testDeprecatedCompleteSourceOAuth() throws IOException, InterruptedException, ConfigNotFoundException { - final Map returnedCredentials = getExpectedOutput(); - final HttpResponse response = mock(HttpResponse.class); - 
when(response.body()).thenReturn(Jsons.serialize(returnedCredentials)); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - - if (hasDependencyOnConnectorConfigValues()) { - assertThrows(IOException.class, () -> oauthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL), - "OAuth Flow Implementations with dependencies on connector config can't be supported in the deprecated APIs"); - } else { - Map actualRawQueryParams = oauthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL); - for (final String node : getExpectedOutputPath()) { - assertNotNull(actualRawQueryParams.get(node)); - actualRawQueryParams = (Map) actualRawQueryParams.get(node); - } - final Map expectedOutput = returnedCredentials; - final Map actualQueryParams = actualRawQueryParams; - assertEquals(expectedOutput.size(), actualQueryParams.size(), - String.format(EXPECTED_BUT_GOT, expectedOutput.size(), actualQueryParams, expectedOutput)); - expectedOutput.forEach((key, value) -> assertEquals(value, actualQueryParams.get(key))); - } - } - - @Test - void testDeprecatedCompleteDestinationOAuth() throws IOException, ConfigNotFoundException, InterruptedException { - final Map returnedCredentials = getExpectedOutput(); - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(Jsons.serialize(returnedCredentials)); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - - if (hasDependencyOnConnectorConfigValues()) { - assertThrows(IOException.class, () -> oauthFlow.completeDestinationOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL), - "OAuth Flow Implementations with dependencies on connector config can't be supported in the deprecated APIs"); - } else { - Map actualRawQueryParams = oauthFlow.completeDestinationOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL); - for (final String node 
: getExpectedOutputPath()) { - assertNotNull(actualRawQueryParams.get(node)); - actualRawQueryParams = (Map) actualRawQueryParams.get(node); - } - final Map expectedOutput = returnedCredentials; - final Map actualQueryParams = actualRawQueryParams; - assertEquals(expectedOutput.size(), actualQueryParams.size(), - String.format(EXPECTED_BUT_GOT, expectedOutput.size(), actualQueryParams, expectedOutput)); - expectedOutput.forEach((key, value) -> assertEquals(value, actualQueryParams.get(key))); - } - } - - @Test - void testEmptyOutputCompleteSourceOAuth() throws IOException, InterruptedException, ConfigNotFoundException, JsonValidationException { - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(getMockedResponse()); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - final Map actualQueryParams = oauthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL, - getInputOAuthConfiguration(), getEmptyOAuthConfigSpecification()); - assertEquals(0, actualQueryParams.size(), - String.format("Expected no values but got %s", actualQueryParams)); - } - - @Test - void testEmptyOutputCompleteDestinationOAuth() throws IOException, InterruptedException, ConfigNotFoundException, JsonValidationException { - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(getMockedResponse()); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - final Map actualQueryParams = oauthFlow.completeDestinationOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL, - getInputOAuthConfiguration(), getEmptyOAuthConfigSpecification()); - assertEquals(0, actualQueryParams.size(), - String.format("Expected no values but got %s", actualQueryParams)); - } - - @Test - void testEmptyInputCompleteSourceOAuth() throws IOException, InterruptedException, ConfigNotFoundException, 
JsonValidationException { - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(getMockedResponse()); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - final Map actualQueryParams = oauthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL, - Jsons.emptyObject(), getoAuthConfigSpecification()); - final Map expectedOutput = getExpectedFilteredOutput(); - assertEquals(expectedOutput.size(), actualQueryParams.size(), - String.format(EXPECTED_BUT_GOT, expectedOutput.size(), actualQueryParams, expectedOutput)); - expectedOutput.forEach((key, value) -> assertEquals(value, actualQueryParams.get(key))); - } - - @Test - void testEmptyInputCompleteDestinationOAuth() throws IOException, InterruptedException, ConfigNotFoundException, JsonValidationException { - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(getMockedResponse()); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - final Map actualQueryParams = oauthFlow.completeDestinationOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL, - Jsons.emptyObject(), getoAuthConfigSpecification()); - final Map expectedOutput = getExpectedFilteredOutput(); - assertEquals(expectedOutput.size(), actualQueryParams.size(), - String.format(EXPECTED_BUT_GOT, expectedOutput.size(), actualQueryParams, expectedOutput)); - expectedOutput.forEach((key, value) -> assertEquals(value, actualQueryParams.get(key))); - } - - @Test - void testCompleteSourceOAuth() throws IOException, InterruptedException, ConfigNotFoundException, JsonValidationException { - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(getMockedResponse()); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - final Map actualQueryParams = 
oauthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL, - getInputOAuthConfiguration(), getoAuthConfigSpecification()); - final Map expectedOutput = getExpectedFilteredOutput(); - assertEquals(expectedOutput.size(), actualQueryParams.size(), - String.format(EXPECTED_BUT_GOT, expectedOutput.size(), actualQueryParams, expectedOutput)); - expectedOutput.forEach((key, value) -> assertEquals(value, actualQueryParams.get(key))); - } - - @Test - void testCompleteDestinationOAuth() throws IOException, InterruptedException, ConfigNotFoundException, JsonValidationException { - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(getMockedResponse()); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - final Map actualQueryParams = oauthFlow.completeDestinationOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL, - getInputOAuthConfiguration(), getoAuthConfigSpecification()); - final Map expectedOutput = getExpectedFilteredOutput(); - assertEquals(expectedOutput.size(), actualQueryParams.size(), - String.format(EXPECTED_BUT_GOT, expectedOutput.size(), actualQueryParams, expectedOutput)); - expectedOutput.forEach((key, value) -> assertEquals(value, actualQueryParams.get(key))); - } - - @Test - void testValidateOAuthOutputFailure() throws IOException, InterruptedException, ConfigNotFoundException, JsonValidationException { - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(getMockedResponse()); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of(CODE, TEST_CODE); - final OAuthConfigSpecification oAuthConfigSpecification = getOAuthConfigSpecification(); - assertThrows(JsonValidationException.class, () -> oauthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL, - getInputOAuthConfiguration(), oAuthConfigSpecification)); - 
assertThrows(JsonValidationException.class, () -> oauthFlow.completeDestinationOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL, - getInputOAuthConfiguration(), oAuthConfigSpecification)); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/DriftOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/DriftOAuthFlowTest.java deleted file mode 100644 index 6838596de33a..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/DriftOAuthFlowTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; - -public class DriftOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new DriftOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://dev.drift.com/authorize?response_type=code&client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state"; - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/GithubOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/GithubOAuthFlowTest.java deleted file mode 100644 index e83c79e3fe66..000000000000 
--- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/GithubOAuthFlowTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; - -public class GithubOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new GithubOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://github.com/login/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&scope=repo%20read:org%20read:repo_hook%20read:user%20read:discussion%20workflow&state=state"; - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/GitlabOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/GitlabOAuthFlowTest.java deleted file mode 100644 index adf25b06d882..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/GitlabOAuthFlowTest.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.time.Clock; -import java.time.Instant; -import java.time.ZoneId; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class GitlabOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - final Clock clock = Clock.fixed(Instant.ofEpochSecond(1673464409), ZoneId.of("UTC")); - return new GitlabOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState, clock); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://gitlab.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&response_type=code&scope=read_api"; - } - - @Override - @SuppressWarnings("PMD.AvoidDuplicateLiterals") - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of( - "token_expiry_date", Map.of("type", "string"), - "access_token", Map.of("type", "string"), - "refresh_token", Map.of("type", "string"))); - } - - @Override - protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(Map.of("domain", "gitlab.com")); - } - - @Override - protected JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of("domain", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "expires_in", "720", - "refresh_token", "refresh_token_response", - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "token_expiry_date", "2023-01-11T19:25:29Z", - "refresh_token", "refresh_token_response", - "access_token", 
"access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - - @Test - @Override - void testEmptyInputCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testEmptyOutputCompleteDestinationOAuth() {} - - @Test - @Override - void testCompleteDestinationOAuth() {} - - @Test - @Override - void testGetDestinationConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testGetDestinationConsentUrl() {} - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HarvestOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HarvestOAuthFlowTest.java deleted file mode 100644 index 053d75ddd0c2..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HarvestOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class HarvestOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new HarvestOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://id.getharvest.com/oauth2/authorize?client_id=test_client_id&response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java deleted file mode 100644 index 054184b99c83..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class HubspotOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new HubspotOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://app.hubspot.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scopes=crm.schemas.contacts.read&optional_scopes=content+crm.schemas.deals.read+crm.objects.owners.read+forms+tickets+e-commerce+crm.objects.companies.read+crm.lists.read+crm.objects.deals.read+crm.objects.contacts.read+crm.schemas.companies.read+files+forms-uploaded-files+files.ui_hidden.read+crm.objects.feedback_submissions.read+sales-email-read+automation"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/IntercomOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/IntercomOAuthFlowTest.java deleted file mode 100644 index 03ebd8a6f00a..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/IntercomOAuthFlowTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.List; -import java.util.Map; - -public class IntercomOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new IntercomOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected List getExpectedOutputPath() { - return List.of(); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://app.intercom.com/a/oauth/connect?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state"; - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LeverOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LeverOAuthFlowTest.java deleted file mode 100644 index faaf297ea728..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LeverOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class LeverOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new HarvestOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://id.getharvest.com/oauth2/authorize?client_id=test_client_id&response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java deleted file mode 100644 index 6d281c3cdbf2..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class LinkedinAdsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new LinkedinAdsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://www.linkedin.com/oauth/v2/authorization?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=r_ads_reporting+r_emailaddress+r_liteprofile+r_ads+r_basicprofile+r_organization_social&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MailchimpOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MailchimpOAuthFlowTest.java deleted file mode 100644 index c1a71de9eb4e..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MailchimpOAuthFlowTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; - -public class MailchimpOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new MailchimpOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://login.mailchimp.com/oauth2/authorize?client_id=test_client_id&response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&state=state"; - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftBingAdsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftBingAdsOAuthFlowTest.java deleted file mode 100644 index 4549eec2279e..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftBingAdsOAuthFlowTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class MicrosoftBingAdsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new MicrosoftBingAdsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://login.microsoftonline.com/test_tenant_id/oauth2/v2.0/authorize?client_id=test_client_id&response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&response_mode=query&state=state&scope=offline_access%20https://ads.microsoft.com/msads.manage"; - } - - @Override - protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(Map.of("tenant_id", "test_tenant_id")); - } - - @Override - protected JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of("tenant_id", Map.of("type", "string"))); - } - - @Test - @Override - void testEmptyInputCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyInputCompleteSourceOAuth() {} - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftTeamsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftTeamsOAuthFlowTest.java deleted file mode 100644 index 968c3f72840f..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftTeamsOAuthFlowTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class MicrosoftTeamsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new MicrosoftTeamsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://login.microsoftonline.com/test_tenant_id/oauth2/v2.0/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=offline_access+Application.Read.All+Channel.ReadBasic.All+ChannelMember.Read.All+ChannelMember.ReadWrite.All+ChannelSettings.Read.All+ChannelSettings.ReadWrite.All+Directory.Read.All+Directory.ReadWrite.All+Files.Read.All+Files.ReadWrite.All+Group.Read.All+Group.ReadWrite.All+GroupMember.Read.All+Reports.Read.All+Sites.Read.All+Sites.ReadWrite.All+TeamsTab.Read.All+TeamsTab.ReadWrite.All+User.Read.All+User.ReadWrite.All&response_type=code"; - } - - @Override - protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(Map.of("tenant_id", "test_tenant_id")); - } - - @Override - protected JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of("tenant_id", Map.of("type", "string"))); - } - - @Test - @Override - void testEmptyInputCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyInputCompleteDestinationOAuth() {} - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MondayOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MondayOAuthFlowTest.java deleted file mode 100644 index d350e880ec48..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MondayOAuthFlowTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, 
Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class MondayOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new MondayOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://auth.monday.com/oauth2/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&scope=me%3Aread+boards%3Aread+workspaces%3Aread+users%3Aread+account%3Aread+updates%3Aread+assets%3Aread+tags%3Aread+teams%3Aread&state=state&subdomain=test_subdomain"; - } - - @Override - protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(Map.of("subdomain", "test_subdomain")); - } - - @Override - protected JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of("subdomain", Map.of("type", "string"))); - } - - @Test - @Override - void testGetSourceConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testGetDestinationConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteSourceOAuth() {} - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - 
"client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/PinterestOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/PinterestOAuthFlowTest.java deleted file mode 100644 index 89c27667e3f2..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/PinterestOAuthFlowTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class PinterestOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new PinterestOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://pinterest.com/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=ads%3Aread%2Cboards%3Aread%2Cboards%3Aread_secret%2Ccatalogs%3Aread%2Cpins%3Aread%2Cpins%3Aread_secret%2Cuser_accounts%3Aread&state=state"; - } - - @Test - @Override - void testEmptyOutputCompleteSourceOAuth() {} - - @Test - @Override - void testGetSourceConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testValidateOAuthOutputFailure() {} - - @Test - @Override - void testCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyInputCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyOutputCompleteDestinationOAuth() {} - - @Test - @Override - void testCompleteDestinationOAuth() {} - - @Test - @Override - void testGetDestinationConsentUrlEmptyOAuthSpec() {} - 
- @Test - @Override - void testEmptyInputCompleteSourceOAuth() {} - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/PipeDriveOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/PipeDriveOAuthFlowTest.java deleted file mode 100644 index 72212afdafbd..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/PipeDriveOAuthFlowTest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; -import java.util.List; - -public class PipeDriveOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new PipeDriveOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://oauth.pipedrive.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state"; - } - - @Override - protected List getExpectedOutputPath() { - return List.of("authorization"); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/QuickbooksOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/QuickbooksOAuthFlowTest.java deleted file mode 100644 index 76e8a98a9e6a..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/QuickbooksOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class QuickbooksOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new QuickbooksOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://appcenter.intuit.com/app/connect/oauth2?client_id=test_client_id&scope=com.intuit.quickbooks.accounting&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/RetentlyOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/RetentlyOAuthFlowTest.java deleted file mode 100644 index 05cb82e5e094..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/RetentlyOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class RetentlyOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new RetentlyOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://app.retently.com/api/oauth/authorize?client_id=test_client_id&response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SalesforceOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SalesforceOAuthFlowTest.java deleted file mode 100644 index bacadf3941fb..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SalesforceOAuthFlowTest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; -import java.util.List; - -public class SalesforceOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new SalesforceOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://login.salesforce.com/services/oauth2/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state"; - } - - @Override - protected List getExpectedOutputPath() { - return List.of(); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SlackOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SlackOAuthFlowTest.java deleted file mode 100644 index ade157777cd3..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SlackOAuthFlowTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; - -public class SlackOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new SlackOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://slack.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=read"; - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SmartsheetsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SmartsheetsOAuthFlowTest.java deleted file mode 100644 index 2ccb08207cfa..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SmartsheetsOAuthFlowTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.List; -import java.util.Map; - -public class SmartsheetsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new SmartsheetsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected List getExpectedOutputPath() { - return List.of(); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://app.smartsheet.com/b/authorize?client_id=test_client_id&response_type=code&state=state&scope=READ_SHEETS"; - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnapchatMarketingOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnapchatMarketingOAuthFlowTest.java deleted file mode 100644 index b6e41d2d2dda..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnapchatMarketingOAuthFlowTest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; -import java.util.List; - -public class SnapchatMarketingOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new SnapchatMarketingOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected List getExpectedOutputPath() { - return List.of(); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://accounts.snapchat.com/login/oauth2/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=snapchat-marketing-api&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java deleted file mode 100644 index b64c261bff77..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class SnowflakeOAuthFlowTest extends BaseOAuthFlowTest { - - public static final String STRING = "string"; - public static final String TYPE = "type"; - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new SourceSnowflakeOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://account.aws.snowflakecomputing.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state&scope=session%3Arole%3Asome_role"; - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "refresh_token", "refresh_token_response", - "username", "username"); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of(TYPE, STRING), "refresh_token", Map.of(TYPE, STRING))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "refresh_token", "refresh_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getOAuthParamConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", "test_client_id") - .put("client_secret", "test_client_secret") - .build()); - } - - @Override - protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(ImmutableMap.builder() - .put("host", "account.aws.snowflakecomputing.com") - .put("role", "some_role") - .build()); - } - - @Override - protected 
JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of("host", Map.of(TYPE, STRING), "role", Map.of(TYPE, STRING))); - } - - @Test - @Override - void testGetSourceConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testGetDestinationConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteSourceOAuth() {} - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SquareOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SquareOAuthFlowTest.java deleted file mode 100644 index 5ce4f78d8716..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SquareOAuthFlowTest.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class SquareOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new SquareOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://connect.squareup.com/oauth2/authorize?client_id=test_client_id" + - "&scope=CUSTOMERS_READ+EMPLOYEES_READ+ITEMS_READ+MERCHANT_PROFILE_READ+ORDERS_READ+PAYMENTS_READ+TIMECARDS_READ" + - "&session=False&state=state"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/StravaOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/StravaOAuthFlowTest.java deleted file mode 100644 index 43de22e7a014..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/StravaOAuthFlowTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import io.airbyte.oauth.BaseOAuthFlow; - -public class StravaOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new StravaOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://www.strava.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=activity%3Aread_all&response_type=code"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SurveymonkeyOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SurveymonkeyOAuthFlowTest.java deleted file mode 100644 index af564e0cf083..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SurveymonkeyOAuthFlowTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.List; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class SurveymonkeyOAuthFlowTest extends BaseOAuthFlowTest { - - public static final String STRING = "string"; - public static final String TYPE = "type"; - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new SurveymonkeyOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://api.surveymonkey.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state"; - } - - @Override - protected List 
getExpectedOutputPath() { - return List.of(); - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(ImmutableMap.builder() - .put("origin", "USA") - .build()); - } - - @Override - protected JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of("origin", "USA")); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - - @Test - void testGetAccessTokenUrl() { - final SurveymonkeyOAuthFlow oauthFlow = (SurveymonkeyOAuthFlow) getOAuthFlow(); - final String expectedAccessTokenUrl = "https://api.surveymonkey.com/oauth/token"; - - final String accessTokenUrl = oauthFlow.getAccessTokenUrl(getInputOAuthConfiguration()); - assertEquals(accessTokenUrl, expectedAccessTokenUrl); - } - - @Test - @Override - void testGetDestinationConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testGetDestinationConsentUrl() {} - - @Test - @Override - void testGetSourceConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testGetSourceConsentUrl() {} - - @Test - @Override - void testEmptyInputCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyInputCompleteSourceOAuth() {} - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TikTokMarketingOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TikTokMarketingOAuthFlowTest.java deleted 
file mode 100644 index 04f80fd6a653..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TikTokMarketingOAuthFlowTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.protocol.models.OAuthConfigSpecification; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class TikTokMarketingOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new TikTokMarketingOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://ads.tiktok.com/marketing_api/auth?app_id=app_id" + - "&redirect_uri=https%3A%2F%2Fairbyte.io" + - "&state=state"; - } - - @Override - protected JsonNode getOAuthParamConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put("app_id", "app_id") - .put("secret", "secret") - .build()); - } - - @Override - protected OAuthConfigSpecification getOAuthConfigSpecification() { - return getoAuthConfigSpecification() - // change property types to induce json validation errors. 
- .withCompleteOauthServerOutputSpecification(getJsonSchema(Map.of("app_id", Map.of("type", "integer")))) - .withCompleteOauthOutputSpecification(getJsonSchema(Map.of("access_token", Map.of("type", "integer")))); - } - - @Override - protected String getMockedResponse() { - return "{\n" - + " \"data\":{\n" - + " \"access_token\":\"access_token_response\"\n" - + " }\n" - + "}"; - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of("access_token", "access_token_response"); - } - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteSourceOAuth() {} - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TrelloOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TrelloOAuthFlowTest.java deleted file mode 100644 index e9746c0a7d69..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TrelloOAuthFlowTest.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.google.api.client.http.HttpTransport; -import com.google.api.client.http.LowLevelHttpRequest; -import com.google.api.client.http.LowLevelHttpResponse; -import com.google.api.client.testing.http.MockHttpTransport; -import com.google.api.client.testing.http.MockLowLevelHttpRequest; -import com.google.api.client.testing.http.MockLowLevelHttpResponse; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.MoreOAuthParameters; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class TrelloOAuthFlowTest { - - private static final String REDIRECT_URL = "https://airbyte.io"; - - private UUID workspaceId; - private UUID definitionId; - private ConfigRepository configRepository; - private TrelloOAuthFlow trelloOAuthFlow; - private HttpTransport transport; - - @BeforeEach - void setup() throws IOException, JsonValidationException { - workspaceId = UUID.randomUUID(); - definitionId = UUID.randomUUID(); - - transport = new MockHttpTransport() { - - @Override - public LowLevelHttpRequest buildRequest(final String method, final String url) throws IOException { - return new MockLowLevelHttpRequest() { - - @Override - public LowLevelHttpResponse execute() throws IOException { - final MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); - response.setStatusCode(200); - response.setContentType("application/x-www-form-urlencoded"); - 
response.setContent("oauth_token=test_token&oauth_token_secret=test_secret&oauth_callback_confirmed=true"); - return response; - } - - }; - } - - }; - configRepository = mock(ConfigRepository.class); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() - .put("client_id", "test_client_id") - .put("client_secret", "test_client_secret") - .build())))); - trelloOAuthFlow = new TrelloOAuthFlow(configRepository, transport); - } - - @Test - void testGetSourceConsentUrl() throws IOException, InterruptedException, ConfigNotFoundException { - final String consentUrl = - trelloOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); - assertEquals("https://trello.com/1/OAuthAuthorizeToken?oauth_token=test_token", consentUrl); - } - - @Test - void testCompleteSourceAuth() throws IOException, InterruptedException, ConfigNotFoundException { - final Map expectedParams = Map.of( - "key", "test_client_id", - "token", "test_token", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - final Map queryParams = Map.of("oauth_token", "token", "oauth_verifier", "verifier"); - final Map actualParams = - trelloOAuthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL); - assertEquals(actualParams, expectedParams); - assertEquals(expectedParams.size(), actualParams.size(), - String.format("Expected %s values but got %s", expectedParams.size(), actualParams)); - expectedParams.forEach((key, value) -> assertEquals(value, actualParams.get(key))); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/ZendeskSunshineOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/ZendeskSunshineOAuthFlowTest.java deleted file mode 100644 index 7026225851df..000000000000 --- 
a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/ZendeskSunshineOAuthFlowTest.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class ZendeskSunshineOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new ZendeskSunshineOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://test_subdomain.zendesk.com/oauth/authorizations/new?response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&client_id=test_client_id&scope=read&state=state"; - } - - @Override - protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(Map.of("subdomain", "test_subdomain")); - } - - @Override - protected JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of("subdomain", Map.of("type", "string"))); - } - - @Test - @Override - void testEmptyOutputCompleteSourceOAuth() {} - - @Test - @Override - void testGetSourceConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testValidateOAuthOutputFailure() {} - - @Test - @Override - void testCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyInputCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyOutputCompleteDestinationOAuth() {} - - @Test - @Override - void testCompleteDestinationOAuth() {} - - @Test - @Override - void testGetDestinationConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void 
testEmptyInputCompleteSourceOAuth() {} - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/ZendeskTalkOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/ZendeskTalkOAuthFlowTest.java deleted file mode 100644 index f46cd7c587c8..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/ZendeskTalkOAuthFlowTest.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import java.util.Map; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class ZendeskTalkOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new ZendeskTalkOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://test_subdomain.zendesk.com/oauth/authorizations/new?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=read&state=state"; - } - - @Override - protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(Map.of("subdomain", "test_subdomain")); - } - - @Override - protected JsonNode getUserInputFromConnectorConfigSpecification() { - return getJsonSchema(Map.of("subdomain", Map.of("type", "string"))); - } - - @Test - @Override - void testEmptyOutputCompleteSourceOAuth() {} - - @Test - @Override - void testGetSourceConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testValidateOAuthOutputFailure() {} - - @Test - @Override - void testCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyInputCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteDestinationOAuth() {} - - @Test - @Override - void testDeprecatedCompleteSourceOAuth() {} - - @Test - @Override - void testEmptyOutputCompleteDestinationOAuth() {} - - @Test - @Override - void testCompleteDestinationOAuth() {} - - @Test - @Override - void testGetDestinationConsentUrlEmptyOAuthSpec() {} - - @Test - @Override - void testEmptyInputCompleteSourceOAuth() {} - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", 
MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/FacebookMarketingOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/FacebookMarketingOAuthFlowTest.java deleted file mode 100644 index afb3c2ba0b53..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/FacebookMarketingOAuthFlowTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows.facebook; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; -import java.util.List; -import java.util.Map; - -public class FacebookMarketingOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new FacebookMarketingOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=ads_management%2Cads_read%2Cread_insights%2Cbusiness_management"; - } - - @Override - protected List getExpectedOutputPath() { - return List.of(); - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected 
JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/FacebookPagesOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/FacebookPagesOAuthFlowTest.java deleted file mode 100644 index 252e547a7df9..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/FacebookPagesOAuthFlowTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows.facebook; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; -import java.util.List; -import java.util.Map; - -public class FacebookPagesOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new FacebookPagesOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=pages_manage_ads%2Cpages_manage_metadata%2Cpages_read_engagement%2Cpages_read_user_content"; - } - - @Override - protected List getExpectedOutputPath() { - return List.of(); - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", 
"string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java deleted file mode 100644 index 37afd705f88f..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows.facebook; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.MoreOAuthParameters; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; -import java.util.List; -import java.util.Map; - -public class InstagramOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new InstagramOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=ads_management%2Cinstagram_basic%2Cinstagram_manage_insights%2Cpages_show_list%2Cpages_read_engagement"; - } - - @Override - protected List getExpectedOutputPath() { - return List.of(); - } - - @Override - protected Map getExpectedOutput() { - return Map.of( - "access_token", "access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK, - "client_secret", MoreOAuthParameters.SECRET_MASK); - } - - @Override - protected JsonNode getCompleteOAuthOutputSpecification() { - return getJsonSchema(Map.of("access_token", Map.of("type", "string"))); - } - - @Override - protected Map getExpectedFilteredOutput() { - return Map.of( - "access_token", 
"access_token_response", - "client_id", MoreOAuthParameters.SECRET_MASK); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlowTest.java deleted file mode 100644 index 7ac8ad2dfc84..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/DestinationGoogleSheetsOAuthFlowTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.oauth.flows.google; - -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; - -public class DestinationGoogleSheetsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new DestinationGoogleSheetsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://accounts.google.com/o/oauth2/v2/auth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fspreadsheets+https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive&access_type=offline&state=state&include_granted_scopes=true&prompt=consent"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowTest.java deleted file mode 100644 index c995b3a38a2c..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; - -public class GoogleAdsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new GoogleAdsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://accounts.google.com/o/oauth2/v2/auth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fadwords&access_type=offline&state=state&include_granted_scopes=true&prompt=consent"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowTest.java deleted file mode 100644 index ea85acad5c34..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; - -public class GoogleAnalyticsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new GoogleAnalyticsViewIdOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://accounts.google.com/o/oauth2/v2/auth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fanalytics.readonly&access_type=offline&state=state&include_granted_scopes=true&prompt=consent"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowTest.java deleted file mode 100644 index abb008879d87..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowTest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; -import java.util.List; - -public class GoogleSearchConsoleOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new GoogleSearchConsoleOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://accounts.google.com/o/oauth2/v2/auth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fwebmasters.readonly&access_type=offline&state=state&include_granted_scopes=true&prompt=consent"; - } - - @Override - protected List getExpectedOutputPath() { - return List.of("authorization"); - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowTest.java deleted file mode 100644 index cddc7bf2bed1..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; - -public class GoogleSheetsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new GoogleSheetsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://accounts.google.com/o/oauth2/v2/auth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fspreadsheets.readonly+https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive.readonly&access_type=offline&state=state&include_granted_scopes=true&prompt=consent"; - } - -} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsOAuthFlowTest.java deleted file mode 100644 index a8b23e188f72..000000000000 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsOAuthFlowTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.oauth.flows.google; - -import io.airbyte.oauth.BaseOAuthFlow; -import io.airbyte.oauth.flows.BaseOAuthFlowTest; - -public class YouTubeAnalyticsOAuthFlowTest extends BaseOAuthFlowTest { - - @Override - protected BaseOAuthFlow getOAuthFlow() { - return new YouTubeAnalyticsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); - } - - @Override - protected String getExpectedConsentUrl() { - return "https://accounts.google.com/o/oauth2/v2/auth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fyt-analytics.readonly&access_type=offline&state=state&include_granted_scopes=true&prompt=consent"; - } - -} diff --git a/airbyte-persistence/README.md b/airbyte-persistence/README.md deleted file mode 100644 index dd92b7108721..000000000000 --- a/airbyte-persistence/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# airbyte-persistence - -Home for all-things persistence related. \ No newline at end of file diff --git a/airbyte-persistence/job-persistence/build.gradle b/airbyte-persistence/job-persistence/build.gradle deleted file mode 100644 index 3bc8f7e29f90..000000000000 --- a/airbyte-persistence/job-persistence/build.gradle +++ /dev/null @@ -1,35 +0,0 @@ -plugins { - id "java-library" -} - -dependencies { - implementation project(':airbyte-commons') - implementation project(':airbyte-commons-protocol') - implementation project(':airbyte-oauth') - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-db:jooq') - implementation project(':airbyte-db:db-lib') - implementation libs.airbyte.protocol - implementation project(':airbyte-config:config-persistence') - implementation project(':airbyte-json-validation') - implementation project(':airbyte-notification') - implementation project(':airbyte-analytics') - - implementation 'io.sentry:sentry:6.1.0' - implementation libs.otel.semconv - implementation libs.otel.sdk - 
implementation libs.otel.sdk.testing - implementation libs.micrometer.statsd - implementation platform(libs.otel.bom) - implementation("io.opentelemetry:opentelemetry-api") - implementation("io.opentelemetry:opentelemetry-sdk") - implementation("io.opentelemetry:opentelemetry-exporter-otlp") - - implementation 'com.datadoghq:java-dogstatsd-client:4.0.0' - - testImplementation project(':airbyte-config:config-persistence') - testImplementation project(':airbyte-test-utils') - testImplementation libs.platform.testcontainers.postgresql -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java deleted file mode 100644 index 6175ae30da82..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.version.Version; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobResetConnectionConfig; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.JobTypeResourceLimit.JobType; -import io.airbyte.config.ResetSourceConfiguration; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.DestinationSyncMode; -import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.protocol.models.SyncMode; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import javax.annotation.Nullable; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class DefaultJobCreator implements JobCreator { - - private final JobPersistence jobPersistence; - private final ResourceRequirements workerResourceRequirements; - - public DefaultJobCreator(final JobPersistence jobPersistence, - final ResourceRequirements workerResourceRequirements) { - this.jobPersistence = jobPersistence; - this.workerResourceRequirements = workerResourceRequirements; - } - - @Override - public Optional createSyncJob(final SourceConnection source, - final DestinationConnection destination, - final StandardSync standardSync, - final String sourceDockerImageName, - final Version sourceProtocolVersion, - final String destinationDockerImageName, - final Version destinationProtocolVersion, - final List standardSyncOperations, - @Nullable final 
JsonNode webhookOperationConfigs, - final StandardSourceDefinition sourceDefinition, - final StandardDestinationDefinition destinationDefinition, - final UUID workspaceId) - throws IOException { - // reusing this isn't going to quite work. - - final ResourceRequirements mergedOrchestratorResourceReq = ResourceRequirementsUtils.getResourceRequirements( - standardSync.getResourceRequirements(), - workerResourceRequirements); - final ResourceRequirements mergedSrcResourceReq = ResourceRequirementsUtils.getResourceRequirements( - standardSync.getResourceRequirements(), - sourceDefinition.getResourceRequirements(), - workerResourceRequirements, - JobType.SYNC); - final ResourceRequirements mergedDstResourceReq = ResourceRequirementsUtils.getResourceRequirements( - standardSync.getResourceRequirements(), - destinationDefinition.getResourceRequirements(), - workerResourceRequirements, - JobType.SYNC); - - final JobSyncConfig jobSyncConfig = new JobSyncConfig() - .withNamespaceDefinition(standardSync.getNamespaceDefinition()) - .withNamespaceFormat(standardSync.getNamespaceFormat()) - .withPrefix(standardSync.getPrefix()) - .withSourceDockerImage(sourceDockerImageName) - .withSourceProtocolVersion(sourceProtocolVersion) - .withDestinationDockerImage(destinationDockerImageName) - .withDestinationProtocolVersion(destinationProtocolVersion) - .withOperationSequence(standardSyncOperations) - .withWebhookOperationConfigs(webhookOperationConfigs) - .withConfiguredAirbyteCatalog(standardSync.getCatalog()) - .withResourceRequirements(mergedOrchestratorResourceReq) - .withSourceResourceRequirements(mergedSrcResourceReq) - .withDestinationResourceRequirements(mergedDstResourceReq) - .withIsSourceCustomConnector(sourceDefinition.getCustom()) - .withIsDestinationCustomConnector(destinationDefinition.getCustom()) - .withWorkspaceId(workspaceId); - - final JobConfig jobConfig = new JobConfig() - .withConfigType(ConfigType.SYNC) - .withSync(jobSyncConfig); - return 
jobPersistence.enqueueJob(standardSync.getConnectionId().toString(), jobConfig); - } - - @Override - public Optional createResetConnectionJob(final DestinationConnection destination, - final StandardSync standardSync, - final String destinationDockerImage, - final Version destinationProtocolVersion, - final boolean isDestinationCustomConnector, - final List standardSyncOperations, - final List streamsToReset) - throws IOException { - final ConfiguredAirbyteCatalog configuredAirbyteCatalog = standardSync.getCatalog(); - configuredAirbyteCatalog.getStreams().forEach(configuredAirbyteStream -> { - final StreamDescriptor streamDescriptor = CatalogHelpers.extractDescriptor(configuredAirbyteStream); - if (streamsToReset.contains(streamDescriptor)) { - // The Reset Source will emit no record messages for any streams, so setting the destination sync - // mode to OVERWRITE will empty out this stream in the destination. - // Note: streams in streamsToReset that are NOT in this configured catalog (i.e. deleted streams) - // will still have their state reset by the Reset Source, but will not be modified in the - // destination since they are not present in the catalog that is sent to the destination. 
- configuredAirbyteStream.setSyncMode(SyncMode.FULL_REFRESH); - configuredAirbyteStream.setDestinationSyncMode(DestinationSyncMode.OVERWRITE); - } else { - // Set streams that are not being reset to APPEND so that they are not modified in the destination - if (configuredAirbyteStream.getDestinationSyncMode() == DestinationSyncMode.OVERWRITE) { - configuredAirbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); - } - } - }); - final JobResetConnectionConfig resetConnectionConfig = new JobResetConnectionConfig() - .withNamespaceDefinition(standardSync.getNamespaceDefinition()) - .withNamespaceFormat(standardSync.getNamespaceFormat()) - .withPrefix(standardSync.getPrefix()) - .withDestinationDockerImage(destinationDockerImage) - .withDestinationProtocolVersion(destinationProtocolVersion) - .withOperationSequence(standardSyncOperations) - .withConfiguredAirbyteCatalog(configuredAirbyteCatalog) - .withResourceRequirements(ResourceRequirementsUtils.getResourceRequirements( - standardSync.getResourceRequirements(), - workerResourceRequirements)) - .withResetSourceConfiguration(new ResetSourceConfiguration().withStreamsToReset(streamsToReset)) - .withIsSourceCustomConnector(false) - .withIsDestinationCustomConnector(isDestinationCustomConnector) - .withWorkspaceId(destination.getWorkspaceId()); - - final JobConfig jobConfig = new JobConfig() - .withConfigType(ConfigType.RESET_CONNECTION) - .withResetConnection(resetConnectionConfig); - return jobPersistence.enqueueJob(standardSync.getConnectionId().toString(), jobConfig); - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java deleted file mode 100644 index bedda1566f29..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java +++ /dev/null @@ -1,1389 +0,0 @@ -/* - * 
Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job; - -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.ATTEMPTS; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.NORMALIZATION_SUMMARIES; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.STREAM_STATS; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.SYNC_STATS; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.JsonNodeType; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import com.google.common.collect.UnmodifiableIterator; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.protocol.migrations.v1.CatalogMigrationV1Helper; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.text.Names; -import io.airbyte.commons.text.Sqls; -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.commons.version.Version; -import io.airbyte.config.AttemptFailureSummary; -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.FailureReason; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobOutput; -import io.airbyte.config.JobOutput.OutputType; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import 
io.airbyte.config.persistence.PersistenceHelpers; -import io.airbyte.db.Database; -import io.airbyte.db.ExceptionWrappingDatabase; -import io.airbyte.db.instance.jobs.JobsDatabaseSchema; -import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptNormalizationStatus; -import io.airbyte.persistence.job.models.AttemptStatus; -import io.airbyte.persistence.job.models.AttemptWithJobInfo; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.persistence.job.models.JobWithStatusAndTimestamp; -import java.io.IOException; -import java.math.BigInteger; -import java.nio.file.Path; -import java.time.Instant; -import java.time.LocalDateTime; -import java.time.OffsetDateTime; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.lang3.StringUtils; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.InsertValuesStepN; -import org.jooq.JSONB; -import org.jooq.Named; -import org.jooq.Record; -import org.jooq.RecordMapper; -import org.jooq.Result; -import org.jooq.Sequence; -import org.jooq.Table; -import org.jooq.conf.ParamType; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultJobPersistence implements JobPersistence { - - // not static because job history test case manipulates these. 
- private final int JOB_HISTORY_MINIMUM_AGE_IN_DAYS; - private final int JOB_HISTORY_MINIMUM_RECENCY; - private final int JOB_HISTORY_EXCESSIVE_NUMBER_OF_JOBS; - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultJobPersistence.class); - public static final String ATTEMPT_NUMBER = "attempt_number"; - private static final String JOB_ID = "job_id"; - private static final String WHERE = "WHERE "; - private static final String AND = " AND "; - private static final String SCOPE_CLAUSE = "scope = ? AND "; - - protected static final String DEFAULT_SCHEMA = "public"; - private static final String BACKUP_SCHEMA = "import_backup"; - public static final String DEPLOYMENT_ID_KEY = "deployment_id"; - public static final String METADATA_KEY_COL = "key"; - public static final String METADATA_VAL_COL = "value"; - - @VisibleForTesting - static final String BASE_JOB_SELECT_AND_JOIN = jobSelectAndJoin("jobs"); - - private static final String AIRBYTE_METADATA_TABLE = "airbyte_metadata"; - public static final String ORDER_BY_JOB_TIME_ATTEMPT_TIME = - "ORDER BY jobs.created_at DESC, jobs.id DESC, attempts.created_at ASC, attempts.id ASC "; - public static final String ORDER_BY_JOB_CREATED_AT_DESC = "ORDER BY jobs.created_at DESC "; - public static final String LIMIT_1 = "LIMIT 1 "; - private static final String JOB_STATUS_IS_NON_TERMINAL = String.format("status IN (%s) ", - JobStatus.NON_TERMINAL_STATUSES.stream() - .map(Sqls::toSqlName) - .map(Names::singleQuote) - .collect(Collectors.joining(","))); - - private final ExceptionWrappingDatabase jobDatabase; - private final Supplier timeSupplier; - - @VisibleForTesting - DefaultJobPersistence(final Database jobDatabase, - final Supplier timeSupplier, - final int minimumAgeInDays, - final int excessiveNumberOfJobs, - final int minimumRecencyCount) { - this.jobDatabase = new ExceptionWrappingDatabase(jobDatabase); - this.timeSupplier = timeSupplier; - JOB_HISTORY_MINIMUM_AGE_IN_DAYS = minimumAgeInDays; - 
JOB_HISTORY_EXCESSIVE_NUMBER_OF_JOBS = excessiveNumberOfJobs; - JOB_HISTORY_MINIMUM_RECENCY = minimumRecencyCount; - } - - public DefaultJobPersistence(final Database jobDatabase) { - this(jobDatabase, Instant::now, 30, 500, 10); - } - - private static String jobSelectAndJoin(final String jobsSubquery) { - return "SELECT\n" - + "jobs.id AS job_id,\n" - + "jobs.config_type AS config_type,\n" - + "jobs.scope AS scope,\n" - + "jobs.config AS config,\n" - + "jobs.status AS job_status,\n" - + "jobs.started_at AS job_started_at,\n" - + "jobs.created_at AS job_created_at,\n" - + "jobs.updated_at AS job_updated_at,\n" - + "attempts.attempt_number AS attempt_number,\n" - + "attempts.attempt_sync_config AS attempt_sync_config,\n" - + "attempts.log_path AS log_path,\n" - + "attempts.output AS attempt_output,\n" - + "attempts.status AS attempt_status,\n" - + "attempts.processing_task_queue AS processing_task_queue,\n" - + "attempts.failure_summary AS attempt_failure_summary,\n" - + "attempts.created_at AS attempt_created_at,\n" - + "attempts.updated_at AS attempt_updated_at,\n" - + "attempts.ended_at AS attempt_ended_at\n" - + "FROM " + jobsSubquery + " LEFT OUTER JOIN attempts ON jobs.id = attempts.job_id "; - } - - /** - * @param scope This is the primary id of a standard sync (StandardSync#connectionId). - */ - @Override - public Optional enqueueJob(final String scope, final JobConfig jobConfig) throws IOException { - LOGGER.info("enqueuing pending job for scope: {}", scope); - final LocalDateTime now = LocalDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - - final String queueingRequest = Job.REPLICATION_TYPES.contains(jobConfig.getConfigType()) - ? 
String.format("WHERE NOT EXISTS (SELECT 1 FROM jobs WHERE config_type IN (%s) AND scope = '%s' AND status NOT IN (%s)) ", - Job.REPLICATION_TYPES.stream().map(Sqls::toSqlName).map(Names::singleQuote).collect(Collectors.joining(",")), - scope, - JobStatus.TERMINAL_STATUSES.stream().map(Sqls::toSqlName).map(Names::singleQuote).collect(Collectors.joining(","))) - : ""; - - return jobDatabase.query( - ctx -> ctx.fetch( - "INSERT INTO jobs(config_type, scope, created_at, updated_at, status, config) " + - "SELECT CAST(? AS JOB_CONFIG_TYPE), ?, ?, ?, CAST(? AS JOB_STATUS), CAST(? as JSONB) " + - queueingRequest + - "RETURNING id ", - Sqls.toSqlName(jobConfig.getConfigType()), - scope, - now, - now, - Sqls.toSqlName(JobStatus.PENDING), - Jsons.serialize(jobConfig))) - .stream() - .findFirst() - .map(r -> r.getValue("id", Long.class)); - } - - @Override - public void resetJob(final long jobId) throws IOException { - final LocalDateTime now = LocalDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - jobDatabase.query(ctx -> { - updateJobStatus(ctx, jobId, JobStatus.PENDING, now); - return null; - }); - } - - @Override - public void cancelJob(final long jobId) throws IOException { - final LocalDateTime now = LocalDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - jobDatabase.query(ctx -> { - updateJobStatus(ctx, jobId, JobStatus.CANCELLED, now); - return null; - }); - } - - @Override - public void failJob(final long jobId) throws IOException { - final LocalDateTime now = LocalDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - jobDatabase.query(ctx -> { - updateJobStatus(ctx, jobId, JobStatus.FAILED, now); - return null; - }); - } - - private void updateJobStatus(final DSLContext ctx, final long jobId, final JobStatus newStatus, final LocalDateTime now) { - final Job job = getJob(ctx, jobId); - if (job.isJobInTerminalState()) { - // If the job is already terminal, no need to set a new status - return; - } - job.validateStatusTransition(newStatus); - 
ctx.execute( - "UPDATE jobs SET status = CAST(? as JOB_STATUS), updated_at = ? WHERE id = ?", - Sqls.toSqlName(newStatus), - now, - jobId); - } - - @Override - public int createAttempt(final long jobId, final Path logPath) throws IOException { - final LocalDateTime now = LocalDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - - return jobDatabase.transaction(ctx -> { - final Job job = getJob(ctx, jobId); - if (job.isJobInTerminalState()) { - final var errMsg = String.format( - "Cannot create an attempt for a job id: %s that is in a terminal state: %s for connection id: %s", - job.getId(), job.getStatus(), job.getScope()); - throw new IllegalStateException(errMsg); - } - - if (job.hasRunningAttempt()) { - final var errMsg = String.format( - "Cannot create an attempt for a job id: %s that has a running attempt: %s for connection id: %s", - job.getId(), job.getStatus(), job.getScope()); - throw new IllegalStateException(errMsg); - } - - updateJobStatus(ctx, jobId, JobStatus.RUNNING, now); - - // will fail if attempt number already exists for the job id. - return ctx.fetch( - "INSERT INTO attempts(job_id, attempt_number, log_path, status, created_at, updated_at) VALUES(?, ?, ?, CAST(? AS ATTEMPT_STATUS), ?, ?) RETURNING attempt_number", - jobId, - job.getAttemptsCount(), - logPath.toString(), - Sqls.toSqlName(AttemptStatus.RUNNING), - now, - now) - .stream() - .findFirst() - .map(r -> r.get(ATTEMPT_NUMBER, Integer.class)) - .orElseThrow(() -> new RuntimeException("This should not happen")); - }); - - } - - @Override - public void failAttempt(final long jobId, final int attemptNumber) throws IOException { - final LocalDateTime now = LocalDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - jobDatabase.transaction(ctx -> { - updateJobStatus(ctx, jobId, JobStatus.INCOMPLETE, now); - - ctx.execute( - "UPDATE attempts SET status = CAST(? as ATTEMPT_STATUS), updated_at = ? , ended_at = ? WHERE job_id = ? 
AND attempt_number = ?", - Sqls.toSqlName(AttemptStatus.FAILED), - now, - now, - jobId, - attemptNumber); - return null; - }); - } - - @Override - public void succeedAttempt(final long jobId, final int attemptNumber) throws IOException { - final LocalDateTime now = LocalDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - jobDatabase.transaction(ctx -> { - updateJobStatus(ctx, jobId, JobStatus.SUCCEEDED, now); - - ctx.execute( - "UPDATE attempts SET status = CAST(? as ATTEMPT_STATUS), updated_at = ? , ended_at = ? WHERE job_id = ? AND attempt_number = ?", - Sqls.toSqlName(AttemptStatus.SUCCEEDED), - now, - now, - jobId, - attemptNumber); - return null; - }); - } - - @Override - public void setAttemptTemporalWorkflowInfo(final long jobId, - final int attemptNumber, - final String temporalWorkflowId, - final String processingTaskQueue) - throws IOException { - jobDatabase.query(ctx -> ctx.execute( - " UPDATE attempts SET temporal_workflow_id = ? , processing_task_queue = ? WHERE job_id = ? AND attempt_number = ?", - temporalWorkflowId, - processingTaskQueue, - jobId, - attemptNumber)); - } - - @Override - public Optional getAttemptTemporalWorkflowId(final long jobId, final int attemptNumber) throws IOException { - final var result = jobDatabase.query(ctx -> ctx.fetch( - " SELECT temporal_workflow_id from attempts WHERE job_id = ? 
AND attempt_number = ?", - jobId, - attemptNumber)).stream().findFirst(); - - if (result.isEmpty() || result.get().get("temporal_workflow_id") == null) { - return Optional.empty(); - } - - return Optional.of(result.get().get("temporal_workflow_id", String.class)); - } - - @Override - public void writeOutput(final long jobId, final int attemptNumber, final JobOutput output) - throws IOException { - final OffsetDateTime now = OffsetDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - - jobDatabase.transaction(ctx -> { - ctx.update(ATTEMPTS) - .set(ATTEMPTS.OUTPUT, JSONB.valueOf(Jsons.serialize(output))) - .set(ATTEMPTS.UPDATED_AT, now) - .where(ATTEMPTS.JOB_ID.eq(jobId), ATTEMPTS.ATTEMPT_NUMBER.eq(attemptNumber)) - .execute(); - final Long attemptId = getAttemptId(jobId, attemptNumber, ctx); - - final SyncStats syncStats = output.getSync().getStandardSyncSummary().getTotalStats(); - if (syncStats != null) { - saveToSyncStatsTable(now, syncStats, attemptId, ctx); - } - - final NormalizationSummary normalizationSummary = output.getSync().getNormalizationSummary(); - if (normalizationSummary != null) { - ctx.insertInto(NORMALIZATION_SUMMARIES) - .set(NORMALIZATION_SUMMARIES.ID, UUID.randomUUID()) - .set(NORMALIZATION_SUMMARIES.UPDATED_AT, now) - .set(NORMALIZATION_SUMMARIES.CREATED_AT, now) - .set(NORMALIZATION_SUMMARIES.ATTEMPT_ID, attemptId) - .set(NORMALIZATION_SUMMARIES.START_TIME, - OffsetDateTime.ofInstant(Instant.ofEpochMilli(normalizationSummary.getStartTime()), ZoneOffset.UTC)) - .set(NORMALIZATION_SUMMARIES.END_TIME, OffsetDateTime.ofInstant(Instant.ofEpochMilli(normalizationSummary.getEndTime()), ZoneOffset.UTC)) - .set(NORMALIZATION_SUMMARIES.FAILURES, JSONB.valueOf(Jsons.serialize(normalizationSummary.getFailures()))) - .execute(); - } - return null; - }); - - } - - @Override - public void writeStats(final long jobId, - final int attemptNumber, - final long estimatedRecords, - final long estimatedBytes, - final long recordsEmitted, - final long 
bytesEmitted, - final List streamStats) - throws IOException { - final OffsetDateTime now = OffsetDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - jobDatabase.transaction(ctx -> { - final var attemptId = getAttemptId(jobId, attemptNumber, ctx); - - final var syncStats = new SyncStats() - .withEstimatedRecords(estimatedRecords) - .withEstimatedBytes(estimatedBytes) - .withRecordsEmitted(recordsEmitted) - .withBytesEmitted(bytesEmitted); - saveToSyncStatsTable(now, syncStats, attemptId, ctx); - - saveToStreamStatsTable(now, streamStats, attemptId, ctx); - return null; - }); - - } - - private static void saveToSyncStatsTable(final OffsetDateTime now, final SyncStats syncStats, final Long attemptId, final DSLContext ctx) { - // Although JOOQ supports upsert using the onConflict statement, we cannot use it as the table - // currently has duplicate records and also doesn't contain the unique constraint on the attempt_id - // column JOOQ requires. We are forced to check for existence. 
- final var isExisting = ctx.fetchExists(SYNC_STATS, SYNC_STATS.ATTEMPT_ID.eq(attemptId)); - if (isExisting) { - ctx.update(SYNC_STATS) - .set(SYNC_STATS.UPDATED_AT, now) - .set(SYNC_STATS.BYTES_EMITTED, syncStats.getBytesEmitted()) - .set(SYNC_STATS.RECORDS_EMITTED, syncStats.getRecordsEmitted()) - .set(SYNC_STATS.ESTIMATED_RECORDS, syncStats.getEstimatedRecords()) - .set(SYNC_STATS.ESTIMATED_BYTES, syncStats.getEstimatedBytes()) - .set(SYNC_STATS.RECORDS_COMMITTED, syncStats.getRecordsCommitted()) - .set(SYNC_STATS.SOURCE_STATE_MESSAGES_EMITTED, syncStats.getSourceStateMessagesEmitted()) - .set(SYNC_STATS.DESTINATION_STATE_MESSAGES_EMITTED, syncStats.getDestinationStateMessagesEmitted()) - .set(SYNC_STATS.MAX_SECONDS_BEFORE_SOURCE_STATE_MESSAGE_EMITTED, syncStats.getMaxSecondsBeforeSourceStateMessageEmitted()) - .set(SYNC_STATS.MEAN_SECONDS_BEFORE_SOURCE_STATE_MESSAGE_EMITTED, syncStats.getMeanSecondsBeforeSourceStateMessageEmitted()) - .set(SYNC_STATS.MAX_SECONDS_BETWEEN_STATE_MESSAGE_EMITTED_AND_COMMITTED, syncStats.getMaxSecondsBetweenStateMessageEmittedandCommitted()) - .set(SYNC_STATS.MEAN_SECONDS_BETWEEN_STATE_MESSAGE_EMITTED_AND_COMMITTED, syncStats.getMeanSecondsBetweenStateMessageEmittedandCommitted()) - .where(SYNC_STATS.ATTEMPT_ID.eq(attemptId)) - .execute(); - return; - } - - ctx.insertInto(SYNC_STATS) - .set(SYNC_STATS.ID, UUID.randomUUID()) - .set(SYNC_STATS.CREATED_AT, now) - .set(SYNC_STATS.ATTEMPT_ID, attemptId) - .set(SYNC_STATS.UPDATED_AT, now) - .set(SYNC_STATS.BYTES_EMITTED, syncStats.getBytesEmitted()) - .set(SYNC_STATS.RECORDS_EMITTED, syncStats.getRecordsEmitted()) - .set(SYNC_STATS.ESTIMATED_RECORDS, syncStats.getEstimatedRecords()) - .set(SYNC_STATS.ESTIMATED_BYTES, syncStats.getEstimatedBytes()) - .set(SYNC_STATS.RECORDS_COMMITTED, syncStats.getRecordsCommitted()) - .set(SYNC_STATS.SOURCE_STATE_MESSAGES_EMITTED, syncStats.getSourceStateMessagesEmitted()) - .set(SYNC_STATS.DESTINATION_STATE_MESSAGES_EMITTED, 
syncStats.getDestinationStateMessagesEmitted()) - .set(SYNC_STATS.MAX_SECONDS_BEFORE_SOURCE_STATE_MESSAGE_EMITTED, syncStats.getMaxSecondsBeforeSourceStateMessageEmitted()) - .set(SYNC_STATS.MEAN_SECONDS_BEFORE_SOURCE_STATE_MESSAGE_EMITTED, syncStats.getMeanSecondsBeforeSourceStateMessageEmitted()) - .set(SYNC_STATS.MAX_SECONDS_BETWEEN_STATE_MESSAGE_EMITTED_AND_COMMITTED, syncStats.getMaxSecondsBetweenStateMessageEmittedandCommitted()) - .set(SYNC_STATS.MEAN_SECONDS_BETWEEN_STATE_MESSAGE_EMITTED_AND_COMMITTED, syncStats.getMeanSecondsBetweenStateMessageEmittedandCommitted()) - .execute(); - } - - private static void saveToStreamStatsTable(final OffsetDateTime now, - final List perStreamStats, - final Long attemptId, - final DSLContext ctx) { - Optional.ofNullable(perStreamStats).orElse(Collections.emptyList()).forEach( - streamStats -> { - // We cannot entirely rely on JOOQ's generated SQL for upserts as it does not support null fields - // for conflict detection. We are forced to separately check for existence. 
- final var stats = streamStats.getStats(); - final var isExisting = - ctx.fetchExists(STREAM_STATS, STREAM_STATS.ATTEMPT_ID.eq(attemptId).and(STREAM_STATS.STREAM_NAME.eq(streamStats.getStreamName())) - .and(PersistenceHelpers.isNullOrEquals(STREAM_STATS.STREAM_NAMESPACE, streamStats.getStreamNamespace()))); - if (isExisting) { - ctx.update(STREAM_STATS) - .set(STREAM_STATS.UPDATED_AT, now) - .set(STREAM_STATS.BYTES_EMITTED, stats.getBytesEmitted()) - .set(STREAM_STATS.RECORDS_EMITTED, stats.getRecordsEmitted()) - .set(STREAM_STATS.ESTIMATED_RECORDS, stats.getEstimatedRecords()) - .set(STREAM_STATS.ESTIMATED_BYTES, stats.getEstimatedBytes()) - .where(STREAM_STATS.ATTEMPT_ID.eq(attemptId)) - .execute(); - return; - } - - ctx.insertInto(STREAM_STATS) - .set(STREAM_STATS.ID, UUID.randomUUID()) - .set(STREAM_STATS.ATTEMPT_ID, attemptId) - .set(STREAM_STATS.STREAM_NAME, streamStats.getStreamName()) - .set(STREAM_STATS.STREAM_NAMESPACE, streamStats.getStreamNamespace()) - .set(STREAM_STATS.CREATED_AT, now) - .set(STREAM_STATS.UPDATED_AT, now) - .set(STREAM_STATS.BYTES_EMITTED, stats.getBytesEmitted()) - .set(STREAM_STATS.RECORDS_EMITTED, stats.getRecordsEmitted()) - .set(STREAM_STATS.ESTIMATED_BYTES, stats.getEstimatedBytes()) - .set(STREAM_STATS.ESTIMATED_RECORDS, stats.getEstimatedRecords()) - .set(STREAM_STATS.UPDATED_AT, now) - .set(STREAM_STATS.BYTES_EMITTED, stats.getBytesEmitted()) - .set(STREAM_STATS.RECORDS_EMITTED, stats.getRecordsEmitted()) - .set(STREAM_STATS.ESTIMATED_BYTES, stats.getEstimatedBytes()) - .set(STREAM_STATS.ESTIMATED_RECORDS, stats.getEstimatedRecords()) - .execute(); - }); - } - - @Override - public void writeAttemptSyncConfig(final long jobId, final int attemptNumber, final AttemptSyncConfig attemptSyncConfig) throws IOException { - final OffsetDateTime now = OffsetDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - - jobDatabase.transaction( - ctx -> ctx.update(ATTEMPTS) - .set(ATTEMPTS.ATTEMPT_SYNC_CONFIG, 
JSONB.valueOf(Jsons.serialize(attemptSyncConfig))) - .set(ATTEMPTS.UPDATED_AT, now) - .where(ATTEMPTS.JOB_ID.eq(jobId), ATTEMPTS.ATTEMPT_NUMBER.eq(attemptNumber)) - .execute()); - } - - @Override - public void writeAttemptFailureSummary(final long jobId, final int attemptNumber, final AttemptFailureSummary failureSummary) throws IOException { - final OffsetDateTime now = OffsetDateTime.ofInstant(timeSupplier.get(), ZoneOffset.UTC); - - jobDatabase.transaction( - ctx -> ctx.update(ATTEMPTS) - .set(ATTEMPTS.FAILURE_SUMMARY, JSONB.valueOf(Jsons.serialize(failureSummary))) - .set(ATTEMPTS.UPDATED_AT, now) - .where(ATTEMPTS.JOB_ID.eq(jobId), ATTEMPTS.ATTEMPT_NUMBER.eq(attemptNumber)) - .execute()); - } - - @Override - public AttemptStats getAttemptStats(final long jobId, final int attemptNumber) throws IOException { - return jobDatabase - .query(ctx -> { - final Long attemptId = getAttemptId(jobId, attemptNumber, ctx); - final var syncStats = ctx.select(DSL.asterisk()).from(SYNC_STATS).where(SYNC_STATS.ATTEMPT_ID.eq(attemptId)) - .orderBy(SYNC_STATS.UPDATED_AT.desc()) - .fetchOne(getSyncStatsRecordMapper()); - final var perStreamStats = ctx.select(DSL.asterisk()).from(STREAM_STATS).where(STREAM_STATS.ATTEMPT_ID.eq(attemptId)) - .fetch(getStreamStatsRecordsMapper()); - return new AttemptStats(syncStats, perStreamStats); - }); - } - - @Override - public Map getAttemptStats(final List jobIds) throws IOException { - if (jobIds == null || jobIds.isEmpty()) { - return Map.of(); - } - - final var jobIdsStr = StringUtils.join(jobIds, ','); - return jobDatabase.query(ctx -> { - // Instead of one massive join query, separate this query into two queries for better readability - // for now. - // We can combine the queries at a later date if this still proves to be not efficient enough. 
- final Map attemptStats = hydrateSyncStats(jobIdsStr, ctx); - hydrateStreamStats(jobIdsStr, ctx, attemptStats); - return attemptStats; - }); - } - - private static Map hydrateSyncStats(final String jobIdsStr, final DSLContext ctx) { - final var attemptStats = new HashMap(); - final var syncResults = ctx.fetch( - "SELECT atmpt.attempt_number, atmpt.job_id," - + "stats.estimated_bytes, stats.estimated_records, stats.bytes_emitted, stats.records_emitted, stats.records_committed " - + "FROM sync_stats stats " - + "INNER JOIN attempts atmpt ON stats.attempt_id = atmpt.id " - + "WHERE job_id IN ( " + jobIdsStr + ");"); - syncResults.forEach(r -> { - final var key = new JobAttemptPair(r.get(ATTEMPTS.JOB_ID), r.get(ATTEMPTS.ATTEMPT_NUMBER)); - final var syncStats = new SyncStats() - .withBytesEmitted(r.get(SYNC_STATS.BYTES_EMITTED)) - .withRecordsEmitted(r.get(SYNC_STATS.RECORDS_EMITTED)) - .withRecordsCommitted(r.get(SYNC_STATS.RECORDS_COMMITTED)) - .withEstimatedRecords(r.get(SYNC_STATS.ESTIMATED_RECORDS)) - .withEstimatedBytes(r.get(SYNC_STATS.ESTIMATED_BYTES)); - attemptStats.put(key, new AttemptStats(syncStats, Lists.newArrayList())); - }); - return attemptStats; - } - - /** - * This method needed to be called after - * {@link DefaultJobPersistence#hydrateSyncStats(String, DSLContext)} as it assumes hydrateSyncStats - * has prepopulated the map. 
- */ - private static void hydrateStreamStats(final String jobIdsStr, final DSLContext ctx, final Map attemptStats) { - final var streamResults = ctx.fetch( - "SELECT atmpt.attempt_number, atmpt.job_id, " - + "stats.stream_name, stats.stream_namespace, stats.estimated_bytes, stats.estimated_records, stats.bytes_emitted, stats.records_emitted " - + "FROM stream_stats stats " - + "INNER JOIN attempts atmpt ON atmpt.id = stats.attempt_id " - + "WHERE attempt_id IN " - + "( SELECT id FROM attempts WHERE job_id IN ( " + jobIdsStr + "));"); - - streamResults.forEach(r -> { - final var streamSyncStats = new StreamSyncStats() - .withStreamNamespace(r.get(STREAM_STATS.STREAM_NAMESPACE)) - .withStreamName(r.get(STREAM_STATS.STREAM_NAME)) - .withStats(new SyncStats() - .withBytesEmitted(r.get(STREAM_STATS.BYTES_EMITTED)) - .withRecordsEmitted(r.get(STREAM_STATS.RECORDS_EMITTED)) - .withEstimatedRecords(r.get(STREAM_STATS.ESTIMATED_RECORDS)) - .withEstimatedBytes(r.get(STREAM_STATS.ESTIMATED_BYTES))); - - final var key = new JobAttemptPair(r.get(ATTEMPTS.JOB_ID), r.get(ATTEMPTS.ATTEMPT_NUMBER)); - if (!attemptStats.containsKey(key)) { - LOGGER.error("{} stream stats entry does not have a corresponding sync stats entry. This suggest the database is in a bad state.", key); - return; - } - attemptStats.get(key).perStreamStats().add(streamSyncStats); - }); - } - - @Override - public List getNormalizationSummary(final long jobId, final int attemptNumber) throws IOException { - return jobDatabase - .query(ctx -> { - final Long attemptId = getAttemptId(jobId, attemptNumber, ctx); - return ctx.select(DSL.asterisk()).from(NORMALIZATION_SUMMARIES).where(NORMALIZATION_SUMMARIES.ATTEMPT_ID.eq(attemptId)) - .fetch(getNormalizationSummaryRecordMapper()) - .stream() - .toList(); - }); - } - - @VisibleForTesting - static Long getAttemptId(final long jobId, final int attemptNumber, final DSLContext ctx) { - final Optional record = - ctx.fetch("SELECT id from attempts where job_id = ? 
AND attempt_number = ?", jobId, - attemptNumber).stream().findFirst(); - if (record.isEmpty()) { - return -1L; - } - - return record.get().get("id", Long.class); - } - - private static RecordMapper getSyncStatsRecordMapper() { - return record -> new SyncStats().withBytesEmitted(record.get(SYNC_STATS.BYTES_EMITTED)).withRecordsEmitted(record.get(SYNC_STATS.RECORDS_EMITTED)) - .withEstimatedBytes(record.get(SYNC_STATS.ESTIMATED_BYTES)).withEstimatedRecords(record.get(SYNC_STATS.ESTIMATED_RECORDS)) - .withSourceStateMessagesEmitted(record.get(SYNC_STATS.SOURCE_STATE_MESSAGES_EMITTED)) - .withDestinationStateMessagesEmitted(record.get(SYNC_STATS.DESTINATION_STATE_MESSAGES_EMITTED)) - .withRecordsCommitted(record.get(SYNC_STATS.RECORDS_COMMITTED)) - .withMeanSecondsBeforeSourceStateMessageEmitted(record.get(SYNC_STATS.MEAN_SECONDS_BEFORE_SOURCE_STATE_MESSAGE_EMITTED)) - .withMaxSecondsBeforeSourceStateMessageEmitted(record.get(SYNC_STATS.MAX_SECONDS_BEFORE_SOURCE_STATE_MESSAGE_EMITTED)) - .withMeanSecondsBetweenStateMessageEmittedandCommitted(record.get(SYNC_STATS.MEAN_SECONDS_BETWEEN_STATE_MESSAGE_EMITTED_AND_COMMITTED)) - .withMaxSecondsBetweenStateMessageEmittedandCommitted(record.get(SYNC_STATS.MAX_SECONDS_BETWEEN_STATE_MESSAGE_EMITTED_AND_COMMITTED)); - } - - private static RecordMapper getStreamStatsRecordsMapper() { - return record -> { - final var stats = new SyncStats() - .withEstimatedRecords(record.get(STREAM_STATS.ESTIMATED_RECORDS)).withEstimatedBytes(record.get(STREAM_STATS.ESTIMATED_BYTES)) - .withRecordsEmitted(record.get(STREAM_STATS.RECORDS_EMITTED)).withBytesEmitted(record.get(STREAM_STATS.BYTES_EMITTED)); - return new StreamSyncStats() - .withStreamName(record.get(STREAM_STATS.STREAM_NAME)).withStreamNamespace(record.get(STREAM_STATS.STREAM_NAMESPACE)) - .withStats(stats); - }; - } - - private static RecordMapper getNormalizationSummaryRecordMapper() { - return record -> { - try { - return new 
NormalizationSummary().withStartTime(record.get(NORMALIZATION_SUMMARIES.START_TIME).toInstant().toEpochMilli()) - .withEndTime(record.get(NORMALIZATION_SUMMARIES.END_TIME).toInstant().toEpochMilli()) - .withFailures(record.get(NORMALIZATION_SUMMARIES.FAILURES, String.class) == null ? null : deserializeFailureReasons(record)); - } catch (final JsonProcessingException e) { - throw new RuntimeException(e); - } - }; - } - - private static List deserializeFailureReasons(final Record record) throws JsonProcessingException { - final ObjectMapper mapper = new ObjectMapper(); - return List.of(mapper.readValue(String.valueOf(record.get(NORMALIZATION_SUMMARIES.FAILURES)), FailureReason[].class)); - } - - @Override - public Job getJob(final long jobId) throws IOException { - return jobDatabase.query(ctx -> getJob(ctx, jobId)); - } - - private Job getJob(final DSLContext ctx, final long jobId) { - return getJobOptional(ctx, jobId).orElseThrow(() -> new RuntimeException("Could not find job with id: " + jobId)); - } - - private Optional getJobOptional(final DSLContext ctx, final long jobId) { - return getJobFromResult(ctx.fetch(BASE_JOB_SELECT_AND_JOIN + "WHERE jobs.id = ?", jobId)); - } - - @Override - public Long getJobCount(final Set configTypes, final String connectionId) throws IOException { - return jobDatabase.query(ctx -> ctx.selectCount().from(JOBS) - .where(JOBS.CONFIG_TYPE.in(Sqls.toSqlNames(configTypes))) - .and(JOBS.SCOPE.eq(connectionId)) - .fetchOne().into(Long.class)); - } - - @Override - public List listJobs(final ConfigType configType, final String configId, final int pagesize, final int offset) throws IOException { - return listJobs(Set.of(configType), configId, pagesize, offset); - } - - @Override - public List listJobs(final Set configTypes, final String configId, final int pagesize, final int offset) throws IOException { - return jobDatabase.query(ctx -> { - final String jobsSubquery = "(" + ctx.select(DSL.asterisk()).from(JOBS) - 
.where(JOBS.CONFIG_TYPE.in(Sqls.toSqlNames(configTypes))) - .and(JOBS.SCOPE.eq(configId)) - .orderBy(JOBS.CREATED_AT.desc(), JOBS.ID.desc()) - .limit(pagesize) - .offset(offset) - .getSQL(ParamType.INLINED) + ") AS jobs"; - - return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + ORDER_BY_JOB_TIME_ATTEMPT_TIME)); - }); - } - - @Override - public List listJobsIncludingId(final Set configTypes, final String connectionId, final long includingJobId, final int pagesize) - throws IOException { - final Optional includingJobCreatedAt = jobDatabase.query(ctx -> ctx.select(JOBS.CREATED_AT).from(JOBS) - .where(JOBS.CONFIG_TYPE.in(Sqls.toSqlNames(configTypes))) - .and(JOBS.SCOPE.eq(connectionId)) - .and(JOBS.ID.eq(includingJobId)) - .stream() - .findFirst() - .map(record -> record.get(JOBS.CREATED_AT, OffsetDateTime.class))); - - if (includingJobCreatedAt.isEmpty()) { - return List.of(); - } - - final int countIncludingJob = jobDatabase.query(ctx -> ctx.selectCount().from(JOBS) - .where(JOBS.CONFIG_TYPE.in(Sqls.toSqlNames(configTypes))) - .and(JOBS.SCOPE.eq(connectionId)) - .and(JOBS.CREATED_AT.greaterOrEqual(includingJobCreatedAt.get())) - .fetchOne().into(int.class)); - - // calculate the multiple of `pagesize` that includes the target job - final int pageSizeThatIncludesJob = (countIncludingJob / pagesize + 1) * pagesize; - return listJobs(configTypes, connectionId, pageSizeThatIncludesJob, 0); - } - - @Override - public List listJobsWithStatus(final JobStatus status) throws IOException { - return listJobsWithStatus(Sets.newHashSet(ConfigType.values()), status); - } - - @Override - public List listJobsWithStatus(final Set configTypes, final JobStatus status) throws IOException { - return jobDatabase.query(ctx -> getJobsFromResult(ctx - .fetch(BASE_JOB_SELECT_AND_JOIN + WHERE + - "CAST(config_type AS VARCHAR) IN " + Sqls.toSqlInFragment(configTypes) + AND + - "CAST(jobs.status AS VARCHAR) = ? 
" + - ORDER_BY_JOB_TIME_ATTEMPT_TIME, - Sqls.toSqlName(status)))); - } - - @Override - public List listJobsWithStatus(final ConfigType configType, final JobStatus status) throws IOException { - return listJobsWithStatus(Sets.newHashSet(configType), status); - } - - @Override - public List listJobsForConnectionWithStatuses(final UUID connectionId, final Set configTypes, final Set statuses) - throws IOException { - return jobDatabase.query(ctx -> getJobsFromResult(ctx - .fetch(BASE_JOB_SELECT_AND_JOIN + WHERE + - SCOPE_CLAUSE + - "config_type IN " + Sqls.toSqlInFragment(configTypes) + AND + - "jobs.status IN " + Sqls.toSqlInFragment(statuses) + " " + - ORDER_BY_JOB_TIME_ATTEMPT_TIME, - connectionId.toString()))); - } - - @Override - public List listJobStatusAndTimestampWithConnection(final UUID connectionId, - final Set configTypes, - final Instant jobCreatedAtTimestamp) - throws IOException { - final LocalDateTime timeConvertedIntoLocalDateTime = LocalDateTime.ofInstant(jobCreatedAtTimestamp, ZoneOffset.UTC); - - final String JobStatusSelect = "SELECT id, status, created_at, updated_at FROM jobs "; - return jobDatabase.query(ctx -> ctx - .fetch(JobStatusSelect + WHERE + - SCOPE_CLAUSE + - "CAST(config_type AS VARCHAR) in " + Sqls.toSqlInFragment(configTypes) + AND + - "created_at >= ? ORDER BY created_at DESC", connectionId.toString(), timeConvertedIntoLocalDateTime)) - .stream() - .map(r -> new JobWithStatusAndTimestamp( - r.get("id", Long.class), - JobStatus.valueOf(r.get("status", String.class).toUpperCase()), - r.get("created_at", Long.class) / 1000, - r.get("updated_at", Long.class) / 1000)) - .toList(); - } - - @Override - public Optional getLastReplicationJob(final UUID connectionId) throws IOException { - return jobDatabase.query(ctx -> ctx - .fetch(BASE_JOB_SELECT_AND_JOIN + WHERE + - "CAST(jobs.config_type AS VARCHAR) in " + Sqls.toSqlInFragment(Job.REPLICATION_TYPES) + AND + - SCOPE_CLAUSE + - "CAST(jobs.status AS VARCHAR) <> ? 
" + - ORDER_BY_JOB_CREATED_AT_DESC + LIMIT_1, - connectionId.toString(), - Sqls.toSqlName(JobStatus.CANCELLED)) - .stream() - .findFirst() - .flatMap(r -> getJobOptional(ctx, r.get(JOB_ID, Long.class)))); - } - - @Override - public Optional getLastSyncJob(final UUID connectionId) throws IOException { - return jobDatabase.query(ctx -> ctx - .fetch(BASE_JOB_SELECT_AND_JOIN + WHERE + - "CAST(jobs.config_type AS VARCHAR) = ? " + AND + - "scope = ? " + - ORDER_BY_JOB_CREATED_AT_DESC + LIMIT_1, - Sqls.toSqlName(ConfigType.SYNC), - connectionId.toString()) - .stream() - .findFirst() - .flatMap(r -> getJobOptional(ctx, r.get(JOB_ID, Long.class)))); - } - - /** - * For each connection ID in the input, find that connection's latest sync job and return it if one - * exists. - */ - @Override - public List getLastSyncJobForConnections(final List connectionIds) throws IOException { - if (connectionIds.isEmpty()) { - return Collections.emptyList(); - } - - return jobDatabase.query(ctx -> ctx - .fetch("SELECT DISTINCT ON (scope) * FROM jobs " - + WHERE + "CAST(jobs.config_type AS VARCHAR) = ? " - + AND + scopeInList(connectionIds) - + "ORDER BY scope, created_at DESC", - Sqls.toSqlName(ConfigType.SYNC)) - .stream() - .flatMap(r -> getJobOptional(ctx, r.get("id", Long.class)).stream()) - .collect(Collectors.toList())); - } - - /** - * For each connection ID in the input, find that connection's most recent non-terminal sync job and - * return it if one exists. - */ - @Override - public List getRunningSyncJobForConnections(final List connectionIds) throws IOException { - if (connectionIds.isEmpty()) { - return Collections.emptyList(); - } - - return jobDatabase.query(ctx -> ctx - .fetch("SELECT DISTINCT ON (scope) * FROM jobs " - + WHERE + "CAST(jobs.config_type AS VARCHAR) = ? 
" - + AND + scopeInList(connectionIds) - + AND + JOB_STATUS_IS_NON_TERMINAL - + "ORDER BY scope, created_at DESC", - Sqls.toSqlName(ConfigType.SYNC)) - .stream() - .flatMap(r -> getJobOptional(ctx, r.get("id", Long.class)).stream()) - .collect(Collectors.toList())); - } - - private String scopeInList(final Collection connectionIds) { - return String.format("scope IN (%s) ", - connectionIds.stream() - .map(UUID::toString) - .map(Names::singleQuote) - .collect(Collectors.joining(","))); - } - - @Override - public Optional getFirstReplicationJob(final UUID connectionId) throws IOException { - return jobDatabase.query(ctx -> ctx - .fetch(BASE_JOB_SELECT_AND_JOIN + WHERE + - "CAST(jobs.config_type AS VARCHAR) in " + Sqls.toSqlInFragment(Job.REPLICATION_TYPES) + AND + - SCOPE_CLAUSE + - "CAST(jobs.status AS VARCHAR) <> ? " + - "ORDER BY jobs.created_at ASC LIMIT 1", - connectionId.toString(), - Sqls.toSqlName(JobStatus.CANCELLED)) - .stream() - .findFirst() - .flatMap(r -> getJobOptional(ctx, r.get(JOB_ID, Long.class)))); - } - - @Override - public Optional getNextJob() throws IOException { - // rules: - // 1. get oldest, pending job - // 2. job is excluded if another job of the same scope is already running - // 3. 
job is excluded if another job of the same scope is already incomplete - return jobDatabase.query(ctx -> ctx - .fetch(BASE_JOB_SELECT_AND_JOIN + WHERE + - "CAST(jobs.status AS VARCHAR) = 'pending' AND " + - "jobs.scope NOT IN ( SELECT scope FROM jobs WHERE status = 'running' OR status = 'incomplete' ) " + - "ORDER BY jobs.created_at ASC LIMIT 1") - .stream() - .findFirst() - .flatMap(r -> getJobOptional(ctx, r.get(JOB_ID, Long.class)))); - } - - @Override - public List listJobs(final ConfigType configType, final Instant attemptEndedAtTimestamp) throws IOException { - final LocalDateTime timeConvertedIntoLocalDateTime = LocalDateTime.ofInstant(attemptEndedAtTimestamp, ZoneOffset.UTC); - return jobDatabase.query(ctx -> getJobsFromResult(ctx - .fetch(BASE_JOB_SELECT_AND_JOIN + WHERE + - "CAST(config_type AS VARCHAR) = ? AND " + - " attempts.ended_at > ? ORDER BY jobs.created_at ASC, attempts.created_at ASC", Sqls.toSqlName(configType), - timeConvertedIntoLocalDateTime))); - } - - @Override - public List listAttemptsWithJobInfo(final ConfigType configType, final Instant attemptEndedAtTimestamp) throws IOException { - final LocalDateTime timeConvertedIntoLocalDateTime = LocalDateTime.ofInstant(attemptEndedAtTimestamp, ZoneOffset.UTC); - return jobDatabase.query(ctx -> getAttemptsWithJobsFromResult(ctx.fetch( - BASE_JOB_SELECT_AND_JOIN + WHERE + "CAST(config_type AS VARCHAR) = ? AND " + " attempts.ended_at > ? 
ORDER BY attempts.ended_at ASC", - Sqls.toSqlName(configType), - timeConvertedIntoLocalDateTime))); - } - - @Override - public List getAttemptNormalizationStatusesForJob(final Long jobId) throws IOException { - return jobDatabase - .query(ctx -> ctx.select(ATTEMPTS.ATTEMPT_NUMBER, SYNC_STATS.RECORDS_COMMITTED, NORMALIZATION_SUMMARIES.FAILURES) - .from(ATTEMPTS) - .join(SYNC_STATS).on(SYNC_STATS.ATTEMPT_ID.eq(ATTEMPTS.ID)) - .leftJoin(NORMALIZATION_SUMMARIES).on(NORMALIZATION_SUMMARIES.ATTEMPT_ID.eq(ATTEMPTS.ID)) - .where(ATTEMPTS.JOB_ID.eq(jobId)) - .fetch(record -> new AttemptNormalizationStatus(record.get(ATTEMPTS.ATTEMPT_NUMBER), - Optional.of(record.get(SYNC_STATS.RECORDS_COMMITTED)), record.get(NORMALIZATION_SUMMARIES.FAILURES) != null))); - } - - // Retrieves only Job information from the record, without any attempt info - private static Job getJobFromRecord(final Record record) { - return new Job(record.get(JOB_ID, Long.class), - Enums.toEnum(record.get("config_type", String.class), ConfigType.class).orElseThrow(), - record.get("scope", String.class), - parseJobConfigFromString(record.get("config", String.class)), - new ArrayList(), - JobStatus.valueOf(record.get("job_status", String.class).toUpperCase()), - Optional.ofNullable(record.get("job_started_at")).map(value -> getEpoch(record, "started_at")).orElse(null), - getEpoch(record, "job_created_at"), - getEpoch(record, "job_updated_at")); - } - - private static JobConfig parseJobConfigFromString(final String jobConfigString) { - final JobConfig jobConfig = Jsons.deserialize(jobConfigString, JobConfig.class); - // On-the-fly migration of persisted data types related objects (protocol v0->v1) - if (jobConfig.getConfigType() == ConfigType.SYNC && jobConfig.getSync() != null) { - // TODO feature flag this for data types rollout - // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobConfig.getSync().getConfiguredAirbyteCatalog()); - 
CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobConfig.getSync().getConfiguredAirbyteCatalog()); - } else if (jobConfig.getConfigType() == ConfigType.RESET_CONNECTION && jobConfig.getResetConnection() != null) { - // TODO feature flag this for data types rollout - // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobConfig.getResetConnection().getConfiguredAirbyteCatalog()); - CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobConfig.getResetConnection().getConfiguredAirbyteCatalog()); - } - return jobConfig; - } - - private static Attempt getAttemptFromRecord(final Record record) { - final String attemptOutputString = record.get("attempt_output", String.class); - return new Attempt( - record.get(ATTEMPT_NUMBER, int.class), - record.get(JOB_ID, Long.class), - Path.of(record.get("log_path", String.class)), - record.get("attempt_sync_config", String.class) == null ? null - : Jsons.deserialize(record.get("attempt_sync_config", String.class), AttemptSyncConfig.class), - attemptOutputString == null ? null : parseJobOutputFromString(attemptOutputString), - Enums.toEnum(record.get("attempt_status", String.class), AttemptStatus.class).orElseThrow(), - record.get("processing_task_queue", String.class), - record.get("attempt_failure_summary", String.class) == null ? 
null - : Jsons.deserialize(record.get("attempt_failure_summary", String.class), AttemptFailureSummary.class), - getEpoch(record, "attempt_created_at"), - getEpoch(record, "attempt_updated_at"), - Optional.ofNullable(record.get("attempt_ended_at")) - .map(value -> getEpoch(record, "attempt_ended_at")) - .orElse(null)); - } - - private static JobOutput parseJobOutputFromString(final String jobOutputString) { - final JobOutput jobOutput = Jsons.deserialize(jobOutputString, JobOutput.class); - // On-the-fly migration of persisted data types related objects (protocol v0->v1) - if (jobOutput.getOutputType() == OutputType.DISCOVER_CATALOG && jobOutput.getDiscoverCatalog() != null) { - // TODO feature flag this for data types rollout - // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobOutput.getDiscoverCatalog().getCatalog()); - CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobOutput.getDiscoverCatalog().getCatalog()); - } else if (jobOutput.getOutputType() == OutputType.SYNC && jobOutput.getSync() != null) { - // TODO feature flag this for data types rollout - // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobOutput.getSync().getOutputCatalog()); - CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobOutput.getSync().getOutputCatalog()); - } - return jobOutput; - } - - private static List getAttemptsWithJobsFromResult(final Result result) { - return result - .stream() - .filter(record -> record.getValue(ATTEMPT_NUMBER) != null) - .map(record -> new AttemptWithJobInfo(getAttemptFromRecord(record), getJobFromRecord(record))) - .collect(Collectors.toList()); - } - - private static List getJobsFromResult(final Result result) { - // keeps results strictly in order so the sql query controls the sort - final List jobs = new ArrayList(); - Job currentJob = null; - for (final Record entry : result) { - if (currentJob == null || currentJob.getId() != entry.get(JOB_ID, Long.class)) { - currentJob = getJobFromRecord(entry); - jobs.add(currentJob); - } - if 
(entry.getValue(ATTEMPT_NUMBER) != null) { - currentJob.getAttempts().add(getAttemptFromRecord(entry)); - } - } - - return jobs; - } - - @VisibleForTesting - static Optional getJobFromResult(final Result result) { - return getJobsFromResult(result).stream().findFirst(); - } - - private static long getEpoch(final Record record, final String fieldName) { - return record.get(fieldName, LocalDateTime.class).toEpochSecond(ZoneOffset.UTC); - } - - private final String SECRET_MIGRATION_STATUS = "secretMigration"; - - @Override - public boolean isSecretMigrated() throws IOException { - return getMetadata(SECRET_MIGRATION_STATUS).count() == 1; - } - - @Override - public void setSecretMigrationDone() throws IOException { - setMetadata(SECRET_MIGRATION_STATUS, "true"); - } - - @Override - public Optional getVersion() throws IOException { - return getMetadata(AirbyteVersion.AIRBYTE_VERSION_KEY_NAME).findFirst(); - } - - @Override - public void setVersion(final String airbyteVersion) throws IOException { - // This is not using setMetadata due to the extra (s_init_db, airbyteVersion) that is - // added to the metadata table - jobDatabase.query(ctx -> ctx.execute(String.format( - "INSERT INTO %s(%s, %s) VALUES('%s', '%s'), ('%s_init_db', '%s') ON CONFLICT (%s) DO UPDATE SET %s = '%s'", - AIRBYTE_METADATA_TABLE, - METADATA_KEY_COL, - METADATA_VAL_COL, - AirbyteVersion.AIRBYTE_VERSION_KEY_NAME, - airbyteVersion, - current_timestamp(), - airbyteVersion, - METADATA_KEY_COL, - METADATA_VAL_COL, - airbyteVersion))); - - } - - @Override - public Optional getAirbyteProtocolVersionMax() throws IOException { - return getMetadata(AirbyteProtocolVersion.AIRBYTE_PROTOCOL_VERSION_MAX_KEY_NAME).findFirst().map(Version::new); - } - - @Override - public void setAirbyteProtocolVersionMax(final Version version) throws IOException { - setMetadata(AirbyteProtocolVersion.AIRBYTE_PROTOCOL_VERSION_MAX_KEY_NAME, version.serialize()); - } - - @Override - public Optional getAirbyteProtocolVersionMin() 
throws IOException { - return getMetadata(AirbyteProtocolVersion.AIRBYTE_PROTOCOL_VERSION_MIN_KEY_NAME).findFirst().map(Version::new); - } - - @Override - public void setAirbyteProtocolVersionMin(final Version version) throws IOException { - setMetadata(AirbyteProtocolVersion.AIRBYTE_PROTOCOL_VERSION_MIN_KEY_NAME, version.serialize()); - } - - @Override - public Optional getCurrentProtocolVersionRange() throws IOException { - final Optional min = getAirbyteProtocolVersionMin(); - final Optional max = getAirbyteProtocolVersionMax(); - - if (min.isPresent() != max.isPresent()) { - // Flagging this because this would be highly suspicious but not bad enough that we should fail - // hard. - // If the new config is fine, the system should self-heal. - LOGGER.warn("Inconsistent AirbyteProtocolVersion found, only one of min/max was found. (min:{}, max:{})", - min.map(Version::serialize).orElse(""), max.map(Version::serialize).orElse("")); - } - - if (min.isEmpty() && max.isEmpty()) { - return Optional.empty(); - } - - return Optional.of(new AirbyteProtocolVersionRange(min.orElse(AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION), - max.orElse(AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION))); - } - - private Stream getMetadata(final String keyName) throws IOException { - return jobDatabase.query(ctx -> ctx.select() - .from(AIRBYTE_METADATA_TABLE) - .where(DSL.field(METADATA_KEY_COL).eq(keyName)) - .fetch()).stream().map(r -> r.getValue(METADATA_VAL_COL, String.class)); - } - - private void setMetadata(final String keyName, final String value) throws IOException { - jobDatabase.query(ctx -> ctx - .insertInto(DSL.table(AIRBYTE_METADATA_TABLE)) - .columns(DSL.field(METADATA_KEY_COL), DSL.field(METADATA_VAL_COL)) - .values(keyName, value) - .onConflict(DSL.field(METADATA_KEY_COL)) - .doUpdate() - .set(DSL.field(METADATA_VAL_COL), value) - .execute()); - } - - @Override - public Optional getDeployment() throws IOException { - final Result result = 
jobDatabase.query(ctx -> ctx.select() - .from(AIRBYTE_METADATA_TABLE) - .where(DSL.field(METADATA_KEY_COL).eq(DEPLOYMENT_ID_KEY)) - .fetch()); - return result.stream().findFirst().map(r -> UUID.fromString(r.getValue(METADATA_VAL_COL, String.class))); - } - - @Override - public void setDeployment(final UUID deployment) throws IOException { - // if an existing deployment id already exists, on conflict, return it so we can log it. - final UUID committedDeploymentId = jobDatabase.query(ctx -> ctx.fetch(String.format( - "INSERT INTO %s(%s, %s) VALUES('%s', '%s') ON CONFLICT (%s) DO NOTHING RETURNING (SELECT %s FROM %s WHERE %s='%s') as existing_deployment_id", - AIRBYTE_METADATA_TABLE, - METADATA_KEY_COL, - METADATA_VAL_COL, - DEPLOYMENT_ID_KEY, - deployment, - METADATA_KEY_COL, - METADATA_VAL_COL, - AIRBYTE_METADATA_TABLE, - METADATA_KEY_COL, - DEPLOYMENT_ID_KEY))) - .stream() - .filter(record -> record.get("existing_deployment_id", String.class) != null) - .map(record -> UUID.fromString(record.get("existing_deployment_id", String.class))) - .findFirst() - .orElse(deployment); // if no record was returned that means that the new deployment id was used. - - if (!deployment.equals(committedDeploymentId)) { - LOGGER.warn("Attempted to set a deployment id {}, but deployment id {} already set. 
Retained original value.", deployment, deployment); - } - } - - private static String current_timestamp() { - return ZonedDateTime.now().format(DateTimeFormatter.ISO_OFFSET_DATE_TIME); - } - - @Override - public Map> exportDatabase() throws IOException { - return exportDatabase(DEFAULT_SCHEMA); - } - - private Map> exportDatabase(final String schema) throws IOException { - final List tables = listTables(schema); - final Map> result = new HashMap<>(); - - for (final String table : tables) { - result.put(JobsDatabaseSchema.valueOf(table.toUpperCase()), exportTable(schema, table)); - } - - return result; - } - - /** - * List tables from @param schema and @return their names - */ - private List listTables(final String schema) throws IOException { - if (schema != null) { - return jobDatabase.query(context -> context.meta().getSchemas(schema).stream() - .flatMap(s -> context.meta(s).getTables().stream()) - .map(Named::getName) - .filter(table -> JobsDatabaseSchema.getTableNames().contains(table.toLowerCase())) - .collect(Collectors.toList())); - } else { - return List.of(); - } - } - - @Override - public void purgeJobHistory() { - purgeJobHistory(LocalDateTime.now()); - } - - @VisibleForTesting - public void purgeJobHistory(final LocalDateTime asOfDate) { - try { - final String JOB_HISTORY_PURGE_SQL = MoreResources.readResource("job_history_purge.sql"); - // interval '?' days cannot use a ? bind, so we're using %d instead. 
- final String sql = String.format(JOB_HISTORY_PURGE_SQL, (JOB_HISTORY_MINIMUM_AGE_IN_DAYS - 1)); - jobDatabase.query(ctx -> ctx.execute(sql, - asOfDate.format(DateTimeFormatter.ofPattern("YYYY-MM-dd")), - JOB_HISTORY_EXCESSIVE_NUMBER_OF_JOBS, - JOB_HISTORY_MINIMUM_RECENCY)); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - private Stream exportTable(final String schema, final String tableName) throws IOException { - final Table tableSql = getTable(schema, tableName); - try (final Stream records = jobDatabase.query(ctx -> ctx.select(DSL.asterisk()).from(tableSql).fetchStream())) { - return records.map(record -> { - final Set jsonFieldNames = Arrays.stream(record.fields()) - .filter(f -> "jsonb".equals(f.getDataType().getTypeName())) - .map(Field::getName) - .collect(Collectors.toSet()); - final JsonNode row = Jsons.deserialize(record.formatJSON(JdbcUtils.getDefaultJSONFormat())); - // for json fields, deserialize them so they are treated as objects instead of strings. this is to - // get around that formatJson doesn't handle deserializing them for us. 
- jsonFieldNames.forEach(jsonFieldName -> ((ObjectNode) row).replace(jsonFieldName, Jsons.deserialize(row.get(jsonFieldName).asText()))); - return row; - }); - } - } - - @Override - public void importDatabase(final String airbyteVersion, final Map> data) throws IOException { - importDatabase(airbyteVersion, DEFAULT_SCHEMA, data, false); - } - - private void importDatabase(final String airbyteVersion, - final String targetSchema, - final Map> data, - final boolean incrementalImport) - throws IOException { - if (!data.isEmpty()) { - createSchema(BACKUP_SCHEMA); - jobDatabase.transaction(ctx -> { - // obtain locks on all tables first, to prevent deadlocks - for (final JobsDatabaseSchema tableType : data.keySet()) { - ctx.execute(String.format("LOCK TABLE %s IN ACCESS EXCLUSIVE MODE", tableType.name())); - } - for (final JobsDatabaseSchema tableType : data.keySet()) { - if (!incrementalImport) { - truncateTable(ctx, targetSchema, tableType.name(), BACKUP_SCHEMA); - } - importTable(ctx, targetSchema, tableType, data.get(tableType)); - } - registerImportMetadata(ctx, airbyteVersion); - return null; - }); - } - // TODO write "import success vXX on now()" to audit log table? 
- } - - private void createSchema(final String schema) throws IOException { - jobDatabase.query(ctx -> ctx.createSchemaIfNotExists(schema).execute()); - } - - /** - * In a single transaction, truncate all @param tables from @param schema, making backup copies - * in @param backupSchema - */ - private static void truncateTable(final DSLContext ctx, final String schema, final String tableName, final String backupSchema) { - final Table tableSql = getTable(schema, tableName); - final Table backupTableSql = getTable(backupSchema, tableName); - ctx.dropTableIfExists(backupTableSql).execute(); - ctx.createTable(backupTableSql).as(DSL.select(DSL.asterisk()).from(tableSql)).withData().execute(); - ctx.truncateTable(tableSql).restartIdentity().cascade().execute(); - } - - /** - * TODO: we need version specific importers to copy data to the database. Issue: #5682. - */ - private static void importTable(final DSLContext ctx, final String schema, final JobsDatabaseSchema tableType, final Stream jsonStream) { - LOGGER.info("Importing table {} from archive into database.", tableType.name()); - final Table tableSql = getTable(schema, tableType.name()); - final JsonNode jsonSchema = tableType.getTableDefinition(); - if (jsonSchema != null) { - // Use an ArrayList to mirror the order of columns from the schema file since columns may not be - // written consistently in the same order in the stream - final List> columns = getFields(jsonSchema); - // Build a Stream of List of Values using the same order as columns, filling blanks if needed (when - // stream omits them for nullable columns) - final Stream> data = jsonStream.map(node -> { - final List values = new ArrayList<>(); - for (final Field column : columns) { - values.add(getJsonNodeValue(node, column.getName())); - } - return values; - }); - // Then insert rows into table in batches, to avoid crashing due to inserting too much data at once - final UnmodifiableIterator>> partitions = Iterators.partition(data.iterator(), 100); - 
partitions.forEachRemaining(values -> { - final InsertValuesStepN insertStep = ctx - .insertInto(tableSql) - .columns(columns); - - values.forEach(insertStep::values); - - if (insertStep.getBindValues().size() > 0) { - // LOGGER.debug(insertStep.toString()); - ctx.batch(insertStep).execute(); - } - }); - final Optional> idColumn = columns.stream().filter(f -> "id".equals(f.getName())).findFirst(); - if (idColumn.isPresent()) - resetIdentityColumn(ctx, schema, tableType); - } - } - - /** - * In schema.sql, we create tables with IDENTITY PRIMARY KEY columns named 'id' that will generate - * auto-incremented ID for each new record. When importing batch of records from outside of the DB, - * we need to update Postgres Internal state to continue auto-incrementing from the latest value or - * we would risk to violate primary key constraints by inserting new records with duplicate ids. - * - * This function reset such Identity states (called SQL Sequence objects). - */ - private static void resetIdentityColumn(final DSLContext ctx, final String schema, final JobsDatabaseSchema tableType) { - final Result result = ctx.fetch(String.format("SELECT MAX(id) FROM %s.%s", schema, tableType.name())); - final Optional maxId = result.stream() - .map(r -> r.get(0, Integer.class)) - .filter(Objects::nonNull) - .findFirst(); - if (maxId.isPresent()) { - final Sequence sequenceName = DSL.sequence(DSL.name(schema, String.format("%s_%s_seq", tableType.name().toLowerCase(), "id"))); - ctx.alterSequenceIfExists(sequenceName).restartWith(maxId.get() + 1).execute(); - } - } - - /** - * Insert records into the metadata table to keep track of import Events that were applied on the - * database. Update and overwrite the corresponding @param airbyteVersion. 
- */ - private static void registerImportMetadata(final DSLContext ctx, final String airbyteVersion) { - ctx.execute(String.format("INSERT INTO %s VALUES('%s_import_db', '%s');", AIRBYTE_METADATA_TABLE, current_timestamp(), airbyteVersion)); - ctx.execute(String.format("UPDATE %s SET %s = '%s' WHERE %s = '%s';", - AIRBYTE_METADATA_TABLE, - METADATA_VAL_COL, - airbyteVersion, - METADATA_KEY_COL, - AirbyteVersion.AIRBYTE_VERSION_KEY_NAME)); - } - - /** - * Read @param jsonSchema and @returns a list of properties (converted as Field objects) - */ - @SuppressWarnings("PMD.ForLoopCanBeForeach") - private static List> getFields(final JsonNode jsonSchema) { - final List> result = new ArrayList<>(); - final JsonNode properties = jsonSchema.get("properties"); - for (final Iterator it = properties.fieldNames(); it.hasNext();) { - final String fieldName = it.next(); - result.add(DSL.field(fieldName)); - } - return result; - } - - /** - * @return Java Values for the @param columnName in @param jsonNode - */ - private static Object getJsonNodeValue(final JsonNode jsonNode, final String columnName) { - if (!jsonNode.has(columnName)) { - return null; - } - final JsonNode valueNode = jsonNode.get(columnName); - final JsonNodeType nodeType = valueNode.getNodeType(); - if (nodeType == JsonNodeType.OBJECT) { - return valueNode.toString(); - } else if (nodeType == JsonNodeType.STRING) { - return valueNode.asText(); - } else if (nodeType == JsonNodeType.NUMBER) { - return valueNode.asDouble(); - } else if (nodeType == JsonNodeType.NULL) { - return null; - } - throw new IllegalArgumentException(String.format("Undefined type for column %s", columnName)); - } - - private static Table getTable(final String schema, final String tableName) { - return DSL.table(String.format("%s.%s", schema, tableName)); - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java 
b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java deleted file mode 100644 index ad2ccbf89be5..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.version.Version; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.protocol.models.StreamDescriptor; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import javax.annotation.Nullable; - -public interface JobCreator { - - /** - * @param source db model representing where data comes from - * @param destination db model representing where data goes - * @param standardSync sync options - * @param sourceDockerImage docker image to use for the source - * @param destinationDockerImage docker image to use for the destination - * @param workspaceId - * @return the new job if no other conflicting job was running, otherwise empty - * @throws IOException if something wrong happens - */ - Optional createSyncJob(SourceConnection source, - DestinationConnection destination, - StandardSync standardSync, - String sourceDockerImage, - Version sourceProtocolVersion, - String destinationDockerImage, - Version destinationProtocolVersion, - List standardSyncOperations, - @Nullable JsonNode webhookOperationConfigs, - StandardSourceDefinition sourceDefinition, - StandardDestinationDefinition destinationDefinition, - UUID workspaceId) - throws IOException; - - /** - * - * @param destination db model representing where 
data goes - * @param standardSync sync options - * @param destinationDockerImage docker image to use for the destination - * @param streamsToReset - * @return the new job if no other conflicting job was running, otherwise empty - * @throws IOException if something wrong happens - */ - Optional createResetConnectionJob(DestinationConnection destination, - StandardSync standardSync, - String destinationDockerImage, - Version destinationProtocolVersion, - boolean isCustom, - List standardSyncOperations, - List streamsToReset) - throws IOException; - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java deleted file mode 100644 index fc196f510cf2..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job; - -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.config.Notification; -import io.airbyte.config.Notification.NotificationType; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.notification.NotificationClient; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.tracker.TrackingMetadata; -import java.time.Duration; -import java.time.Instant; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.time.format.FormatStyle; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.apache.commons.lang3.time.DurationFormatUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class JobNotifier { - - private static final Logger LOGGER = LoggerFactory.getLogger(JobNotifier.class); - - public static final String FAILURE_NOTIFICATION = "Failure Notification"; - public static final String SUCCESS_NOTIFICATION = "Success Notification"; - public static final String CONNECTION_DISABLED_WARNING_NOTIFICATION = "Connection Disabled Warning Notification"; - public static final String CONNECTION_DISABLED_NOTIFICATION = "Connection Disabled Notification"; - - private final ConfigRepository configRepository; - private final TrackingClient trackingClient; - private final WebUrlHelper webUrlHelper; - private final WorkspaceHelper workspaceHelper; - - public JobNotifier(final WebUrlHelper webUrlHelper, - final ConfigRepository configRepository, - final WorkspaceHelper workspaceHelper, - final TrackingClient trackingClient) { - 
this.webUrlHelper = webUrlHelper; - this.workspaceHelper = workspaceHelper; - this.configRepository = configRepository; - this.trackingClient = trackingClient; - } - - private void notifyJob(final String reason, final String action, final Job job) { - try { - final UUID workspaceId = workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(job.getId()); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - notifyJob(reason, action, job, workspaceId, workspace, workspace.getNotifications()); - } catch (final Exception e) { - LOGGER.error("Unable to read configuration:", e); - } - } - - private void notifyJob(final String reason, - final String action, - final Job job, - final UUID workspaceId, - final StandardWorkspace workspace, - final List notifications) { - final UUID connectionId = UUID.fromString(job.getScope()); - try { - final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromConnection(connectionId); - final StandardDestinationDefinition destinationDefinition = configRepository.getDestinationDefinitionFromConnection(connectionId); - final String sourceConnector = sourceDefinition.getName(); - final String destinationConnector = destinationDefinition.getName(); - final String failReason = Strings.isNullOrEmpty(reason) ? 
"" : String.format(", as the %s", reason); - final String jobDescription = getJobDescription(job, failReason); - final String logUrl = webUrlHelper.getConnectionUrl(workspaceId, connectionId); - final Map jobMetadata = TrackingMetadata.generateJobAttemptMetadata(job); - final Map sourceMetadata = TrackingMetadata.generateSourceDefinitionMetadata(sourceDefinition); - final Map destinationMetadata = TrackingMetadata.generateDestinationDefinitionMetadata(destinationDefinition); - for (final Notification notification : notifications) { - final NotificationClient notificationClient = getNotificationClient(notification); - try { - final Builder notificationMetadata = ImmutableMap.builder(); - notificationMetadata.put("connection_id", connectionId); - if (NotificationType.SLACK.equals(notification.getNotificationType()) && - notification.getSlackConfiguration().getWebhook().contains("hooks.slack.com")) { - // flag as slack if the webhook URL is also pointing to slack - notificationMetadata.put("notification_type", NotificationType.SLACK); - } else if (NotificationType.CUSTOMERIO.equals(notification.getNotificationType())) { - notificationMetadata.put("notification_type", NotificationType.CUSTOMERIO); - } else { - // Slack Notification type could be "hacked" and re-used for custom webhooks - notificationMetadata.put("notification_type", "N/A"); - } - trackingClient.track( - workspaceId, - action, - MoreMaps.merge(jobMetadata, sourceMetadata, destinationMetadata, notificationMetadata.build())); - - if (FAILURE_NOTIFICATION.equalsIgnoreCase(action)) { - if (!notificationClient.notifyJobFailure(sourceConnector, destinationConnector, jobDescription, logUrl, job.getId())) { - LOGGER.warn("Failed to successfully notify failure: {}", notification); - } - break; - } else if (SUCCESS_NOTIFICATION.equalsIgnoreCase(action)) { - if (!notificationClient.notifyJobSuccess(sourceConnector, destinationConnector, jobDescription, logUrl, job.getId())) { - LOGGER.warn("Failed to successfully 
notify success: {}", notification); - } - break; - } else if (CONNECTION_DISABLED_NOTIFICATION.equalsIgnoreCase(action)) { - if (!notificationClient.notifyConnectionDisabled(workspace.getEmail(), sourceConnector, destinationConnector, jobDescription, - workspaceId, connectionId)) { - LOGGER.warn("Failed to successfully notify auto-disable connection: {}", notification); - } - break; - } else if (CONNECTION_DISABLED_WARNING_NOTIFICATION.equalsIgnoreCase(action)) { - if (!notificationClient.notifyConnectionDisableWarning(workspace.getEmail(), sourceConnector, destinationConnector, jobDescription, - workspaceId, connectionId)) { - LOGGER.warn("Failed to successfully notify auto-disable connection warning: {}", notification); - } - - } - } catch (final Exception e) { - LOGGER.error("Failed to notify: {} due to an exception", notification, e); - } - } - } catch (final Exception e) { - LOGGER.error("Unable to read configuration:", e); - } - } - - // This method allows for the alert to be sent without the customerio configuration set in the - // database - // This is only needed because there is no UI element to allow for users to create that - // configuration. - // Once that exists, this can be removed and we should be using `notifyJobByEmail`. - // The alert is sent to the email associated with the workspace. 
- public void notifyJobByEmail(final String reason, final String action, final Job job) { - final Notification emailNotification = new Notification(); - emailNotification.setNotificationType(NotificationType.CUSTOMERIO); - try { - final UUID workspaceId = workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(job.getId()); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - notifyJob(reason, action, job, workspaceId, workspace, Collections.singletonList(emailNotification)); - } catch (final Exception e) { - LOGGER.error("Unable to read configuration:", e); - } - } - - private String getJobDescription(final Job job, final String reason) { - final Instant jobStartedDate = Instant.ofEpochSecond(job.getStartedAtInSecond().orElse(job.getCreatedAtInSecond())); - final DateTimeFormatter formatter = DateTimeFormatter.ofLocalizedDateTime(FormatStyle.FULL).withZone(ZoneId.systemDefault()); - final Instant jobUpdatedDate = Instant.ofEpochSecond(job.getUpdatedAtInSecond()); - final Instant adjustedJobUpdatedDate = jobUpdatedDate.equals(jobStartedDate) ? 
Instant.now() : jobUpdatedDate; - final Duration duration = Duration.between(jobStartedDate, adjustedJobUpdatedDate); - final String durationString = DurationFormatUtils.formatDurationWords(duration.toMillis(), true, true); - - return String.format("sync started on %s, running for %s%s.", formatter.format(jobStartedDate), durationString, reason); - } - - public void failJob(final String reason, final Job job) { - notifyJob(reason, FAILURE_NOTIFICATION, job); - } - - public void successJob(final Job job) { - notifyJob(null, SUCCESS_NOTIFICATION, job); - } - - public void autoDisableConnection(final Job job) { - notifyJob(null, CONNECTION_DISABLED_NOTIFICATION, job); - } - - public void autoDisableConnectionWarning(final Job job) { - notifyJob(null, CONNECTION_DISABLED_WARNING_NOTIFICATION, job); - } - - protected NotificationClient getNotificationClient(final Notification notification) { - return NotificationClient.createNotificationClient(notification); - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java deleted file mode 100644 index 5a9adc69a74f..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java +++ /dev/null @@ -1,370 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.AttemptFailureSummary; -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobOutput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import io.airbyte.db.instance.jobs.JobsDatabaseSchema; -import io.airbyte.persistence.job.models.AttemptNormalizationStatus; -import io.airbyte.persistence.job.models.AttemptWithJobInfo; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.persistence.job.models.JobWithStatusAndTimestamp; -import java.io.IOException; -import java.nio.file.Path; -import java.time.Instant; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Stream; - -/** - * General interface methods for persistence to the Jobs database. This database is separate from - * the config database as job-related tables has an order of magnitude higher load and scale - * differently from the config tables. - */ -public interface JobPersistence { - - // - // SIMPLE GETTERS - // - - /** - * Convenience POJO for various stats data structures. - * - * @param combinedStats - * @param perStreamStats - */ - record AttemptStats(SyncStats combinedStats, List perStreamStats) {} - - record JobAttemptPair(long id, int attemptNumber) {} - - /** - * Retrieve the combined and per stream stats for a single attempt. 
- * - * @return {@link AttemptStats} - * @throws IOException - */ - AttemptStats getAttemptStats(long jobId, int attemptNumber) throws IOException; - - /** - * Alternative method to retrieve combined and per stream stats per attempt for a list of jobs to - * avoid overloading the database with too many queries. - *

- * This implementation is intended to utilise complex joins under the hood to reduce the potential - * N+1 database pattern. - * - * @param jobIds - * @return - * @throws IOException - */ - Map getAttemptStats(List jobIds) throws IOException; - - List getNormalizationSummary(long jobId, int attemptNumber) throws IOException; - - Job getJob(long jobId) throws IOException; - - // - // JOB LIFECYCLE - // - - /** - * Enqueue a new job. Its initial status will be pending. - * - * @param scope key that will be used to determine if two jobs should not be run at the same time; - * it is the primary id of the standard sync (StandardSync#connectionId) - * @param jobConfig configuration for the job - * @return job id - * @throws IOException exception due to interaction with persistence - */ - Optional enqueueJob(String scope, JobConfig jobConfig) throws IOException; - - /** - * Set job status from current status to PENDING. Throws {@link IllegalStateException} if the job is - * in a terminal state. - * - * @param jobId job to reset - * @throws IOException exception due to interaction with persistence - */ - void resetJob(long jobId) throws IOException; - - /** - * Set job status from current status to CANCELLED. If already in a terminal status, no op. - * - * @param jobId job to cancel - * @throws IOException exception due to interaction with persistence - */ - void cancelJob(long jobId) throws IOException; - - /** - * Set job status from current status to FAILED. If already in a terminal status, no op. - * - * @param jobId job to fail - * @throws IOException exception due to interaction with persistence - */ - void failJob(long jobId) throws IOException; - - // - // ATTEMPT LIFECYCLE - // - - /** - * Create a new attempt for a job and return its attempt number. Throws - * {@link IllegalStateException} if the job is already in a terminal state. 
- * - * @param jobId job for which an attempt will be created - * @param logPath path where logs should be written for the attempt - * @return The attempt number of the created attempt (see {@link DefaultJobPersistence}) - * @throws IOException exception due to interaction with persistence - */ - int createAttempt(long jobId, Path logPath) throws IOException; - - /** - * Sets an attempt to FAILED. Also attempts to set the parent job to INCOMPLETE. The job's status - * will not be changed if it is already in a terminal state. - * - * @param jobId job id - * @param attemptNumber attempt id - * @throws IOException exception due to interaction with persistence - */ - void failAttempt(long jobId, int attemptNumber) throws IOException; - - /** - * Sets an attempt to SUCCEEDED. Also attempts to set the parent job to SUCCEEDED. The job's status - * is changed regardless of what state it is in. - * - * @param jobId job id - * @param attemptNumber attempt id - * @throws IOException exception due to interaction with persistence - */ - void succeedAttempt(long jobId, int attemptNumber) throws IOException; - - // - // END OF LIFECYCLE - // - - /** - * Sets an attempt's temporal workflow id. Later used to cancel the workflow. - */ - void setAttemptTemporalWorkflowInfo(long jobId, int attemptNumber, String temporalWorkflowId, String processingTaskQueue) throws IOException; - - /** - * Retrieves an attempt's temporal workflow id. Used to cancel the workflow. - */ - Optional getAttemptTemporalWorkflowId(long jobId, int attemptNumber) throws IOException; - - /** - * When the output is a StandardSyncOutput, caller of this method should persiste - * StandardSyncOutput#state in the configs database by calling - * ConfigRepository#updateConnectionState, which takes care of persisting the connection state. 
- */ - void writeOutput(long jobId, int attemptNumber, JobOutput output) throws IOException; - - void writeStats(long jobId, - int attemptNumber, - long estimatedRecords, - long estimatedBytes, - long recordsEmitted, - long bytesEmitted, - List streamStats) - throws IOException; - - /** - * Writes a summary of all failures that occurred during the attempt. - * - * @param jobId job id - * @param attemptNumber attempt number - * @param failureSummary summary containing failure metadata and ordered list of failures - * @throws IOException exception due to interaction with persistence - */ - void writeAttemptFailureSummary(long jobId, int attemptNumber, AttemptFailureSummary failureSummary) throws IOException; - - /** - * Writes the attempt-specific configuration used to build the sync input during the attempt. - * - * @param jobId job id - * @param attemptNumber attempt number - * @param attemptSyncConfig attempt-specific configuration used to build the sync input for this - * attempt - * @throws IOException exception due to interaction with persistence - */ - void writeAttemptSyncConfig(long jobId, int attemptNumber, AttemptSyncConfig attemptSyncConfig) throws IOException; - - /** - * @param configTypes - the type of config, e.g. sync - * @param connectionId - ID of the connection for which the job count should be retrieved - * @return count of jobs belonging to the specified connection - * @throws IOException - */ - Long getJobCount(final Set configTypes, final String connectionId) throws IOException; - - /** - * @param configTypes - type of config, e.g. 
sync - * @param configId - id of that config - * @return lists job in descending order by created_at - * @throws IOException - what you do when you IO - */ - List listJobs(Set configTypes, String configId, int limit, int offset) throws IOException; - - /** - * @param configType The type of job - * @param attemptEndedAtTimestamp The timestamp after which you want the jobs - * @return List of jobs that have attempts after the provided timestamp - * @throws IOException - */ - List listJobs(ConfigType configType, Instant attemptEndedAtTimestamp) throws IOException; - - List listJobs(JobConfig.ConfigType configType, String configId, int limit, int offset) throws IOException; - - /** - * @param configTypes - type of config, e.g. sync - * @param connectionId - id of the connection for which jobs should be retrieved - * @param includingJobId - id of the job that should be the included in the list, if it exists in - * the connection - * @param pagesize - the pagesize that should be used when building the list (response may include - * multiple pages) - * @return List of jobs in descending created_at order including the specified job. Will include - * multiple pages of jobs if required to include the specified job. If the specified job - * does not exist in the connection, the returned list will be empty. 
- * @throws IOException - */ - List listJobsIncludingId(Set configTypes, String connectionId, long includingJobId, int pagesize) throws IOException; - - List listJobsWithStatus(JobStatus status) throws IOException; - - List listJobsWithStatus(Set configTypes, JobStatus status) throws IOException; - - List listJobsWithStatus(JobConfig.ConfigType configType, JobStatus status) throws IOException; - - List listJobsForConnectionWithStatuses(UUID connectionId, Set configTypes, Set statuses) throws IOException; - - /** - * @param connectionId The ID of the connection - * @param configTypes The types of jobs - * @param jobCreatedAtTimestamp The timestamp after which you want the jobs - * @return List of jobs that only include information regarding id, status, timestamps from a - * specific connection that have attempts after the provided timestamp, sorted by jobs' - * createAt in descending order - * @throws IOException - */ - List listJobStatusAndTimestampWithConnection(UUID connectionId, - Set configTypes, - Instant jobCreatedAtTimestamp) - throws IOException; - - Optional getLastReplicationJob(UUID connectionId) throws IOException; - - Optional getLastSyncJob(UUID connectionId) throws IOException; - - List getLastSyncJobForConnections(final List connectionIds) throws IOException; - - List getRunningSyncJobForConnections(final List connectionIds) throws IOException; - - Optional getFirstReplicationJob(UUID connectionId) throws IOException; - - Optional getNextJob() throws IOException; - - /** - * @param configType The type of job - * @param attemptEndedAtTimestamp The timestamp after which you want the attempts - * @return List of attempts (with job attached) that ended after the provided timestamp, sorted by - * attempts' endedAt in ascending order - * @throws IOException - */ - List listAttemptsWithJobInfo(ConfigType configType, Instant attemptEndedAtTimestamp) throws IOException; - - /// ARCHIVE - - /** - * Returns the AirbyteVersion. 
- */ - Optional getVersion() throws IOException; - - /** - * Set the airbyte version - */ - void setVersion(String airbyteVersion) throws IOException; - - /** - * Get the max supported Airbyte Protocol Version - */ - Optional getAirbyteProtocolVersionMax() throws IOException; - - /** - * Set the max supported Airbyte Protocol Version - */ - void setAirbyteProtocolVersionMax(Version version) throws IOException; - - /** - * Get the min supported Airbyte Protocol Version - */ - Optional getAirbyteProtocolVersionMin() throws IOException; - - /** - * Set the min supported Airbyte Protocol Version - */ - void setAirbyteProtocolVersionMin(Version version) throws IOException; - - /** - * Get the current Airbyte Protocol Version range if defined - */ - Optional getCurrentProtocolVersionRange() throws IOException; - - /** - * Returns a deployment UUID. - */ - Optional getDeployment() throws IOException; - // a deployment references a setup of airbyte. it is created the first time the docker compose or - // K8s is ready. - - /** - * Set deployment id. If one is already set, the new value is ignored. - */ - void setDeployment(UUID uuid) throws IOException; - - /** - * Export all SQL tables from @param schema into streams of JsonNode objects. This returns a Map of - * table schemas to the associated streams of records that is being exported. - */ - Map> exportDatabase() throws IOException; - - /** - * Import all SQL tables from streams of JsonNode objects. - * - * @param data is a Map of table schemas to the associated streams of records to import. - * @param airbyteVersion is the version of the files to be imported and should match the Airbyte - * version in the Database. - */ - void importDatabase(String airbyteVersion, Map> data) throws IOException; - - /** - * Purges job history while ensuring that the latest saved-state information is maintained. 
- */ - void purgeJobHistory(); - - /** - * Check if the secret has been migrated to a new secret store from a plain text values - */ - boolean isSecretMigrated() throws IOException; - - /** - * Set that the secret migration has been performed. - */ - void setSecretMigrationDone() throws IOException; - - List getAttemptNormalizationStatusesForJob(final Long jobId) throws IOException; - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/ResourceRequirementsUtils.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/ResourceRequirementsUtils.java deleted file mode 100644 index 12a0132231d4..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/ResourceRequirementsUtils.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job; - -import com.google.common.base.Preconditions; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.JobTypeResourceLimit; -import io.airbyte.config.JobTypeResourceLimit.JobType; -import io.airbyte.config.ResourceRequirements; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; -import javax.annotation.Nullable; - -public class ResourceRequirementsUtils { - - /** - * Given connection-level resource requirements, actor-definition-level resource requirements, - * worker-default resource requirements, and a job type, returns the final resource requirements - * generated by merging the provided requirements in hierarchy order. - * - * Connection-level resource requirements take precendence over actor-definition level resource - * requirements. Within the actor-definition level requirements, job-type-specific requirements take - * precedence over default definition requirements. 
Actor-definition level resource requirements - * take precedence over worker default resource requirements. - * - * @param connectionResourceReqs - the resource requirements set on the connection - * @param actorDefinitionResourceReqs - the resource requirements set on the actor definition - * @param workerDefaultResourceReqs - the default worker resource requirements set in the env - * variables - * @param jobType - type of job to extract resource requirements for from the actor definition reqs - * @return resource requirements, if present, otherwise an empty ResourceRequirements object. - */ - public static ResourceRequirements getResourceRequirements(@Nullable final ResourceRequirements connectionResourceReqs, - @Nullable final ActorDefinitionResourceRequirements actorDefinitionResourceReqs, - @Nullable final ResourceRequirements workerDefaultResourceReqs, - final JobType jobType) { - final ResourceRequirements jobSpecificDefinitionResourceReqs = getResourceRequirementsForJobType(actorDefinitionResourceReqs, jobType) - .orElse(null); - final ResourceRequirements defaultDefinitionResourceReqs = Optional.ofNullable(actorDefinitionResourceReqs) - .map(ActorDefinitionResourceRequirements::getDefault).orElse(null); - return mergeResourceRequirements( - connectionResourceReqs, - jobSpecificDefinitionResourceReqs, - defaultDefinitionResourceReqs, - workerDefaultResourceReqs); - } - - /** - * Given connection-level and worker-default resource requirements, returns the final resource - * requirements generated by merging the provided requirements in hierarchy order. - * - * Connection-level resource requirements take precendence over worker-default resource - * requirements. - * - * @param connectionResourceReqs - the resource requirements set on the connection - * @param workerDefaultResourceReqs - the default worker resource requirements set in the env - * variables - * @return resource requirements, if present, otherwise an empty ResourceRequirements object. 
- */ - public static ResourceRequirements getResourceRequirements(@Nullable final ResourceRequirements connectionResourceReqs, - @Nullable final ResourceRequirements workerDefaultResourceReqs) { - return mergeResourceRequirements( - connectionResourceReqs, - workerDefaultResourceReqs); - } - - /** - * Given a list of resource requirements, merges them together. Earlier reqs override later ones. - * - * @param resourceReqs - list of resource request to merge - * @return merged resource req - */ - private static ResourceRequirements mergeResourceRequirements(final ResourceRequirements... resourceReqs) { - final ResourceRequirements outputReqs = new ResourceRequirements(); - final List reversed = new ArrayList<>(Arrays.asList(resourceReqs)); - Collections.reverse(reversed); - - // start from the lowest priority requirements so that we can repeatedly override the output - // requirements to guarantee that we end with the highest priority setting for each - for (final ResourceRequirements resourceReq : reversed) { - if (resourceReq == null) { - continue; - } - - if (resourceReq.getCpuRequest() != null) { - outputReqs.setCpuRequest(resourceReq.getCpuRequest()); - } - if (resourceReq.getCpuLimit() != null) { - outputReqs.setCpuLimit(resourceReq.getCpuLimit()); - } - if (resourceReq.getMemoryRequest() != null) { - outputReqs.setMemoryRequest(resourceReq.getMemoryRequest()); - } - if (resourceReq.getMemoryLimit() != null) { - outputReqs.setMemoryLimit(resourceReq.getMemoryLimit()); - } - } - return outputReqs; - } - - private static Optional getResourceRequirementsForJobType(final ActorDefinitionResourceRequirements actorDefResourceReqs, - final JobType jobType) { - if (actorDefResourceReqs == null) { - return Optional.empty(); - } - - final List jobTypeResourceRequirement = actorDefResourceReqs.getJobSpecific() - .stream() - .filter(jobSpecific -> jobSpecific.getJobType() == jobType).map(JobTypeResourceLimit::getResourceRequirements).collect( - Collectors.toList()); - - 
Preconditions.checkArgument(jobTypeResourceRequirement.size() <= 1, "Should only have one resource requirement per job type."); - return jobTypeResourceRequirement.isEmpty() - ? Optional.empty() - : Optional.of(jobTypeResourceRequirement.get(0)); - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WebUrlHelper.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WebUrlHelper.java deleted file mode 100644 index bd475f57b708..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WebUrlHelper.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job; - -import java.util.UUID; - -public class WebUrlHelper { - - private final String webAppUrl; - - public WebUrlHelper(final String webAppUrl) { - this.webAppUrl = webAppUrl; - } - - public String getBaseUrl() { - if (webAppUrl.endsWith("/")) { - return webAppUrl.substring(0, webAppUrl.length() - 1); - } - - return webAppUrl; - } - - public String getWorkspaceUrl(final UUID workspaceId) { - return String.format("%s/workspaces/%s", getBaseUrl(), workspaceId); - } - - public String getConnectionUrl(final UUID workspaceId, final UUID connectionId) { - return String.format("%s/connections/%s", getWorkspaceUrl(workspaceId), connectionId); - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WorkspaceHelper.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WorkspaceHelper.java deleted file mode 100644 index 1f8b3e95dd88..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WorkspaceHelper.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job; - -import com.google.common.base.Preconditions; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import io.airbyte.commons.functional.CheckedSupplier; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobConfig; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.validation.json.JsonValidationException; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.Objects; -import java.util.UUID; -import java.util.concurrent.ExecutionException; -import org.checkerframework.checker.nullness.qual.NonNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// todo (cgardens) - this class is in an unintuitive module. it is weird that you need to import -// scheduler:persistence in order to get workspace ids for configs (e.g. source). Our options are to -// split this helper by database or put it in a new module. 
-@SuppressWarnings("PMD.AvoidCatchingThrowable") -@Singleton -public class WorkspaceHelper { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkspaceHelper.class); - - private final LoadingCache sourceToWorkspaceCache; - private final LoadingCache destinationToWorkspaceCache; - private final LoadingCache connectionToWorkspaceCache; - private final LoadingCache operationToWorkspaceCache; - private final LoadingCache jobToWorkspaceCache; - - public WorkspaceHelper(final ConfigRepository configRepository, final JobPersistence jobPersistence) { - - this.sourceToWorkspaceCache = getExpiringCache(new CacheLoader<>() { - - @Override - public UUID load(@NonNull final UUID sourceId) throws JsonValidationException, ConfigNotFoundException, IOException { - final SourceConnection source = configRepository.getSourceConnection(sourceId); - return source.getWorkspaceId(); - } - - }); - - this.destinationToWorkspaceCache = getExpiringCache(new CacheLoader<>() { - - @Override - public UUID load(@NonNull final UUID destinationId) throws JsonValidationException, ConfigNotFoundException, IOException { - final DestinationConnection destination = configRepository.getDestinationConnection(destinationId); - return destination.getWorkspaceId(); - } - - }); - - this.connectionToWorkspaceCache = getExpiringCache(new CacheLoader<>() { - - @Override - public UUID load(@NonNull final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSync connection = configRepository.getStandardSync(connectionId); - final UUID sourceId = connection.getSourceId(); - final UUID destinationId = connection.getDestinationId(); - return getWorkspaceForConnectionIgnoreExceptions(sourceId, destinationId); - } - - }); - - this.operationToWorkspaceCache = getExpiringCache(new CacheLoader<>() { - - @Override - public UUID load(@NonNull final UUID operationId) throws JsonValidationException, ConfigNotFoundException, IOException { - final 
StandardSyncOperation operation = configRepository.getStandardSyncOperation(operationId); - return operation.getWorkspaceId(); - } - - }); - - this.jobToWorkspaceCache = getExpiringCache(new CacheLoader<>() { - - @Override - public UUID load(@NonNull final Long jobId) throws ConfigNotFoundException, IOException { - final Job job = jobPersistence.getJob(jobId); - if (job == null) { - throw new ConfigNotFoundException(Job.class.toString(), jobId.toString()); - } - if (job.getConfigType() == JobConfig.ConfigType.SYNC || job.getConfigType() == JobConfig.ConfigType.RESET_CONNECTION) { - return getWorkspaceForConnectionIdIgnoreExceptions(UUID.fromString(job.getScope())); - } else { - throw new IllegalArgumentException("Only sync/reset jobs are associated with workspaces! A " + job.getConfigType() + " job was requested!"); - } - } - - }); - } - - /** - * There are generally two kinds of helper methods present here. The first kind propagate exceptions - * for the method backing the cache. The second ignores them. The former is meant to be used with - * proper api calls, while the latter is meant to be use with asserts and precondtions checks. - *

- * In API calls, distinguishing between various exceptions helps return the correct status code. - */ - - // SOURCE ID - public UUID getWorkspaceForSourceId(final UUID sourceId) throws ConfigNotFoundException, JsonValidationException { - return handleCacheExceptions(() -> sourceToWorkspaceCache.get(sourceId)); - } - - public UUID getWorkspaceForSourceIdIgnoreExceptions(final UUID sourceId) { - return swallowExecutionException(() -> getWorkspaceForSourceId(sourceId)); - } - - // DESTINATION ID - public UUID getWorkspaceForDestinationId(final UUID destinationId) throws JsonValidationException, ConfigNotFoundException { - return handleCacheExceptions(() -> destinationToWorkspaceCache.get(destinationId)); - } - - public UUID getWorkspaceForDestinationIdIgnoreExceptions(final UUID destinationId) { - return swallowExecutionException(() -> destinationToWorkspaceCache.get(destinationId)); - } - - // JOB ID - public UUID getWorkspaceForJobId(final Long jobId) throws JsonValidationException, ConfigNotFoundException { - return handleCacheExceptions(() -> jobToWorkspaceCache.get(jobId)); - } - - public UUID getWorkspaceForJobIdIgnoreExceptions(final Long jobId) { - return swallowExecutionException(() -> jobToWorkspaceCache.get(jobId)); - } - - // CONNECTION ID - public UUID getWorkspaceForConnection(final UUID sourceId, final UUID destinationId) throws JsonValidationException, ConfigNotFoundException { - final UUID sourceWorkspace = getWorkspaceForSourceId(sourceId); - final UUID destinationWorkspace = getWorkspaceForDestinationId(destinationId); - - Preconditions.checkArgument(Objects.equals(sourceWorkspace, destinationWorkspace), "Source and destination must be from the same workspace!"); - return sourceWorkspace; - } - - public UUID getWorkspaceForConnectionIgnoreExceptions(final UUID sourceId, final UUID destinationId) { - final UUID sourceWorkspace = getWorkspaceForSourceIdIgnoreExceptions(sourceId); - final UUID destinationWorkspace = 
getWorkspaceForDestinationIdIgnoreExceptions(destinationId); - - Preconditions.checkArgument(Objects.equals(sourceWorkspace, destinationWorkspace), "Source and destination must be from the same workspace!"); - return sourceWorkspace; - } - - public UUID getWorkspaceForConnectionId(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException { - return handleCacheExceptions(() -> connectionToWorkspaceCache.get(connectionId)); - } - - public UUID getWorkspaceForConnectionIdIgnoreExceptions(final UUID connectionId) { - return swallowExecutionException(() -> connectionToWorkspaceCache.get(connectionId)); - } - - // OPERATION ID - public UUID getWorkspaceForOperationId(final UUID operationId) throws JsonValidationException, ConfigNotFoundException { - return handleCacheExceptions(() -> operationToWorkspaceCache.get(operationId)); - } - - public UUID getWorkspaceForOperationIdIgnoreExceptions(final UUID operationId) { - return swallowExecutionException(() -> operationToWorkspaceCache.get(operationId)); - } - - private static UUID handleCacheExceptions(final CheckedSupplier supplier) - throws ConfigNotFoundException, JsonValidationException { - try { - return supplier.get(); - } catch (final ExecutionException e) { - LOGGER.error("Error retrieving cache:", e.getCause()); - if (e.getCause() instanceof ConfigNotFoundException) { - throw (ConfigNotFoundException) e.getCause(); - } - if (e.getCause() instanceof JsonValidationException) { - throw (JsonValidationException) e.getCause(); - } - throw new RuntimeException(e.getCause().toString(), e); - } - } - - private static UUID swallowExecutionException(final CheckedSupplier supplier) { - try { - return supplier.get(); - } catch (final Throwable e) { - throw new RuntimeException(e); - } - } - - private static LoadingCache getExpiringCache(final CacheLoader cacheLoader) { - return CacheBuilder.newBuilder() - .maximumSize(20000) - .build(cacheLoader); - } - -} diff --git 
a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/ConnectorJobReportingContext.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/ConnectorJobReportingContext.java deleted file mode 100644 index 33ca2a9a98f8..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/ConnectorJobReportingContext.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.errorreporter; - -import java.util.UUID; - -public record ConnectorJobReportingContext(UUID jobId, String dockerImage) {} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java deleted file mode 100644 index bbcb81cf8709..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.errorreporter; - -import com.google.common.collect.ImmutableSet; -import edu.umd.cs.findbugs.annotations.Nullable; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.config.AttemptFailureSummary; -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.FailureReason; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.WebUrlHelper; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class JobErrorReporter { - - private static final Logger LOGGER = LoggerFactory.getLogger(JobErrorReporter.class); - - private static final String FROM_TRACE_MESSAGE = "from_trace_message"; - private static final String DEPLOYMENT_MODE_META_KEY = "deployment_mode"; - private static final String AIRBYTE_VERSION_META_KEY = "airbyte_version"; - private static final String FAILURE_ORIGIN_META_KEY = "failure_origin"; - private static final String FAILURE_TYPE_META_KEY = "failure_type"; - private static final String WORKSPACE_ID_META_KEY = "workspace_id"; - private static final String WORKSPACE_URL_META_KEY = "workspace_url"; - private static final String CONNECTION_ID_META_KEY = "connection_id"; - private static final String CONNECTION_URL_META_KEY = "connection_url"; - private static final String CONNECTOR_NAME_META_KEY = "connector_name"; - private static final String CONNECTOR_REPOSITORY_META_KEY = 
"connector_repository"; - private static final String CONNECTOR_DEFINITION_ID_META_KEY = "connector_definition_id"; - private static final String CONNECTOR_RELEASE_STAGE_META_KEY = "connector_release_stage"; - private static final String CONNECTOR_COMMAND_META_KEY = "connector_command"; - private static final String NORMALIZATION_REPOSITORY_META_KEY = "normalization_repository"; - private static final String JOB_ID_KEY = "job_id"; - - private static final ImmutableSet UNSUPPORTED_FAILURETYPES = - ImmutableSet.of(FailureType.CONFIG_ERROR, FailureType.MANUAL_CANCELLATION); - - private final ConfigRepository configRepository; - private final DeploymentMode deploymentMode; - private final String airbyteVersion; - private final WebUrlHelper webUrlHelper; - private final JobErrorReportingClient jobErrorReportingClient; - - public JobErrorReporter(final ConfigRepository configRepository, - final DeploymentMode deploymentMode, - final String airbyteVersion, - final WebUrlHelper webUrlHelper, - final JobErrorReportingClient jobErrorReportingClient) { - - this.configRepository = configRepository; - this.deploymentMode = deploymentMode; - this.airbyteVersion = airbyteVersion; - this.webUrlHelper = webUrlHelper; - this.jobErrorReportingClient = jobErrorReportingClient; - } - - /** - * Reports a Sync Job's connector-caused FailureReasons to the JobErrorReportingClient - * - * @param connectionId - connection that had the failure - * @param failureSummary - final attempt failure summary - * @param jobContext - sync job reporting context - */ - public void reportSyncJobFailure(final UUID connectionId, final AttemptFailureSummary failureSummary, final SyncJobReportingContext jobContext) { - Exceptions.swallow(() -> { - final List traceMessageFailures = failureSummary.getFailures().stream() - .filter(failure -> failure.getMetadata() != null && failure.getMetadata().getAdditionalProperties().containsKey(FROM_TRACE_MESSAGE)) - .toList(); - - final StandardWorkspace workspace = 
configRepository.getStandardWorkspaceFromConnection(connectionId, true); - final Map commonMetadata = MoreMaps.merge( - Map.of(JOB_ID_KEY, String.valueOf(jobContext.jobId())), - getConnectionMetadata(workspace.getWorkspaceId(), connectionId)); - - for (final FailureReason failureReason : traceMessageFailures) { - final FailureOrigin failureOrigin = failureReason.getFailureOrigin(); - - if (failureOrigin == FailureOrigin.SOURCE) { - final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromConnection(connectionId); - final String dockerImage = jobContext.sourceDockerImage(); - final Map metadata = MoreMaps.merge(commonMetadata, getSourceMetadata(sourceDefinition)); - - reportJobFailureReason(workspace, failureReason, dockerImage, metadata); - } else if (failureOrigin == FailureOrigin.DESTINATION) { - final StandardDestinationDefinition destinationDefinition = configRepository.getDestinationDefinitionFromConnection(connectionId); - final String dockerImage = jobContext.destinationDockerImage(); - final Map metadata = MoreMaps.merge(commonMetadata, getDestinationMetadata(destinationDefinition)); - - reportJobFailureReason(workspace, failureReason, dockerImage, metadata); - } else if (failureOrigin == FailureOrigin.NORMALIZATION) { - final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromConnection(connectionId); - final StandardDestinationDefinition destinationDefinition = configRepository.getDestinationDefinitionFromConnection(connectionId); - // since error could be arising from source or destination or normalization itself, we want all the - // metadata - // prefixing source keys so we don't overlap (destination as 'true' keys since normalization runs on - // the destination) - final Map metadata = MoreMaps.merge( - commonMetadata, - getNormalizationMetadata(destinationDefinition.getNormalizationConfig().getNormalizationRepository()), - 
prefixConnectorMetadataKeys(getSourceMetadata(sourceDefinition), "source"), - getDestinationMetadata(destinationDefinition)); - final String dockerImage = - destinationDefinition.getNormalizationConfig().getNormalizationRepository() + ":" + - destinationDefinition.getNormalizationConfig().getNormalizationTag(); - - reportJobFailureReason(workspace, failureReason, dockerImage, metadata); - } - } - }); - } - - /** - * Reports a FailureReason from a connector Check job for a Source to the JobErrorReportingClient - * - * @param workspaceId - workspace for which the check failed - * @param failureReason - failure reason from the check connection job - * @param jobContext - connector job reporting context - */ - public void reportSourceCheckJobFailure(final UUID sourceDefinitionId, - @Nullable final UUID workspaceId, - final FailureReason failureReason, - final ConnectorJobReportingContext jobContext) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardWorkspace workspace = workspaceId != null ? 
configRepository.getStandardWorkspaceNoSecrets(workspaceId, true) : null; - final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); - final Map metadata = MoreMaps.merge( - getSourceMetadata(sourceDefinition), - Map.of(JOB_ID_KEY, jobContext.jobId().toString())); - reportJobFailureReason(workspace, failureReason.withFailureOrigin(FailureOrigin.SOURCE), jobContext.dockerImage(), metadata); - } - - /** - * Reports a FailureReason from a connector Check job for a Destination to the - * JobErrorReportingClient - * - * @param workspaceId - workspace for which the check failed - * @param failureReason - failure reason from the check connection job - * @param jobContext - connector job reporting context - */ - public void reportDestinationCheckJobFailure(final UUID destinationDefinitionId, - @Nullable final UUID workspaceId, - final FailureReason failureReason, - final ConnectorJobReportingContext jobContext) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardWorkspace workspace = workspaceId != null ? 
configRepository.getStandardWorkspaceNoSecrets(workspaceId, true) : null; - final StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition(destinationDefinitionId); - final Map metadata = MoreMaps.merge( - getDestinationMetadata(destinationDefinition), - Map.of(JOB_ID_KEY, jobContext.jobId().toString())); - reportJobFailureReason(workspace, failureReason.withFailureOrigin(FailureOrigin.DESTINATION), jobContext.dockerImage(), metadata); - } - - /** - * Reports a FailureReason from a connector Deploy job for a Source to the JobErrorReportingClient - * - * @param workspaceId - workspace for which the Discover job failed - * @param failureReason - failure reason from the Discover job - * @param jobContext - connector job reporting context - */ - public void reportDiscoverJobFailure(final UUID sourceDefinitionId, - @Nullable final UUID workspaceId, - final FailureReason failureReason, - final ConnectorJobReportingContext jobContext) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardWorkspace workspace = workspaceId != null ? 
configRepository.getStandardWorkspaceNoSecrets(workspaceId, true) : null; - final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); - final Map metadata = MoreMaps.merge( - getSourceMetadata(sourceDefinition), - Map.of(JOB_ID_KEY, jobContext.jobId().toString())); - reportJobFailureReason(workspace, failureReason, jobContext.dockerImage(), metadata); - } - - /** - * Reports a FailureReason from a connector Spec job to the JobErrorReportingClient - * - * @param failureReason - failure reason from the Deploy job - * @param jobContext - connector job reporting context - */ - public void reportSpecJobFailure(final FailureReason failureReason, final ConnectorJobReportingContext jobContext) { - final String dockerImage = jobContext.dockerImage(); - final String connectorRepository = dockerImage.split(":")[0]; - final Map metadata = Map.of( - JOB_ID_KEY, jobContext.jobId().toString(), - CONNECTOR_REPOSITORY_META_KEY, connectorRepository); - reportJobFailureReason(null, failureReason, dockerImage, metadata); - } - - private Map getConnectionMetadata(final UUID workspaceId, final UUID connectionId) { - final String connectionUrl = webUrlHelper.getConnectionUrl(workspaceId, connectionId); - return Map.ofEntries( - Map.entry(CONNECTION_ID_META_KEY, connectionId.toString()), - Map.entry(CONNECTION_URL_META_KEY, connectionUrl)); - } - - private Map getDestinationMetadata(final StandardDestinationDefinition destinationDefinition) { - return Map.ofEntries( - Map.entry(CONNECTOR_DEFINITION_ID_META_KEY, destinationDefinition.getDestinationDefinitionId().toString()), - Map.entry(CONNECTOR_NAME_META_KEY, destinationDefinition.getName()), - Map.entry(CONNECTOR_REPOSITORY_META_KEY, destinationDefinition.getDockerRepository()), - Map.entry(CONNECTOR_RELEASE_STAGE_META_KEY, destinationDefinition.getReleaseStage().value())); - } - - private Map getSourceMetadata(final StandardSourceDefinition sourceDefinition) { - return 
Map.ofEntries( - Map.entry(CONNECTOR_DEFINITION_ID_META_KEY, sourceDefinition.getSourceDefinitionId().toString()), - Map.entry(CONNECTOR_NAME_META_KEY, sourceDefinition.getName()), - Map.entry(CONNECTOR_REPOSITORY_META_KEY, sourceDefinition.getDockerRepository()), - Map.entry(CONNECTOR_RELEASE_STAGE_META_KEY, sourceDefinition.getReleaseStage().value())); - } - - private Map getNormalizationMetadata(final String normalizationImage) { - return Map.ofEntries( - Map.entry(NORMALIZATION_REPOSITORY_META_KEY, normalizationImage)); - } - - private Map prefixConnectorMetadataKeys(final Map connectorMetadata, final String prefix) { - final Map prefixedMetadata = new HashMap<>(); - for (final Map.Entry entry : connectorMetadata.entrySet()) { - prefixedMetadata.put(String.format("%s_%s", prefix, entry.getKey()), entry.getValue()); - } - return prefixedMetadata; - } - - private Map getFailureReasonMetadata(final FailureReason failureReason) { - final Map failureReasonAdditionalProps = failureReason.getMetadata().getAdditionalProperties(); - final Map outMetadata = new HashMap<>(); - - if (failureReasonAdditionalProps.containsKey(CONNECTOR_COMMAND_META_KEY) - && failureReasonAdditionalProps.get(CONNECTOR_COMMAND_META_KEY) != null) { - outMetadata.put(CONNECTOR_COMMAND_META_KEY, failureReasonAdditionalProps.get(CONNECTOR_COMMAND_META_KEY).toString()); - } - - if (failureReason.getFailureOrigin() != null) { - outMetadata.put(FAILURE_ORIGIN_META_KEY, failureReason.getFailureOrigin().value()); - } - - if (failureReason.getFailureType() != null) { - outMetadata.put(FAILURE_TYPE_META_KEY, failureReason.getFailureType().value()); - } - - return outMetadata; - } - - private Map getWorkspaceMetadata(final UUID workspaceId) { - final String workspaceUrl = webUrlHelper.getWorkspaceUrl(workspaceId); - return Map.ofEntries( - Map.entry(WORKSPACE_ID_META_KEY, workspaceId.toString()), - Map.entry(WORKSPACE_URL_META_KEY, workspaceUrl)); - } - - private void reportJobFailureReason(@Nullable 
final StandardWorkspace workspace, - final FailureReason failureReason, - final String dockerImage, - final Map metadata) { - // Failure types associated with a config-error or a manual-cancellation should NOT be reported. - if (UNSUPPORTED_FAILURETYPES.contains(failureReason.getFailureType())) { - return; - } - - final Map commonMetadata = new HashMap<>(Map.ofEntries( - Map.entry(AIRBYTE_VERSION_META_KEY, airbyteVersion), - Map.entry(DEPLOYMENT_MODE_META_KEY, deploymentMode.name()))); - - if (workspace != null) { - commonMetadata.putAll(getWorkspaceMetadata(workspace.getWorkspaceId())); - } - - final Map allMetadata = MoreMaps.merge( - commonMetadata, - getFailureReasonMetadata(failureReason), - metadata); - - try { - jobErrorReportingClient.reportJobFailureReason(workspace, failureReason, dockerImage, allMetadata); - } catch (final Exception e) { - LOGGER.error("Error when reporting job failure reason: {}", failureReason, e); - } - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClient.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClient.java deleted file mode 100644 index 8f3334f05194..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClient.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.errorreporter; - -import edu.umd.cs.findbugs.annotations.Nullable; -import io.airbyte.config.FailureReason; -import io.airbyte.config.StandardWorkspace; -import java.util.Map; - -/** - * A generic interface for a client that reports errors - */ -public interface JobErrorReportingClient { - - /** - * Report a job failure reason - */ - void reportJobFailureReason(@Nullable StandardWorkspace workspace, - final FailureReason reason, - @Nullable final String dockerImage, - Map metadata); - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClientFactory.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClientFactory.java deleted file mode 100644 index 825038e288e8..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClientFactory.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.errorreporter; - -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.JobErrorReportingStrategy; - -public class JobErrorReportingClientFactory { - - /** - * Creates an error reporting client based on the desired strategy to use - * - * @param strategy - which type of error reporting client should be created - * @return JobErrorReportingClient - */ - public static JobErrorReportingClient getClient(final JobErrorReportingStrategy strategy, final Configs configs) { - return switch (strategy) { - case SENTRY -> new SentryJobErrorReportingClient(configs.getJobErrorReportingSentryDSN(), new SentryExceptionHelper()); - case LOGGING -> new LoggingJobErrorReportingClient(); - }; - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/LoggingJobErrorReportingClient.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/LoggingJobErrorReportingClient.java deleted file mode 100644 index 185b1a676db1..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/LoggingJobErrorReportingClient.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.errorreporter; - -import edu.umd.cs.findbugs.annotations.Nullable; -import io.airbyte.config.FailureReason; -import io.airbyte.config.StandardWorkspace; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LoggingJobErrorReportingClient implements JobErrorReportingClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(LoggingJobErrorReportingClient.class); - - @Override - public void reportJobFailureReason(@Nullable final StandardWorkspace workspace, - final FailureReason reason, - final String dockerImage, - final Map metadata) { - LOGGER.info("Report Job Error -> workspaceId: {}, dockerImage: {}, failureReason: {}, metadata: {}", - workspace != null ? workspace.getWorkspaceId() : "null", - dockerImage, - reason, - metadata); - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java deleted file mode 100644 index 6f257095464c..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java +++ /dev/null @@ -1,345 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.errorreporter; - -import io.airbyte.commons.lang.Exceptions; -import io.sentry.protocol.SentryException; -import io.sentry.protocol.SentryStackFrame; -import io.sentry.protocol.SentryStackTrace; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.AvoidLiteralsInIfCondition") -public class SentryExceptionHelper { - - public record SentryParsedException(SentryExceptionPlatform platform, List exceptions) {} - - private static final Logger LOGGER = LoggerFactory.getLogger(SentryExceptionHelper.class); - - public enum ERROR_MAP_KEYS { - ERROR_MAP_MESSAGE_KEY, - ERROR_MAP_TYPE_KEY - } - - /** - * Specifies the platform for a thrown exception. Values must be supported by Sentry as specified in - * https://develop.sentry.dev/sdk/event-payloads/#required-attributes. Currently, only java, python - * and dbt (other) exceptions are supported. - */ - public enum SentryExceptionPlatform { - - JAVA("java"), - PYTHON("python"), - OTHER("other"); - - private final String value; - - SentryExceptionPlatform(final String value) { - this.value = value; - } - - public String getValue() { - return value; - } - - @Override - public String toString() { - return String.valueOf(value); - } - - } - - /** - * Processes a raw stacktrace string into structured SentryExceptions - *

- * Currently, Java and Python stacktraces are supported. If an unsupported stacktrace format is - * encountered, an empty optional will be returned, in which case we can fall back to alternate - * grouping. - */ - public Optional buildSentryExceptions(final String stacktrace) { - return Exceptions.swallowWithDefault(() -> { - if (stacktrace.startsWith("Traceback (most recent call last):")) { - return buildPythonSentryExceptions(stacktrace); - } - if (stacktrace.contains("\tat ") && stacktrace.contains(".java")) { - return buildJavaSentryExceptions(stacktrace); - } - if (stacktrace.startsWith("AirbyteDbtError: ")) { - return buildNormalizationDbtSentryExceptions(stacktrace); - } - - return Optional.empty(); - }, Optional.empty()); - } - - private static Optional buildPythonSentryExceptions(final String stacktrace) { - final List sentryExceptions = new ArrayList<>(); - - // separate chained exceptions - // e.g "\n\nThe above exception was the direct cause of the following exception:\n\n" - // "\n\nDuring handling of the above exception, another exception occurred:\n\n" - final String exceptionSeparator = "\n\n[\\w ,]+:\n\n"; - final String[] exceptions = stacktrace.split(exceptionSeparator); - - for (final String exceptionStr : exceptions) { - final SentryStackTrace stackTrace = new SentryStackTrace(); - final List stackFrames = new ArrayList<>(); - - // Use a regex to grab stack trace frame information - final Pattern framePattern = Pattern.compile("File \"(?.+)\", line (?\\d+), in (?.+)\\n {4}(?.+)\\n"); - final Matcher matcher = framePattern.matcher(exceptionStr); - int lastMatchIdx = -1; - - while (matcher.find()) { - final String absPath = matcher.group("absPath"); - final String lineno = matcher.group("lineno"); - final String function = matcher.group("function"); - final String contextLine = matcher.group("contextLine"); - - final SentryStackFrame stackFrame = new SentryStackFrame(); - stackFrame.setAbsPath(absPath); - 
stackFrame.setLineno(Integer.valueOf(lineno)); - stackFrame.setFunction(function); - stackFrame.setContextLine(contextLine); - stackFrames.add(stackFrame); - - lastMatchIdx = matcher.end(); - } - - if (!stackFrames.isEmpty()) { - stackTrace.setFrames(stackFrames); - - final SentryException sentryException = new SentryException(); - sentryException.setStacktrace(stackTrace); - - // The final part of our stack trace has the exception type and (optionally) a value - // (e.g. "RuntimeError: This is the value") - final String remaining = exceptionStr.substring(lastMatchIdx); - final String[] parts = remaining.split(":", 2); - - if (parts.length > 0) { - sentryException.setType(parts[0].trim()); - if (parts.length == 2) { - sentryException.setValue(parts[1].trim()); - } - - sentryExceptions.add(sentryException); - } - } - } - - if (sentryExceptions.isEmpty()) - return Optional.empty(); - - return Optional.of(new SentryParsedException(SentryExceptionPlatform.PYTHON, sentryExceptions)); - } - - private static Optional buildJavaSentryExceptions(final String stacktrace) { - final List sentryExceptions = new ArrayList<>(); - - // separate chained exceptions - // e.g "\nCaused by: " - final String exceptionSeparator = "\nCaused by: "; - final String[] exceptions = stacktrace.split(exceptionSeparator); - - for (final String exceptionStr : exceptions) { - final SentryStackTrace stackTrace = new SentryStackTrace(); - final List stackFrames = new ArrayList<>(); - - // Use a regex to grab stack trace frame information - final Pattern framePattern = Pattern.compile( - "\n\tat (?:[\\w.$/]+/)?(?[\\w$.]+)\\.(?[\\w<>$]+)\\((?:(?[\\w]+\\.java):(?\\d+)\\)|(?[\\w\\s]*))"); - final Matcher matcher = framePattern.matcher(exceptionStr); - - while (matcher.find()) { - final String module = matcher.group("module"); - final String filename = matcher.group("filename"); - final String lineno = matcher.group("lineno"); - final String function = matcher.group("function"); - final String 
sourceDescription = matcher.group("desc"); - - final SentryStackFrame stackFrame = new SentryStackFrame(); - stackFrame.setModule(module); - stackFrame.setFunction(function); - stackFrame.setFilename(filename); - - if (lineno != null) { - stackFrame.setLineno(Integer.valueOf(lineno)); - } - if (sourceDescription != null && sourceDescription.equals("Native Method")) { - stackFrame.setNative(true); - } - - stackFrames.add(stackFrame); - } - - if (!stackFrames.isEmpty()) { - Collections.reverse(stackFrames); - stackTrace.setFrames(stackFrames); - - final SentryException sentryException = new SentryException(); - sentryException.setStacktrace(stackTrace); - - // The first section of our stacktrace before the first frame has exception type and value - final String[] sections = exceptionStr.split("\n\tat ", 2); - final String[] headerParts = sections[0].split(": ", 2); - - if (headerParts.length > 0) { - sentryException.setType(headerParts[0].trim()); - if (headerParts.length == 2) { - sentryException.setValue(headerParts[1].trim()); - } - - sentryExceptions.add(sentryException); - } - } - } - - if (sentryExceptions.isEmpty()) - return Optional.empty(); - - return Optional.of(new SentryParsedException(SentryExceptionPlatform.JAVA, sentryExceptions)); - } - - private static Optional buildNormalizationDbtSentryExceptions(final String stacktrace) { - final List sentryExceptions = new ArrayList<>(); - - final Map usefulErrorMap = getUsefulErrorMessageAndTypeFromDbtError(stacktrace); - - // if our errorMessage from the function != stacktrace then we know we've pulled out something - // useful - if (!usefulErrorMap.get(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY).equals(stacktrace)) { - final SentryException usefulException = new SentryException(); - usefulException.setValue(usefulErrorMap.get(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY)); - usefulException.setType(usefulErrorMap.get(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY)); - sentryExceptions.add(usefulException); - } - - if 
(sentryExceptions.isEmpty()) - return Optional.empty(); - - return Optional.of(new SentryParsedException(SentryExceptionPlatform.OTHER, sentryExceptions)); - } - - public static Map getUsefulErrorMessageAndTypeFromDbtError(final String stacktrace) { - // the dbt 'stacktrace' is really just all the log messages at 'error' level, stuck together. - // therefore there is not a totally consistent structure to these, - // see the docs: https://docs.getdbt.com/guides/legacy/debugging-errors - // the logic below is built based on the ~450 unique dbt errors we encountered before this PR - // and is a best effort to isolate the useful part of the error logs for debugging and grouping - // and bring some semblance of exception 'types' to differentiate between errors. - final Map errorMessageAndType = new HashMap<>(); - final String[] stacktraceLines = stacktrace.split("\n"); - - boolean defaultNextLine = false; - // TODO: this whole code block is quite ugh, commented to try and make each part clear but could be - // much more readable. - mainLoop: for (int i = 0; i < stacktraceLines.length; i++) { - // This order is important due to how these errors can co-occur. - // This order attempts to keep error definitions consistent based on our observations of possible - // dbt error structures. 
- try { - // Database Errors - if (stacktraceLines[i].contains("Database Error in model")) { - // Database Error : SQL compilation error - if (stacktraceLines[i + 1].contains("SQL compilation error")) { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY, - String.format("%s %s", stacktraceLines[i + 1].trim(), stacktraceLines[i + 2].trim())); - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtDatabaseSQLCompilationError"); - break; - } - // Database Error: Invalid input - else if (stacktraceLines[i + 1].contains("Invalid input")) { - for (final String followingLine : Arrays.copyOfRange(stacktraceLines, i + 1, stacktraceLines.length)) { - if (followingLine.trim().startsWith("context:")) { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY, - String.format("%s\n%s", stacktraceLines[i + 1].trim(), followingLine.trim())); - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtDatabaseInvalidInputError"); - break mainLoop; - } - } - } - // Database Error: Syntax error - else if (stacktraceLines[i + 1].contains("syntax error at or near \"")) { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY, - String.format("%s\n%s", stacktraceLines[i + 1].trim(), stacktraceLines[i + 2].trim())); - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtDatabaseSyntaxError"); - break; - } - // Database Error: default - else { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtDatabaseError"); - defaultNextLine = true; - } - } - // Unhandled Error - else if (stacktraceLines[i].contains("Unhandled error while executing model")) { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtUnhandledError"); - defaultNextLine = true; - } - // Compilation Errors - else if (stacktraceLines[i].contains("Compilation Error")) { - // Compilation Error: Ambiguous Relation - if (stacktraceLines[i + 1].contains("When searching for a relation, dbt found an approximate match.")) { - 
errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY, - String.format("%s %s", stacktraceLines[i + 1].trim(), stacktraceLines[i + 2].trim())); - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtCompilationAmbiguousRelationError"); - break; - } - // Compilation Error: default - else { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtCompilationError"); - defaultNextLine = true; - } - } - // Runtime Errors - else if (stacktraceLines[i].contains("Runtime Error")) { - // Runtime Error: Database error - for (final String followingLine : Arrays.copyOfRange(stacktraceLines, i + 1, stacktraceLines.length)) { - if ("Database Error".equals(followingLine.trim())) { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY, - String.format("%s", stacktraceLines[Arrays.stream(stacktraceLines).toList().indexOf(followingLine) + 1].trim())); - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtRuntimeDatabaseError"); - break mainLoop; - } - } - // Runtime Error: default - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtRuntimeError"); - defaultNextLine = true; - } - // Database Error: formatted differently, catch last to avoid counting other types of errors as - // Database Error - else if ("Database Error".equals(stacktraceLines[i].trim())) { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "DbtDatabaseError"); - defaultNextLine = true; - } - // handle the default case without repeating code - if (defaultNextLine) { - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY, stacktraceLines[i + 1].trim()); - break; - } - } catch (final ArrayIndexOutOfBoundsException e) { - // this means our logic is slightly off, our assumption of where error lines are is incorrect - LOGGER.warn("Failed trying to parse useful error message out of dbt error, defaulting to full stacktrace"); - } - } - if (errorMessageAndType.isEmpty()) { - // For anything we haven't caught, just return full stacktrace - 
errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_MESSAGE_KEY, stacktrace); - errorMessageAndType.put(ERROR_MAP_KEYS.ERROR_MAP_TYPE_KEY, "AirbyteDbtError"); - } - return errorMessageAndType; - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryJobErrorReportingClient.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryJobErrorReportingClient.java deleted file mode 100644 index a5f90d81ac61..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryJobErrorReportingClient.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.errorreporter; - -import edu.umd.cs.findbugs.annotations.Nullable; -import io.airbyte.config.FailureReason; -import io.airbyte.config.Metadata; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper.SentryParsedException; -import io.sentry.Hub; -import io.sentry.IHub; -import io.sentry.NoOpHub; -import io.sentry.SentryEvent; -import io.sentry.SentryOptions; -import io.sentry.protocol.Message; -import io.sentry.protocol.SentryException; -import io.sentry.protocol.User; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -public class SentryJobErrorReportingClient implements JobErrorReportingClient { - - static final String STACKTRACE_PARSE_ERROR_TAG_KEY = "stacktrace_parse_error"; - static final String STACKTRACE_PLATFORM_TAG_KEY = "stacktrace_platform"; - private final IHub sentryHub; - private final SentryExceptionHelper exceptionHelper; - - SentryJobErrorReportingClient(final IHub sentryHub, final SentryExceptionHelper exceptionHelper) { - this.sentryHub = sentryHub; - this.exceptionHelper = exceptionHelper; - } - - public SentryJobErrorReportingClient(final String 
sentryDSN, final SentryExceptionHelper exceptionHelper) { - this(createSentryHubWithDSN(sentryDSN), exceptionHelper); - } - - static IHub createSentryHubWithDSN(final String sentryDSN) { - if (sentryDSN == null || sentryDSN.isEmpty()) { - return NoOpHub.getInstance(); - } - - final SentryOptions options = new SentryOptions(); - options.setDsn(sentryDSN); - options.setAttachStacktrace(false); - options.setEnableUncaughtExceptionHandler(false); - return new Hub(options); - } - - /** - * Reports a Connector Job FailureReason to Sentry - * - * @param workspace - Workspace where this failure occurred - * @param failureReason - FailureReason to report - * @param dockerImage - Tagged docker image that represents the release where this failure occurred - * @param metadata - Extra metadata to set as tags on the event - */ - @Override - public void reportJobFailureReason(@Nullable final StandardWorkspace workspace, - final FailureReason failureReason, - @Nullable final String dockerImage, - final Map metadata) { - final SentryEvent event = new SentryEvent(); - - if (dockerImage != null) { - // Remove invalid characters from the release name, use @ so sentry knows how to grab the tag - // e.g. 
airbyte/source-xyz:1.2.0 -> airbyte-source-xyz@1.2.0 - // More info at https://docs.sentry.io/product/cli/releases/#creating-releases - final String release = dockerImage.replace("/", "-").replace(":", "@"); - event.setRelease(release); - - // enhance event fingerprint to ensure separate grouping per connector - final String[] releaseParts = release.split("@"); - if (releaseParts.length > 0) { - event.setFingerprints(List.of("{{ default }}", releaseParts[0])); - } - } - - // set workspace as the user in sentry to get impact and priority - if (workspace != null) { - final User sentryUser = new User(); - sentryUser.setId(String.valueOf(workspace.getWorkspaceId())); - sentryUser.setUsername(workspace.getName()); - event.setUser(sentryUser); - } - - // set metadata as tags - event.setTags(metadata); - - // set failure reason's internalMessage as event message - // Sentry will use this to fuzzy-group if no stacktrace information is available - final Message message = new Message(); - message.setFormatted(failureReason.getInternalMessage()); - event.setMessage(message); - - // events can come from any platform - event.setPlatform("other"); - - // attach failure reason stack trace - final String failureStackTrace = failureReason.getStacktrace(); - if (failureStackTrace != null && !failureStackTrace.isBlank()) { - final Optional optParsedException = exceptionHelper.buildSentryExceptions(failureStackTrace); - if (optParsedException.isPresent()) { - final SentryParsedException parsedException = optParsedException.get(); - final String platform = parsedException.platform().getValue(); - event.setPlatform(platform); - event.setTag(STACKTRACE_PLATFORM_TAG_KEY, platform); - event.setExceptions(parsedException.exceptions()); - } else { - event.setTag(STACKTRACE_PARSE_ERROR_TAG_KEY, "1"); - - // We couldn't parse the stacktrace, but we can still give it to Sentry for (less accurate) grouping - final String normalizedStacktrace = failureStackTrace - .replace("\n", ", ") - 
.replace(failureReason.getInternalMessage(), ""); - - final SentryException sentryException = new SentryException(); - sentryException.setValue(normalizedStacktrace); - event.setExceptions(List.of(sentryException)); - } - } - - sentryHub.configureScope(scope -> { - final Map failureReasonContext = new HashMap<>(); - failureReasonContext.put("internalMessage", failureReason.getInternalMessage()); - failureReasonContext.put("externalMessage", failureReason.getExternalMessage()); - failureReasonContext.put("stacktrace", failureReason.getStacktrace()); - failureReasonContext.put("timestamp", failureReason.getTimestamp().toString()); - - final Metadata failureReasonMeta = failureReason.getMetadata(); - if (failureReasonMeta != null) { - failureReasonContext.put("metadata", failureReasonMeta.toString()); - } - - scope.setContexts("Failure Reason", failureReasonContext); - }); - - sentryHub.captureEvent(event); - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SyncJobReportingContext.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SyncJobReportingContext.java deleted file mode 100644 index 03941f894f40..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SyncJobReportingContext.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.errorreporter; - -public record SyncJobReportingContext(long jobId, String sourceDockerImage, String destinationDockerImage) { - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java deleted file mode 100644 index 7af0741c9aa7..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.factory; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.commons.version.Version; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.DefaultJobCreator; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.UUID; - -public class DefaultSyncJobFactory implements SyncJobFactory { - - private final boolean connectorSpecificResourceDefaultsEnabled; - private final DefaultJobCreator jobCreator; - private final ConfigRepository configRepository; - private final OAuthConfigSupplier oAuthConfigSupplier; - private final WorkspaceHelper workspaceHelper; - - public DefaultSyncJobFactory(final boolean connectorSpecificResourceDefaultsEnabled, - 
final DefaultJobCreator jobCreator, - final ConfigRepository configRepository, - final OAuthConfigSupplier oAuthConfigSupplier, - final WorkspaceHelper workspaceHelper) { - this.connectorSpecificResourceDefaultsEnabled = connectorSpecificResourceDefaultsEnabled; - this.jobCreator = jobCreator; - this.configRepository = configRepository; - this.oAuthConfigSupplier = oAuthConfigSupplier; - this.workspaceHelper = workspaceHelper; - } - - @Override - public Long create(final UUID connectionId) { - try { - final StandardSync standardSync = configRepository.getStandardSync(connectionId); - final UUID workspaceId = workspaceHelper.getWorkspaceForSourceId(standardSync.getSourceId()); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - final SourceConnection sourceConnection = configRepository.getSourceConnection(standardSync.getSourceId()); - final DestinationConnection destinationConnection = configRepository.getDestinationConnection(standardSync.getDestinationId()); - final JsonNode sourceConfiguration = oAuthConfigSupplier.injectSourceOAuthParameters( - sourceConnection.getSourceDefinitionId(), - sourceConnection.getWorkspaceId(), - sourceConnection.getConfiguration()); - sourceConnection.withConfiguration(sourceConfiguration); - final JsonNode destinationConfiguration = oAuthConfigSupplier.injectDestinationOAuthParameters( - destinationConnection.getDestinationDefinitionId(), - destinationConnection.getWorkspaceId(), - destinationConnection.getConfiguration()); - destinationConnection.withConfiguration(destinationConfiguration); - final StandardSourceDefinition sourceDefinition = configRepository - .getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); - final StandardDestinationDefinition destinationDefinition = configRepository - .getStandardDestinationDefinition(destinationConnection.getDestinationDefinitionId()); - - final String sourceImageName = sourceDefinition.getDockerRepository() + ":" + 
sourceDefinition.getDockerImageTag(); - final String destinationImageName = destinationDefinition.getDockerRepository() + ":" + destinationDefinition.getDockerImageTag(); - - final List standardSyncOperations = Lists.newArrayList(); - for (final var operationId : standardSync.getOperationIds()) { - final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationId); - standardSyncOperations.add(standardSyncOperation); - } - - // for OSS users, make it possible to ignore default actor-level resource requirements - if (!connectorSpecificResourceDefaultsEnabled) { - sourceDefinition.setResourceRequirements(null); - destinationDefinition.setResourceRequirements(null); - } - - return jobCreator.createSyncJob( - sourceConnection, - destinationConnection, - standardSync, - sourceImageName, - new Version(sourceDefinition.getProtocolVersion()), - destinationImageName, - new Version(destinationDefinition.getProtocolVersion()), - standardSyncOperations, - workspace.getWebhookOperationConfigs(), - sourceDefinition, - destinationDefinition, - workspace.getWorkspaceId()) - .orElseThrow(() -> new IllegalStateException("We shouldn't be trying to create a new sync job if there is one running already.")); - - } catch (final IOException | JsonValidationException | ConfigNotFoundException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java deleted file mode 100644 index b6ac46f7e9f3..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.factory; - -import static com.fasterxml.jackson.databind.node.JsonNodeType.ARRAY; -import static com.fasterxml.jackson.databind.node.JsonNodeType.OBJECT; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.MoreOAuthParameters; -import io.airbyte.persistence.job.tracker.TrackingMetadata; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.BiConsumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class OAuthConfigSupplier { - - private static final Logger LOGGER = LoggerFactory.getLogger(OAuthConfigSupplier.class); - - public static final String PATH_IN_CONNECTOR_CONFIG = "path_in_connector_config"; - private static final String PROPERTIES = "properties"; - final private ConfigRepository configRepository; - private final TrackingClient trackingClient; - - public OAuthConfigSupplier(final ConfigRepository configRepository, final TrackingClient trackingClient) { - this.configRepository = configRepository; - this.trackingClient = trackingClient; - } - - public static boolean hasOAuthConfigSpecification(final ConnectorSpecification spec) { - return spec != null && spec.getAdvancedAuth() != null && spec.getAdvancedAuth().getOauthConfigSpecification() != 
null; - } - - public JsonNode maskSourceOAuthParameters(final UUID sourceDefinitionId, final UUID workspaceId, final JsonNode sourceConnectorConfig) - throws IOException { - try { - final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); - MoreOAuthParameters.getSourceOAuthParameter(configRepository.listSourceOAuthParam().stream(), workspaceId, sourceDefinitionId) - .ifPresent(sourceOAuthParameter -> { - maskOauthParameters(sourceDefinition.getName(), sourceDefinition.getSpec(), sourceConnectorConfig); - }); - return sourceConnectorConfig; - } catch (final JsonValidationException | ConfigNotFoundException e) { - throw new IOException(e); - } - } - - public JsonNode maskDestinationOAuthParameters(final UUID destinationDefinitionId, - final UUID workspaceId, - final JsonNode destinationConnectorConfig) - throws IOException { - try { - final StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition(destinationDefinitionId); - MoreOAuthParameters.getDestinationOAuthParameter(configRepository.listDestinationOAuthParam().stream(), workspaceId, destinationDefinitionId) - .ifPresent(destinationOAuthParameter -> { - maskOauthParameters(destinationDefinition.getName(), destinationDefinition.getSpec(), destinationConnectorConfig); - }); - return destinationConnectorConfig; - } catch (final JsonValidationException | ConfigNotFoundException e) { - throw new IOException(e); - } - } - - public JsonNode injectSourceOAuthParameters(final UUID sourceDefinitionId, final UUID workspaceId, final JsonNode sourceConnectorConfig) - throws IOException { - try { - final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); - MoreOAuthParameters.getSourceOAuthParameter(configRepository.listSourceOAuthParam().stream(), workspaceId, sourceDefinitionId) - .ifPresent(sourceOAuthParameter -> { - if 
(injectOAuthParameters(sourceDefinition.getName(), sourceDefinition.getSpec(), sourceOAuthParameter.getConfiguration(), - sourceConnectorConfig)) { - final Map metadata = TrackingMetadata.generateSourceDefinitionMetadata(sourceDefinition); - Exceptions.swallow(() -> trackingClient.track(workspaceId, "OAuth Injection - Backend", metadata)); - } - }); - return sourceConnectorConfig; - } catch (final JsonValidationException | ConfigNotFoundException e) { - throw new IOException(e); - } - } - - public JsonNode injectDestinationOAuthParameters(final UUID destinationDefinitionId, - final UUID workspaceId, - final JsonNode destinationConnectorConfig) - throws IOException { - try { - final StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition(destinationDefinitionId); - MoreOAuthParameters.getDestinationOAuthParameter(configRepository.listDestinationOAuthParam().stream(), workspaceId, destinationDefinitionId) - .ifPresent(destinationOAuthParameter -> { - if (injectOAuthParameters(destinationDefinition.getName(), destinationDefinition.getSpec(), destinationOAuthParameter.getConfiguration(), - destinationConnectorConfig)) { - final Map metadata = TrackingMetadata.generateDestinationDefinitionMetadata(destinationDefinition); - Exceptions.swallow(() -> trackingClient.track(workspaceId, "OAuth Injection - Backend", metadata)); - } - }); - return destinationConnectorConfig; - } catch (final JsonValidationException | ConfigNotFoundException e) { - throw new IOException(e); - } - } - - /** - * Gets the OAuth parameter paths as specified in the connector spec and traverses through them - */ - private static void traverseOAuthOutputPaths(final ConnectorSpecification spec, - final String connectorName, - final BiConsumer> consumer) { - final JsonNode outputSpecTop = spec.getAdvancedAuth().getOauthConfigSpecification().getCompleteOauthServerOutputSpecification(); - final JsonNode outputSpec; - if (outputSpecTop.has(PROPERTIES)) { - 
outputSpec = outputSpecTop.get(PROPERTIES); - } else { - LOGGER.error(String.format("In %s's advanced_auth spec, completeOAuthServerOutputSpecification does not declare properties.", connectorName)); - return; - } - - for (final String key : Jsons.keys(outputSpec)) { - final JsonNode node = outputSpec.get(key); - if (node.getNodeType() == OBJECT) { - final JsonNode pathNode = node.get(PATH_IN_CONNECTOR_CONFIG); - if (pathNode != null && pathNode.getNodeType() == ARRAY) { - final List propertyPath = new ArrayList<>(); - final ArrayNode arrayNode = (ArrayNode) pathNode; - for (int i = 0; i < arrayNode.size(); ++i) { - propertyPath.add(arrayNode.get(i).asText()); - } - if (!propertyPath.isEmpty()) { - consumer.accept(key, propertyPath); - } else { - LOGGER.error(String.format("In %s's advanced_auth spec, completeOAuthServerOutputSpecification includes an invalid empty %s for %s", - connectorName, PATH_IN_CONNECTOR_CONFIG, key)); - } - } else { - LOGGER.error( - String.format("In %s's advanced_auth spec, completeOAuthServerOutputSpecification does not declare an Array %s for %s", - connectorName, PATH_IN_CONNECTOR_CONFIG, key)); - } - } else { - LOGGER.error(String.format("In %s's advanced_auth spec, completeOAuthServerOutputSpecification does not declare an ObjectNode for %s", - connectorName, key)); - } - } - } - - private static void maskOauthParameters(final String connectorName, final ConnectorSpecification spec, final JsonNode connectorConfig) { - if (!hasOAuthConfigSpecification(spec)) { - return; - } - if (!checkOAuthPredicate(spec.getAdvancedAuth().getPredicateKey(), spec.getAdvancedAuth().getPredicateValue(), connectorConfig)) { - // OAuth is not applicable in this connectorConfig due to the predicate not being verified - return; - } - - traverseOAuthOutputPaths(spec, connectorName, (_key, propertyPath) -> { - Jsons.replaceNestedValue(connectorConfig, propertyPath, Jsons.jsonNode(MoreOAuthParameters.SECRET_MASK)); - }); - - } - - private static boolean 
injectOAuthParameters(final String connectorName, - final ConnectorSpecification spec, - final JsonNode oAuthParameters, - final JsonNode connectorConfig) { - if (!hasOAuthConfigSpecification(spec)) { - // keep backward compatible behavior if connector does not declare an OAuth config spec - MoreOAuthParameters.mergeJsons((ObjectNode) connectorConfig, (ObjectNode) oAuthParameters); - return true; - } - if (!checkOAuthPredicate(spec.getAdvancedAuth().getPredicateKey(), spec.getAdvancedAuth().getPredicateValue(), connectorConfig)) { - // OAuth is not applicable in this connectorConfig due to the predicate not being verified - return false; - } - - // TODO: if we write a migration to flatten persisted configs in db, we don't need to flatten - // here see https://github.com/airbytehq/airbyte/issues/7624 - final JsonNode flatOAuthParameters = MoreOAuthParameters.flattenOAuthConfig(oAuthParameters); - - final AtomicBoolean result = new AtomicBoolean(false); - traverseOAuthOutputPaths(spec, connectorName, (key, propertyPath) -> { - Jsons.replaceNestedValue(connectorConfig, propertyPath, flatOAuthParameters.get(key)); - result.set(true); - }); - - return result.get(); - } - - private static boolean checkOAuthPredicate(final List predicateKey, final String predicateValue, final JsonNode connectorConfig) { - if (predicateKey != null && !predicateKey.isEmpty()) { - JsonNode node = connectorConfig; - for (final String key : predicateKey) { - if (node.has(key)) { - node = node.get(key); - } else { - return false; - } - } - if (predicateValue != null && !predicateValue.isBlank()) { - return node.asText().equals(predicateValue); - } else { - return true; - } - } - return true; - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java deleted file mode 100644 index 9c08b1375cca..000000000000 --- 
a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.factory; - -import java.util.UUID; - -public interface SyncJobFactory { - - Long create(UUID connectionId); - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Attempt.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Attempt.java deleted file mode 100644 index 8c04ef11d50e..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Attempt.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.models; - -import io.airbyte.config.AttemptFailureSummary; -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.JobOutput; -import java.nio.file.Path; -import java.util.Objects; -import java.util.Optional; -import javax.annotation.Nullable; - -public class Attempt { - - private final int attemptNumber; - private final long jobId; - private final JobOutput output; - private final AttemptStatus status; - private final String processingTaskQueue; - private final AttemptFailureSummary failureSummary; - private final AttemptSyncConfig syncConfig; - private final Path logPath; - private final long updatedAtInSecond; - private final long createdAtInSecond; - private final Long endedAtInSecond; - - public Attempt(final int attemptNumber, - final long jobId, - final Path logPath, - final @Nullable AttemptSyncConfig syncConfig, - final @Nullable JobOutput output, - final AttemptStatus status, - final String processingTaskQueue, - final @Nullable AttemptFailureSummary failureSummary, - final long createdAtInSecond, - final long updatedAtInSecond, - final @Nullable Long endedAtInSecond) { - this.attemptNumber = 
attemptNumber; - this.jobId = jobId; - this.syncConfig = syncConfig; - this.output = output; - this.status = status; - this.processingTaskQueue = processingTaskQueue; - this.failureSummary = failureSummary; - this.logPath = logPath; - this.updatedAtInSecond = updatedAtInSecond; - this.createdAtInSecond = createdAtInSecond; - this.endedAtInSecond = endedAtInSecond; - } - - public int getAttemptNumber() { - return attemptNumber; - } - - public long getJobId() { - return jobId; - } - - public Optional getSyncConfig() { - return Optional.ofNullable(syncConfig); - } - - public Optional getOutput() { - return Optional.ofNullable(output); - } - - public AttemptStatus getStatus() { - return status; - } - - public String getProcessingTaskQueue() { - return processingTaskQueue; - } - - public Optional getFailureSummary() { - return Optional.ofNullable(failureSummary); - } - - public Path getLogPath() { - return logPath; - } - - public Optional getEndedAtInSecond() { - return Optional.ofNullable(endedAtInSecond); - } - - public long getCreatedAtInSecond() { - return createdAtInSecond; - } - - public long getUpdatedAtInSecond() { - return updatedAtInSecond; - } - - public static boolean isAttemptInTerminalState(final Attempt attempt) { - return AttemptStatus.TERMINAL_STATUSES.contains(attempt.getStatus()); - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final Attempt attempt = (Attempt) o; - return attemptNumber == attempt.attemptNumber && - jobId == attempt.jobId && - updatedAtInSecond == attempt.updatedAtInSecond && - createdAtInSecond == attempt.createdAtInSecond && - Objects.equals(syncConfig, attempt.syncConfig) && - Objects.equals(output, attempt.output) && - status == attempt.status && - Objects.equals(failureSummary, attempt.failureSummary) && - Objects.equals(logPath, attempt.logPath) && - Objects.equals(endedAtInSecond, attempt.endedAtInSecond); - 
} - - @Override - public int hashCode() { - return Objects.hash(attemptNumber, jobId, syncConfig, output, status, failureSummary, logPath, updatedAtInSecond, createdAtInSecond, - endedAtInSecond); - } - - @Override - public String toString() { - return "Attempt{" + - "id=" + attemptNumber + - ", jobId=" + jobId + - ", syncConfig=" + syncConfig + - ", output=" + output + - ", status=" + status + - ", failureSummary=" + failureSummary + - ", logPath=" + logPath + - ", updatedAtInSecond=" + updatedAtInSecond + - ", createdAtInSecond=" + createdAtInSecond + - ", endedAtInSecond=" + endedAtInSecond + - '}'; - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptNormalizationStatus.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptNormalizationStatus.java deleted file mode 100644 index 6fddad2a6d6a..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptNormalizationStatus.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.models; - -import java.util.Optional; - -public record AttemptNormalizationStatus(int attemptNumber, Optional recordsCommitted, boolean normalizationFailed) {} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptStatus.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptStatus.java deleted file mode 100644 index 3ed0a2af0eea..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptStatus.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.models; - -import com.google.common.collect.Sets; -import java.util.Set; - -public enum AttemptStatus { - - RUNNING, - FAILED, - SUCCEEDED; - - public static final Set TERMINAL_STATUSES = Sets.newHashSet(FAILED, SUCCEEDED); - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptWithJobInfo.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptWithJobInfo.java deleted file mode 100644 index 9882698af382..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptWithJobInfo.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.models; - -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; - -public class AttemptWithJobInfo { - - /** - * This {@link JobInfo} class contains pieces of information about the parent job that may be - * useful. This approach was taken as opposed to using the actual {@link Job} class here to avoid - * confusion around the fact that the Job instance would not have its `attempts` field populated. 
- */ - public static class JobInfo { - - private final long id; - private final ConfigType configType; - private final String scope; - private final JobConfig config; - private final JobStatus status; - - public JobInfo(final long id, final ConfigType configType, final String scope, final JobConfig config, final JobStatus status) { - this.id = id; - this.configType = configType; - this.scope = scope; - this.config = config; - this.status = status; - } - - public long getId() { - return id; - } - - public ConfigType getConfigType() { - return configType; - } - - public String getScope() { - return scope; - } - - public JobConfig getConfig() { - return config; - } - - public JobStatus getStatus() { - return status; - } - - } - - private final Attempt attempt; - private final JobInfo jobInfo; - - public JobInfo getJobInfo() { - return jobInfo; - } - - public Attempt getAttempt() { - return attempt; - } - - public AttemptWithJobInfo(final Attempt attempt, final JobInfo jobInfo) { - this.attempt = attempt; - this.jobInfo = jobInfo; - } - - public AttemptWithJobInfo(final Attempt attempt, final Job job) { - this.attempt = attempt; - this.jobInfo = new JobInfo( - job.getId(), - job.getConfigType(), - job.getScope(), - job.getConfig(), - job.getStatus()); - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java deleted file mode 100644 index 3900ea85c8f0..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.models; - -import com.google.common.base.Preconditions; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobOutput; -import java.util.Comparator; -import java.util.EnumSet; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import javax.annotation.Nullable; - -public class Job { - - public static final Set REPLICATION_TYPES = EnumSet.of(ConfigType.SYNC, ConfigType.RESET_CONNECTION); - - private final long id; - private final ConfigType configType; - private final String scope; - private final JobConfig config; - private final JobStatus status; - private final Long startedAtInSecond; - private final long createdAtInSecond; - private final long updatedAtInSecond; - private final List attempts; - - public Job(final long id, - final ConfigType configType, - final String scope, - final JobConfig config, - final List attempts, - final JobStatus status, - final @Nullable Long startedAtInSecond, - final long createdAtInSecond, - final long updatedAtInSecond) { - this.id = id; - this.configType = configType; - this.scope = scope; - this.config = config; - this.attempts = attempts; - this.status = status; - this.startedAtInSecond = startedAtInSecond; - this.createdAtInSecond = createdAtInSecond; - this.updatedAtInSecond = updatedAtInSecond; - } - - public long getId() { - return id; - } - - public ConfigType getConfigType() { - return configType; - } - - public String getScope() { - return scope; - } - - public JobConfig getConfig() { - return config; - } - - public List getAttempts() { - return attempts; - } - - public int getAttemptsCount() { - return attempts.size(); - } - - public JobStatus getStatus() { - return status; - } - - public Optional getStartedAtInSecond() { - return Optional.ofNullable(startedAtInSecond); - } - - public long getCreatedAtInSecond() { - return 
createdAtInSecond; - } - - public long getUpdatedAtInSecond() { - return updatedAtInSecond; - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - public Optional getSuccessfulAttempt() { - final List successfulAttempts = getAttempts() - .stream() - .filter(a -> a.getStatus() == AttemptStatus.SUCCEEDED) - .collect(Collectors.toList()); - - Preconditions.checkState(successfulAttempts.size() <= 1, String.format("Job %s has multiple successful attempts.", getId())); - if (successfulAttempts.size() == 1) { - return Optional.of(successfulAttempts.get(0)); - } else { - return Optional.empty(); - } - } - - public Optional getSuccessOutput() { - return getSuccessfulAttempt().flatMap(Attempt::getOutput); - } - - public Optional getLastFailedAttempt() { - return getAttempts() - .stream() - .sorted(Comparator.comparing(Attempt::getCreatedAtInSecond).reversed()) - .filter(a -> a.getStatus() == AttemptStatus.FAILED) - .findFirst(); - } - - public Optional getLastAttemptWithOutput() { - return getAttempts() - .stream() - .sorted(Comparator.comparing(Attempt::getCreatedAtInSecond).reversed()) - .filter(a -> a.getOutput().isPresent() && a.getOutput().get().getSync() != null && a.getOutput().get().getSync().getState() != null) - .findFirst(); - } - - public Optional getLastAttempt() { - return getAttempts() - .stream() - .max(Comparator.comparing(Attempt::getCreatedAtInSecond)); - } - - public Optional getAttemptByNumber(final int attemptNumber) { - return getAttempts() - .stream() - .filter(a -> a.getAttemptNumber() == attemptNumber) - .findFirst(); - } - - public boolean hasRunningAttempt() { - return getAttempts().stream().anyMatch(a -> !Attempt.isAttemptInTerminalState(a)); - } - - public boolean isJobInTerminalState() { - return JobStatus.TERMINAL_STATUSES.contains(getStatus()); - } - - public void validateStatusTransition(final JobStatus newStatus) throws IllegalStateException { - final Set validNewStatuses = JobStatus.VALID_STATUS_CHANGES.get(status); - - if 
(!validNewStatuses.contains(newStatus)) { - throw new IllegalStateException(String.format( - "Transitioning Job %d from JobStatus %s to %s is not allowed. The only valid statuses that an be transitioned to from %s are %s", - id, - status, - newStatus, - status, - JobStatus.VALID_STATUS_CHANGES.get(status))); - } - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final Job job = (Job) o; - return id == job.id && - createdAtInSecond == job.createdAtInSecond && - updatedAtInSecond == job.updatedAtInSecond && - Objects.equals(scope, job.scope) && - Objects.equals(config, job.config) && - status == job.status && - Objects.equals(startedAtInSecond, job.startedAtInSecond) && - Objects.equals(attempts, job.attempts); - } - - @Override - public int hashCode() { - return Objects.hash(id, scope, config, status, startedAtInSecond, createdAtInSecond, updatedAtInSecond, attempts); - } - - @Override - public String toString() { - return "Job{" + - "id=" + id + - ", scope='" + scope + '\'' + - ", config=" + config + - ", status=" + status + - ", startedAtInSecond=" + startedAtInSecond + - ", createdAtInSecond=" + createdAtInSecond + - ", updatedAtInSecond=" + updatedAtInSecond + - ", attempts=" + attempts + - '}'; - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/JobStatus.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/JobStatus.java deleted file mode 100644 index 87ab69bd5853..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/JobStatus.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.models; - -import com.google.common.collect.Sets; -import java.util.Map; -import java.util.Set; - -public enum JobStatus { - - PENDING, - RUNNING, - INCOMPLETE, - FAILED, - SUCCEEDED, - CANCELLED; - - public static final Set TERMINAL_STATUSES = Set.of(FAILED, SUCCEEDED, CANCELLED); - public static final Set NON_TERMINAL_STATUSES = Sets.difference(Set.of(values()), TERMINAL_STATUSES); - - public static final Map> VALID_STATUS_CHANGES = Map.of( - PENDING, Set.of(RUNNING, FAILED, CANCELLED), - RUNNING, Set.of(INCOMPLETE, SUCCEEDED, FAILED, CANCELLED), - INCOMPLETE, Set.of(PENDING, RUNNING, FAILED, CANCELLED, INCOMPLETE), - SUCCEEDED, Set.of(), - FAILED, Set.of(FAILED), - CANCELLED, Set.of()); - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/JobWithStatusAndTimestamp.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/JobWithStatusAndTimestamp.java deleted file mode 100644 index f3d8e0d8714e..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/JobWithStatusAndTimestamp.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.models; - -import java.util.Objects; - -public class JobWithStatusAndTimestamp { - - private final long id; - private final JobStatus status; - private final long createdAtInSecond; - private final long updatedAtInSecond; - - public JobWithStatusAndTimestamp(final long id, - final JobStatus status, - final long createdAtInSecond, - final long updatedAtInSecond) { - this.id = id; - this.status = status; - this.createdAtInSecond = createdAtInSecond; - this.updatedAtInSecond = updatedAtInSecond; - } - - public long getId() { - return id; - } - - public JobStatus getStatus() { - return status; - } - - public long getCreatedAtInSecond() { - return createdAtInSecond; - } - - public long getUpdatedAtInSecond() { - return updatedAtInSecond; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final JobWithStatusAndTimestamp jobWithStatusAndTimestamp = (JobWithStatusAndTimestamp) o; - return id == jobWithStatusAndTimestamp.id && - status == jobWithStatusAndTimestamp.status && - createdAtInSecond == jobWithStatusAndTimestamp.createdAtInSecond && - updatedAtInSecond == jobWithStatusAndTimestamp.updatedAtInSecond; - } - - @Override - public int hashCode() { - return Objects.hash(id, status, createdAtInSecond, updatedAtInSecond); - } - - @Override - public String toString() { - return "Job{" + - "id=" + id + - ", status=" + status + - ", createdAtInSecond=" + createdAtInSecond + - ", updatedAtInSecond=" + updatedAtInSecond + - '}'; - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java deleted file mode 100644 index faf15595212d..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java +++ 
/dev/null @@ -1,428 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.tracker; - -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import edu.umd.cs.findbugs.annotations.Nullable; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.StandardCheckConnectionOutput; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.UUID; - -public class JobTracker { - - public enum JobState { - STARTED, - SUCCEEDED, - FAILED - } - - 
public static final String MESSAGE_NAME = "Connector Jobs"; - public static final String CONFIG = "config"; - public static final String CATALOG = "catalog"; - public static final String OPERATION = "operation."; - public static final String SET = "set"; - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - - private final ConfigRepository configRepository; - private final JobPersistence jobPersistence; - private final WorkspaceHelper workspaceHelper; - private final TrackingClient trackingClient; - - public JobTracker(final ConfigRepository configRepository, final JobPersistence jobPersistence, final TrackingClient trackingClient) { - this(configRepository, jobPersistence, new WorkspaceHelper(configRepository, jobPersistence), trackingClient); - } - - @VisibleForTesting - JobTracker(final ConfigRepository configRepository, - final JobPersistence jobPersistence, - final WorkspaceHelper workspaceHelper, - final TrackingClient trackingClient) { - this.configRepository = configRepository; - this.jobPersistence = jobPersistence; - this.workspaceHelper = workspaceHelper; - this.trackingClient = trackingClient; - } - - public void trackCheckConnectionSource(final UUID jobId, - final UUID sourceDefinitionId, - final UUID workspaceId, - final JobState jobState, - final StandardCheckConnectionOutput output) { - Exceptions.swallow(() -> { - final Map checkConnMetadata = generateCheckConnectionMetadata(output); - final Map jobMetadata = generateJobMetadata(jobId.toString(), ConfigType.CHECK_CONNECTION_SOURCE); - final Map sourceDefMetadata = generateSourceDefinitionMetadata(sourceDefinitionId); - final Map stateMetadata = generateStateMetadata(jobState); - - track(workspaceId, MoreMaps.merge(checkConnMetadata, jobMetadata, sourceDefMetadata, stateMetadata)); - }); - } - - public void trackCheckConnectionDestination(final UUID jobId, - final UUID destinationDefinitionId, - final UUID workspaceId, - final JobState jobState, - final 
StandardCheckConnectionOutput output) { - Exceptions.swallow(() -> { - final Map checkConnMetadata = generateCheckConnectionMetadata(output); - final Map jobMetadata = generateJobMetadata(jobId.toString(), ConfigType.CHECK_CONNECTION_DESTINATION); - final Map destinationDefinitionMetadata = generateDestinationDefinitionMetadata(destinationDefinitionId); - final Map stateMetadata = generateStateMetadata(jobState); - - track(workspaceId, MoreMaps.merge(checkConnMetadata, jobMetadata, destinationDefinitionMetadata, stateMetadata)); - }); - } - - public void trackDiscover(final UUID jobId, final UUID sourceDefinitionId, final UUID workspaceId, final JobState jobState) { - Exceptions.swallow(() -> { - final Map jobMetadata = generateJobMetadata(jobId.toString(), ConfigType.DISCOVER_SCHEMA); - final Map sourceDefMetadata = generateSourceDefinitionMetadata(sourceDefinitionId); - final Map stateMetadata = generateStateMetadata(jobState); - - track(workspaceId, MoreMaps.merge(jobMetadata, sourceDefMetadata, stateMetadata)); - }); - } - - // used for tracking all asynchronous jobs (sync and reset). 
- public void trackSync(final Job job, final JobState jobState) { - Exceptions.swallow(() -> { - final ConfigType configType = job.getConfigType(); - final boolean allowedJob = configType == ConfigType.SYNC || configType == ConfigType.RESET_CONNECTION; - Preconditions.checkArgument(allowedJob, "Job type " + configType + " is not allowed!"); - final long jobId = job.getId(); - final Optional lastAttempt = job.getLastAttempt(); - final Optional attemptSyncConfig = lastAttempt.flatMap(Attempt::getSyncConfig); - - final UUID connectionId = UUID.fromString(job.getScope()); - final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromConnection(connectionId); - final StandardDestinationDefinition destinationDefinition = configRepository.getDestinationDefinitionFromConnection(connectionId); - - final Map jobMetadata = generateJobMetadata(String.valueOf(jobId), configType, job.getAttemptsCount()); - final Map jobAttemptMetadata = generateJobAttemptMetadata(jobId, jobState); - final Map sourceDefMetadata = generateSourceDefinitionMetadata(sourceDefinition); - final Map destinationDefMetadata = generateDestinationDefinitionMetadata(destinationDefinition); - final Map syncMetadata = generateSyncMetadata(connectionId); - final Map stateMetadata = generateStateMetadata(jobState); - final Map syncConfigMetadata = generateSyncConfigMetadata( - job.getConfig(), - attemptSyncConfig.orElse(null), - sourceDefinition.getSpec().getConnectionSpecification(), - destinationDefinition.getSpec().getConnectionSpecification()); - - final UUID workspaceId = workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(jobId); - track(workspaceId, - MoreMaps.merge( - jobMetadata, - jobAttemptMetadata, - sourceDefMetadata, - destinationDefMetadata, - syncMetadata, - stateMetadata, - syncConfigMetadata)); - }); - } - - public void trackSyncForInternalFailure(final Long jobId, - final UUID connectionId, - final Integer attempts, - final JobState jobState, - final Exception e) 
{ - Exceptions.swallow(() -> { - final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromConnection(connectionId); - final StandardDestinationDefinition destinationDefinition = configRepository.getDestinationDefinitionFromConnection(connectionId); - - final Map jobMetadata = generateJobMetadata(String.valueOf(jobId), null, attempts); - final Map jobAttemptMetadata = generateJobAttemptMetadata(jobId, jobState); - final Map sourceDefMetadata = generateSourceDefinitionMetadata(sourceDefinition); - final Map destinationDefMetadata = generateDestinationDefinitionMetadata(destinationDefinition); - final Map syncMetadata = generateSyncMetadata(connectionId); - final Map stateMetadata = generateStateMetadata(jobState); - final Map generalMetadata = Map.of("connection_id", connectionId, "internal_error_cause", e.getMessage(), - "internal_error_type", e.getClass().getName()); - - final UUID workspaceId = workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(jobId); - - track(workspaceId, - MoreMaps.merge( - jobMetadata, - jobAttemptMetadata, - sourceDefMetadata, - destinationDefMetadata, - syncMetadata, - stateMetadata, - generalMetadata)); - }); - } - - private Map generateSyncConfigMetadata( - final JobConfig config, - @Nullable final AttemptSyncConfig attemptSyncConfig, - final JsonNode sourceConfigSchema, - final JsonNode destinationConfigSchema) { - if (config.getConfigType() == ConfigType.SYNC) { - final Map actorConfigMetadata = new HashMap<>(); - - if (attemptSyncConfig != null) { - final JsonNode sourceConfiguration = attemptSyncConfig.getSourceConfiguration(); - final JsonNode destinationConfiguration = attemptSyncConfig.getDestinationConfiguration(); - - final Map sourceMetadata = configToMetadata(CONFIG + ".source", sourceConfiguration, sourceConfigSchema); - final Map destinationMetadata = configToMetadata(CONFIG + ".destination", destinationConfiguration, destinationConfigSchema); - - actorConfigMetadata.putAll(sourceMetadata); 
- actorConfigMetadata.putAll(destinationMetadata); - } - - final Map catalogMetadata = getCatalogMetadata(config.getSync().getConfiguredAirbyteCatalog()); - return MoreMaps.merge(actorConfigMetadata, catalogMetadata); - } else { - return emptyMap(); - } - } - - private Map getCatalogMetadata(final ConfiguredAirbyteCatalog catalog) { - final Map output = new HashMap<>(); - - for (final ConfiguredAirbyteStream stream : catalog.getStreams()) { - output.put(CATALOG + ".sync_mode." + stream.getSyncMode().name().toLowerCase(), SET); - output.put(CATALOG + ".destination_sync_mode." + stream.getDestinationSyncMode().name().toLowerCase(), SET); - } - - return output; - } - - /** - * Flattens a config into a map. Uses the schema to determine which fields are const (i.e. - * non-sensitive). Non-const, non-boolean values are replaced with {@link #SET} to avoid leaking - * potentially-sensitive information. - *

- * anyOf/allOf schemas are treated as non-const values. These aren't (currently) used in config - * schemas anyway. - * - * @param jsonPath A prefix to add to all the keys in the returned map, with a period (`.`) - * separator - * @param schema The JSON schema that {@code config} conforms to - */ - protected static Map configToMetadata(final String jsonPath, final JsonNode config, final JsonNode schema) { - final Map metadata = configToMetadata(config, schema); - // Prepend all the keys with the root jsonPath - // But leave the values unchanged - final Map output = new HashMap<>(); - Jsons.mergeMaps(output, jsonPath, metadata); - return output; - } - - /** - * Does the actually interesting bits of configToMetadata. If config is an object, returns a - * flattened map. If config is _not_ an object (i.e. it's a primitive string/number/etc, or it's an - * array) then returns a map of {null: toMetadataValue(config)}. - */ - @SuppressWarnings("PMD.ForLoopCanBeForeach") - private static Map configToMetadata(final JsonNode config, final JsonNode schema) { - if (schema.hasNonNull("const") || schema.hasNonNull("enum")) { - // If this schema is a const or an enum, then just dump it into a map: - // * If it's an object, flatten it - // * Otherwise, do some basic conversions to value-ish data. - // It would be a weird thing to declare const: null, but in that case we don't want to report null - // anyway, so explicitly use hasNonNull. 
- return Jsons.flatten(config); - } else if (schema.has("oneOf")) { - // If this schema is a oneOf, then find the first sub-schema which the config matches - // and use that sub-schema to convert the config to a map - final JsonSchemaValidator validator = new JsonSchemaValidator(); - for (final Iterator it = schema.get("oneOf").elements(); it.hasNext();) { - final JsonNode subSchema = it.next(); - if (validator.test(subSchema, config)) { - return configToMetadata(config, subSchema); - } - } - // If we didn't match any of the subschemas, then something is wrong. Bail out silently. - return emptyMap(); - } else if (config.isObject()) { - // If the schema is not a oneOf, but the config is an object (i.e. the schema has "type": "object") - // then we need to recursively convert each field of the object to a map. - final Map output = new HashMap<>(); - final JsonNode maybeProperties = schema.get("properties"); - - // If additionalProperties is not set, or it's a boolean, then there's no schema for additional - // properties. Use the accept-all schema. - // Otherwise, it's an actual schema. 
- final JsonNode maybeAdditionalProperties = schema.get("additionalProperties"); - final JsonNode additionalPropertiesSchema; - if (maybeAdditionalProperties == null || maybeAdditionalProperties.isBoolean()) { - additionalPropertiesSchema = OBJECT_MAPPER.createObjectNode(); - } else { - additionalPropertiesSchema = maybeAdditionalProperties; - } - - for (final Iterator> it = config.fields(); it.hasNext();) { - final Entry entry = it.next(); - final String field = entry.getKey(); - final JsonNode value = entry.getValue(); - - final JsonNode propertySchema; - if (maybeProperties != null && maybeProperties.hasNonNull(field)) { - // If this property is explicitly declared, then use its schema - propertySchema = maybeProperties.get(field); - } else { - // otherwise, use the additionalProperties schema - propertySchema = additionalPropertiesSchema; - } - - Jsons.mergeMaps(output, field, configToMetadata(value, propertySchema)); - } - return output; - } else if (config.isBoolean()) { - return singletonMap(null, config.asBoolean()); - } else if ((!config.isTextual() && !config.isNull()) || (config.isTextual() && !config.asText().isEmpty())) { - // This is either non-textual (e.g. 
integer, array, etc) or non-empty text - return singletonMap(null, SET); - } else { - // Otherwise, this is an empty string, so just ignore it - return emptyMap(); - } - } - - private Map generateSyncMetadata(final UUID connectionId) throws ConfigNotFoundException, IOException, JsonValidationException { - final Map operationUsage = new HashMap<>(); - final StandardSync standardSync = configRepository.getStandardSync(connectionId); - for (final UUID operationId : standardSync.getOperationIds()) { - final StandardSyncOperation operation = configRepository.getStandardSyncOperation(operationId); - if (operation != null) { - final Integer usageCount = (Integer) operationUsage.getOrDefault(OPERATION + operation.getOperatorType(), 0); - operationUsage.put(OPERATION + operation.getOperatorType(), usageCount + 1); - } - } - - final Map streamCountData = new HashMap<>(); - final Integer streamCount = standardSync.getCatalog().getStreams().size(); - streamCountData.put("number_of_streams", streamCount); - - return MoreMaps.merge(TrackingMetadata.generateSyncMetadata(standardSync), operationUsage, streamCountData); - } - - private static Map generateStateMetadata(final JobState jobState) { - final Map metadata = new HashMap<>(); - - if (JobState.STARTED.equals(jobState)) { - metadata.put("attempt_stage", "STARTED"); - } else if (List.of(JobState.SUCCEEDED, JobState.FAILED).contains(jobState)) { - metadata.put("attempt_stage", "ENDED"); - metadata.put("attempt_completion_status", jobState); - } - - return Collections.unmodifiableMap(metadata); - } - - /** - * The CheckConnection jobs (both source and destination) of the - * {@link io.airbyte.scheduler.client.SynchronousSchedulerClient} interface can have a successful - * job with a failed check. Because of this, tracking just the job attempt status does not capture - * the whole picture. The `check_connection_outcome` field tracks this. 
- */ - private Map generateCheckConnectionMetadata(final StandardCheckConnectionOutput output) { - if (output == null) { - return Map.of(); - } - return Map.of("check_connection_outcome", output.getStatus().toString()); - } - - private Map generateDestinationDefinitionMetadata(final UUID destinationDefinitionId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition(destinationDefinitionId); - return generateDestinationDefinitionMetadata(destinationDefinition); - } - - private Map generateDestinationDefinitionMetadata(final StandardDestinationDefinition destinationDefinition) { - return TrackingMetadata.generateDestinationDefinitionMetadata(destinationDefinition); - } - - private Map generateSourceDefinitionMetadata(final UUID sourceDefinitionId) - throws JsonValidationException, ConfigNotFoundException, IOException { - final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); - return generateSourceDefinitionMetadata(sourceDefinition); - } - - private Map generateSourceDefinitionMetadata(final StandardSourceDefinition sourceDefinition) { - return TrackingMetadata.generateSourceDefinitionMetadata(sourceDefinition); - } - - private Map generateJobMetadata(final String jobId, final ConfigType configType) { - return generateJobMetadata(jobId, configType, 0); - } - - private Map generateJobMetadata(final String jobId, final ConfigType configType, final int attempt) { - final Map metadata = new HashMap<>(); - if (configType != null) { - metadata.put("job_type", configType); - } - metadata.put("job_id", jobId); - metadata.put("attempt_id", attempt); - - return Collections.unmodifiableMap(metadata); - } - - private Map generateJobAttemptMetadata(final long jobId, final JobState jobState) throws IOException { - final Job job = jobPersistence.getJob(jobId); - if (jobState != 
JobState.STARTED) { - return TrackingMetadata.generateJobAttemptMetadata(job); - } else { - return Map.of(); - } - } - - private void track(final UUID workspaceId, final Map metadata) - throws JsonValidationException, ConfigNotFoundException, IOException { - // unfortunate but in the case of jobs that cannot be linked to a workspace there not a sensible way - // track it. - if (workspaceId != null) { - final StandardWorkspace standardWorkspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - if (standardWorkspace != null && standardWorkspace.getName() != null) { - final Map standardTrackingMetadata = Map.of( - "workspace_id", workspaceId, - "workspace_name", standardWorkspace.getName()); - - trackingClient.track(workspaceId, MESSAGE_NAME, MoreMaps.merge(metadata, standardTrackingMetadata)); - } - } - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java deleted file mode 100644 index 086de98d65bb..000000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.tracker; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.AttemptFailureSummary; -import io.airbyte.config.FailureReason; -import io.airbyte.config.JobOutput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.ScheduleData; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSync.ScheduleType; -import io.airbyte.config.StandardSyncSummary; -import io.airbyte.config.SyncStats; -import io.airbyte.config.helpers.ScheduleHelpers; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.Job; -import java.util.Collection; -import java.util.Comparator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.TimeUnit; -import org.apache.logging.log4j.util.Strings; - -public class TrackingMetadata { - - public static Map generateSyncMetadata(final StandardSync standardSync) { - final Builder metadata = ImmutableMap.builder(); - metadata.put("connection_id", standardSync.getConnectionId()); - - final String frequencyString; - if (standardSync.getScheduleType() != null) { - frequencyString = getFrequencyStringFromScheduleType(standardSync.getScheduleType(), standardSync.getScheduleData()); - } else if (standardSync.getManual()) { - frequencyString = "manual"; - } else { - final long intervalInMinutes = TimeUnit.SECONDS.toMinutes(ScheduleHelpers.getIntervalInSecond(standardSync.getSchedule())); - frequencyString = intervalInMinutes + " min"; - } - metadata.put("frequency", frequencyString); - - final 
int operationCount = standardSync.getOperationIds() != null ? standardSync.getOperationIds().size() : 0; - metadata.put("operation_count", operationCount); - if (standardSync.getNamespaceDefinition() != null) { - metadata.put("namespace_definition", standardSync.getNamespaceDefinition()); - } - - final boolean isUsingPrefix = standardSync.getPrefix() != null && !standardSync.getPrefix().isBlank(); - metadata.put("table_prefix", isUsingPrefix); - - final ResourceRequirements resourceRequirements = standardSync.getResourceRequirements(); - - if (resourceRequirements != null) { - if (!com.google.common.base.Strings.isNullOrEmpty(resourceRequirements.getCpuRequest())) { - metadata.put("sync_cpu_request", resourceRequirements.getCpuRequest()); - } - if (!com.google.common.base.Strings.isNullOrEmpty(resourceRequirements.getCpuLimit())) { - metadata.put("sync_cpu_limit", resourceRequirements.getCpuLimit()); - } - if (!com.google.common.base.Strings.isNullOrEmpty(resourceRequirements.getMemoryRequest())) { - metadata.put("sync_memory_request", resourceRequirements.getMemoryRequest()); - } - if (!com.google.common.base.Strings.isNullOrEmpty(resourceRequirements.getMemoryLimit())) { - metadata.put("sync_memory_limit", resourceRequirements.getMemoryLimit()); - } - } - return metadata.build(); - } - - public static Map generateDestinationDefinitionMetadata(final StandardDestinationDefinition destinationDefinition) { - final Builder metadata = ImmutableMap.builder(); - metadata.put("connector_destination", destinationDefinition.getName()); - metadata.put("connector_destination_definition_id", destinationDefinition.getDestinationDefinitionId()); - metadata.put("connector_destination_docker_repository", destinationDefinition.getDockerRepository()); - final String imageTag = destinationDefinition.getDockerImageTag(); - if (!Strings.isEmpty(imageTag)) { - metadata.put("connector_destination_version", imageTag); - } - return metadata.build(); - } - - public static Map 
generateSourceDefinitionMetadata(final StandardSourceDefinition sourceDefinition) { - final Builder metadata = ImmutableMap.builder(); - metadata.put("connector_source", sourceDefinition.getName()); - metadata.put("connector_source_definition_id", sourceDefinition.getSourceDefinitionId()); - metadata.put("connector_source_docker_repository", sourceDefinition.getDockerRepository()); - final String imageTag = sourceDefinition.getDockerImageTag(); - if (!Strings.isEmpty(imageTag)) { - metadata.put("connector_source_version", imageTag); - } - return metadata.build(); - } - - public static Map generateJobAttemptMetadata(final Job job) { - final Builder metadata = ImmutableMap.builder(); - if (job != null) { - final List attempts = job.getAttempts(); - if (attempts != null && !attempts.isEmpty()) { - final Attempt lastAttempt = attempts.get(attempts.size() - 1); - if (lastAttempt.getOutput() != null && lastAttempt.getOutput().isPresent()) { - final JobOutput jobOutput = lastAttempt.getOutput().get(); - if (jobOutput.getSync() != null) { - final StandardSyncSummary syncSummary = jobOutput.getSync().getStandardSyncSummary(); - final SyncStats totalStats = syncSummary.getTotalStats(); - final NormalizationSummary normalizationSummary = jobOutput.getSync().getNormalizationSummary(); - - if (syncSummary.getStartTime() != null) - metadata.put("sync_start_time", syncSummary.getStartTime()); - if (syncSummary.getEndTime() != null && syncSummary.getStartTime() != null) - metadata.put("duration", Math.round((syncSummary.getEndTime() - syncSummary.getStartTime()) / 1000.0)); - if (syncSummary.getBytesSynced() != null) - metadata.put("volume_mb", syncSummary.getBytesSynced()); - if (syncSummary.getRecordsSynced() != null) - metadata.put("volume_rows", syncSummary.getRecordsSynced()); - if (totalStats.getSourceStateMessagesEmitted() != null) - metadata.put("count_state_messages_from_source", syncSummary.getTotalStats().getSourceStateMessagesEmitted()); - if 
(totalStats.getDestinationStateMessagesEmitted() != null) - metadata.put("count_state_messages_from_destination", syncSummary.getTotalStats().getDestinationStateMessagesEmitted()); - if (totalStats.getMaxSecondsBeforeSourceStateMessageEmitted() != null) - metadata.put("max_seconds_before_source_state_message_emitted", - totalStats.getMaxSecondsBeforeSourceStateMessageEmitted()); - if (totalStats.getMeanSecondsBeforeSourceStateMessageEmitted() != null) - metadata.put("mean_seconds_before_source_state_message_emitted", - totalStats.getMeanSecondsBeforeSourceStateMessageEmitted()); - if (totalStats.getMaxSecondsBetweenStateMessageEmittedandCommitted() != null) - metadata.put("max_seconds_between_state_message_emit_and_commit", - totalStats.getMaxSecondsBetweenStateMessageEmittedandCommitted()); - if (totalStats.getMeanSecondsBetweenStateMessageEmittedandCommitted() != null) - metadata.put("mean_seconds_between_state_message_emit_and_commit", - totalStats.getMeanSecondsBetweenStateMessageEmittedandCommitted()); - - if (totalStats.getReplicationStartTime() != null) - metadata.put("replication_start_time", totalStats.getReplicationStartTime()); - if (totalStats.getReplicationEndTime() != null) - metadata.put("replication_end_time", totalStats.getReplicationEndTime()); - if (totalStats.getSourceReadStartTime() != null) - metadata.put("source_read_start_time", totalStats.getSourceReadStartTime()); - if (totalStats.getSourceReadEndTime() != null) - metadata.put("source_read_end_time", totalStats.getSourceReadEndTime()); - if (totalStats.getDestinationWriteStartTime() != null) - metadata.put("destination_write_start_time", totalStats.getDestinationWriteStartTime()); - if (totalStats.getDestinationWriteEndTime() != null) - metadata.put("destination_write_end_time", totalStats.getDestinationWriteEndTime()); - - if (normalizationSummary != null) { - if (normalizationSummary.getStartTime() != null) - metadata.put("normalization_start_time", normalizationSummary.getStartTime()); 
- if (normalizationSummary.getEndTime() != null) - metadata.put("normalization_end_time", normalizationSummary.getEndTime()); - } - } - } - - final List failureReasons = failureReasonsList(attempts); - if (!failureReasons.isEmpty()) { - metadata.put("failure_reasons", failureReasonsListAsJson(failureReasons).toString()); - metadata.put("main_failure_reason", failureReasonAsJson(failureReasons.get(0)).toString()); - } - } - } - return metadata.build(); - } - - private static List failureReasonsList(final List attempts) { - return attempts - .stream() - .map(Attempt::getFailureSummary) - .flatMap(Optional::stream) - .map(AttemptFailureSummary::getFailures) - .flatMap(Collection::stream) - .sorted(Comparator.comparing(FailureReason::getTimestamp)) - .toList(); - } - - private static ArrayNode failureReasonsListAsJson(final List failureReasons) { - return Jsons.arrayNode().addAll(failureReasons - .stream() - .map(TrackingMetadata::failureReasonAsJson) - .toList()); - } - - private static JsonNode failureReasonAsJson(final FailureReason failureReason) { - // we want the json to always include failureOrigin and failureType, even when they are null - final LinkedHashMap linkedHashMap = new LinkedHashMap<>(); - linkedHashMap.put("failureOrigin", failureReason.getFailureOrigin()); - linkedHashMap.put("failureType", failureReason.getFailureType()); - linkedHashMap.put("internalMessage", failureReason.getInternalMessage()); - linkedHashMap.put("externalMessage", failureReason.getExternalMessage()); - linkedHashMap.put("metadata", failureReason.getMetadata()); - linkedHashMap.put("retryable", failureReason.getRetryable()); - linkedHashMap.put("timestamp", failureReason.getTimestamp()); - - return Jsons.jsonNode(linkedHashMap); - } - - private static String getFrequencyStringFromScheduleType(final ScheduleType scheduleType, final ScheduleData scheduleData) { - switch (scheduleType) { - case MANUAL -> { - return "manual"; - } - case BASIC_SCHEDULE -> { - return 
TimeUnit.SECONDS.toMinutes(ScheduleHelpers.getIntervalInSecond(scheduleData.getBasicSchedule())) + " min"; - } - case CRON -> { - // TODO(https://github.com/airbytehq/airbyte/issues/2170): consider something more detailed. - return "cron"; - } - default -> { - throw new RuntimeException("Unexpected schedule type"); - } - } - } - -} diff --git a/airbyte-persistence/job-persistence/src/main/resources/example_config.json b/airbyte-persistence/job-persistence/src/main/resources/example_config.json deleted file mode 100644 index edf2274370a5..000000000000 --- a/airbyte-persistence/job-persistence/src/main/resources/example_config.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "username": "some_user", - "password": "hunter2", - "has_ssl": false, - "empty_string": "", - "null_value": null, - "one_of": { - "type_key": "foo", - "some_key": 100 - }, - "const_object": { - "sub_key": "bar", - "sub_array": [1, 2, 3], - "sub_object": { - "sub_sub_key": "baz" - } - }, - "const_null": null, - "enum_string": "foo", - "additionalPropertiesUnset": { - "foo": "bar" - }, - "additionalPropertiesBoolean": { - "foo": "bar" - }, - "additionalPropertiesSchema": { - "foo": 42 - }, - "additionalPropertiesConst": { - "foo": 42 - }, - "additionalPropertiesEnumString": "foo" -} diff --git a/airbyte-persistence/job-persistence/src/main/resources/example_config_schema.json b/airbyte-persistence/job-persistence/src/main/resources/example_config_schema.json deleted file mode 100644 index 193bd141bd0a..000000000000 --- a/airbyte-persistence/job-persistence/src/main/resources/example_config_schema.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "type": "object", - "properties": { - "username": { - "type": "string" - }, - "password": { - "type": "string" - }, - "has_ssl": { - "type": "boolean" - }, - "empty_string": { - "type": "string" - }, - "null_value": { - "type": "null" - }, - "one_of": { - "type": "object", - "oneOf": [ - { - "type": "object", - "properties": { - "type_key": { - "const": "foo" - }, - 
"some_key": { - "type": "integer" - } - } - } - ] - }, - "const_object": { - "const": { - "sub_key": "bar", - "sub_array": [1, 2, 3], - "sub_object": { - "sub_sub_key": "baz" - } - } - }, - "const_null": { - "const": null - }, - "enum_string": { - "type": "string", - "enum": ["foo", "bar"] - }, - "additionalPropertiesUnset": { - "type": "object" - }, - "additionalPropertiesBoolean": { - "type": "object", - "additionalProperties": true - }, - "additionalPropertiesSchema": { - "type": "object", - "additionalProperties": { - "type": "integer" - } - }, - "additionalPropertiesConst": { - "type": "object", - "additionalProperties": { - "const": 42 - } - }, - "additionalPropertiesEnumString": { - "type": "string", - "enum": ["foo", "bar"] - } - } -} diff --git a/airbyte-persistence/job-persistence/src/main/resources/job_history_purge.sql b/airbyte-persistence/job-persistence/src/main/resources/job_history_purge.sql deleted file mode 100644 index 449039a0f033..000000000000 --- a/airbyte-persistence/job-persistence/src/main/resources/job_history_purge.sql +++ /dev/null @@ -1,100 +0,0 @@ -DELETE -FROM - jobs -WHERE - jobs.id IN( - SELECT - jobs.id - FROM - jobs - LEFT JOIN( - SELECT - SCOPE, - COUNT( jobs.id ) AS jobCount - FROM - jobs - GROUP BY - SCOPE - ) counts ON - jobs.scope = counts.scope - WHERE - /* job must be at least MINIMUM_AGE_IN_DAYS old or connection has more than EXCESSIVE_NUMBER_OF_JOBS */ - ( - jobs.created_at <( - TO_TIMESTAMP( - ?, - 'YYYY-MM-DD' - )- INTERVAL '%d' DAY - ) - OR counts.jobCount >? 
- ) - AND jobs.id NOT IN( - /* cannot be the most recent job with saved state */ - SELECT - job_id AS latest_job_id_with_state - FROM - ( - SELECT - jobs.scope, - jobs.id AS job_id, - jobs.config_type, - jobs.created_at, - jobs.status, - bool_or( - attempts."output" -> 'sync' -> 'state' -> 'state' IS NOT NULL - ) AS outputStateExists, - ROW_NUMBER() OVER( - PARTITION BY SCOPE - ORDER BY - jobs.created_at DESC, - jobs.id DESC - ) AS stateRecency - FROM - jobs - LEFT JOIN attempts ON - jobs.id = attempts.job_id - GROUP BY - SCOPE, - jobs.id - HAVING - bool_or( - attempts."output" -> 'sync' -> 'state' -> 'state' IS NOT NULL - )= TRUE - ORDER BY - SCOPE, - jobs.created_at DESC, - jobs.id DESC - ) jobs_with_state - WHERE - stateRecency = 1 - ) - AND jobs.id NOT IN( - /* cannot be one of the last MINIMUM_RECENCY jobs for that connection/scope */ - SELECT - id - FROM - ( - SELECT - jobs.scope, - jobs.id, - jobs.created_at, - ROW_NUMBER() OVER( - PARTITION BY SCOPE - ORDER BY - jobs.created_at DESC, - jobs.id DESC - ) AS recency - FROM - jobs - GROUP BY - SCOPE, - jobs.id - ORDER BY - SCOPE, - jobs.created_at DESC, - jobs.id DESC - ) jobs_by_recency - WHERE - recency <=? - ) - ) diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java deleted file mode 100644 index 77c8e35fd265..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java +++ /dev/null @@ -1,491 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobResetConnectionConfig; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.JobTypeResourceLimit; -import io.airbyte.config.JobTypeResourceLimit.JobType; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; -import io.airbyte.config.ResetSourceConfiguration; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.DestinationSyncMode; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.protocol.models.SyncMode; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import 
java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DefaultJobCreatorTest { - - private static final String STREAM1_NAME = "stream1"; - private static final String STREAM2_NAME = "stream2"; - private static final String STREAM3_NAME = "stream3"; - private static final String NAMESPACE = "namespace"; - private static final String FIELD_NAME = "id"; - private static final StreamDescriptor STREAM1_DESCRIPTOR = new StreamDescriptor().withName(STREAM1_NAME); - private static final StreamDescriptor STREAM2_DESCRIPTOR = new StreamDescriptor().withName(STREAM2_NAME).withNamespace(NAMESPACE); - - private static final String SOURCE_IMAGE_NAME = "daxtarity/sourceimagename"; - private static final Version SOURCE_PROTOCOL_VERSION = new Version("0.2.2"); - private static final String DESTINATION_IMAGE_NAME = "daxtarity/destinationimagename"; - private static final Version DESTINATION_PROTOCOL_VERSION = new Version("0.2.3"); - private static final SourceConnection SOURCE_CONNECTION; - private static final DestinationConnection DESTINATION_CONNECTION; - private static final StandardSync STANDARD_SYNC; - private static final StandardSyncOperation STANDARD_SYNC_OPERATION; - - private static final StandardSourceDefinition STANDARD_SOURCE_DEFINITION; - private static final StandardDestinationDefinition STANDARD_DESTINATION_DEFINITION; - private static final long JOB_ID = 12L; - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - - private JobPersistence jobPersistence; - private JobCreator jobCreator; - private ResourceRequirements workerResourceRequirements; - - private static final JsonNode PERSISTED_WEBHOOK_CONFIGS; - - private static final UUID WEBHOOK_CONFIG_ID; - private static final String WEBHOOK_NAME; - - static { - final UUID workspaceId = UUID.randomUUID(); - final UUID sourceId = UUID.randomUUID(); - final UUID sourceDefinitionId = UUID.randomUUID(); - WEBHOOK_CONFIG_ID = UUID.randomUUID(); - WEBHOOK_NAME = 
"test-name"; - - final JsonNode implementationJson = Jsons.jsonNode(ImmutableMap.builder() - .put("apiKey", "123-abc") - .put("hostname", "airbyte.io") - .build()); - - SOURCE_CONNECTION = new SourceConnection() - .withWorkspaceId(workspaceId) - .withSourceDefinitionId(sourceDefinitionId) - .withSourceId(sourceId) - .withConfiguration(implementationJson) - .withTombstone(false); - - final UUID destinationId = UUID.randomUUID(); - final UUID destinationDefinitionId = UUID.randomUUID(); - - DESTINATION_CONNECTION = new DestinationConnection() - .withWorkspaceId(workspaceId) - .withDestinationDefinitionId(destinationDefinitionId) - .withDestinationId(destinationId) - .withConfiguration(implementationJson) - .withTombstone(false); - - final UUID connectionId = UUID.randomUUID(); - final UUID operationId = UUID.randomUUID(); - - final ConfiguredAirbyteStream stream1 = new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM1_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.APPEND); - final ConfiguredAirbyteStream stream2 = new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM2_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.INCREMENTAL) - .withDestinationSyncMode(DestinationSyncMode.APPEND); - final ConfiguredAirbyteStream stream3 = new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM3_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.OVERWRITE); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of(stream1, stream2, stream3)); - - STANDARD_SYNC = new StandardSync() - .withConnectionId(connectionId) - .withName("presto to hudi") - .withNamespaceDefinition(NamespaceDefinitionType.SOURCE) - .withNamespaceFormat(null) 
- .withPrefix("presto_to_hudi") - .withStatus(StandardSync.Status.ACTIVE) - .withCatalog(catalog) - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withOperationIds(List.of(operationId)); - - STANDARD_SYNC_OPERATION = new StandardSyncOperation() - .withOperationId(operationId) - .withName("normalize") - .withTombstone(false) - .withOperatorType(OperatorType.NORMALIZATION) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)); - - PERSISTED_WEBHOOK_CONFIGS = Jsons.deserialize( - String.format("{\"webhookConfigs\": [{\"id\": \"%s\", \"name\": \"%s\", \"authToken\": {\"_secret\": \"a-secret_v1\"}}]}", - WEBHOOK_CONFIG_ID, WEBHOOK_NAME)); - - STANDARD_SOURCE_DEFINITION = new StandardSourceDefinition().withCustom(false); - STANDARD_DESTINATION_DEFINITION = new StandardDestinationDefinition().withCustom(false); - } - - @BeforeEach - void setup() { - jobPersistence = mock(JobPersistence.class); - workerResourceRequirements = new ResourceRequirements() - .withCpuLimit("0.2") - .withCpuRequest("0.2") - .withMemoryLimit("200Mi") - .withMemoryRequest("200Mi"); - jobCreator = new DefaultJobCreator(jobPersistence, workerResourceRequirements); - } - - @Test - void testCreateSyncJob() throws IOException { - final JobSyncConfig jobSyncConfig = new JobSyncConfig() - .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) - .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) - .withPrefix(STANDARD_SYNC.getPrefix()) - .withSourceDockerImage(SOURCE_IMAGE_NAME) - .withSourceProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withDestinationDockerImage(DESTINATION_IMAGE_NAME) - .withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withConfiguredAirbyteCatalog(STANDARD_SYNC.getCatalog()) - .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(workerResourceRequirements) - .withSourceResourceRequirements(workerResourceRequirements) - .withDestinationResourceRequirements(workerResourceRequirements) 
- .withWebhookOperationConfigs(PERSISTED_WEBHOOK_CONFIGS) - .withIsSourceCustomConnector(false) - .withIsDestinationCustomConnector(false) - .withWorkspaceId(WORKSPACE_ID); - - final JobConfig jobConfig = new JobConfig() - .withConfigType(JobConfig.ConfigType.SYNC) - .withSync(jobSyncConfig); - - final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); - when(jobPersistence.enqueueJob(expectedScope, jobConfig)).thenReturn(Optional.of(JOB_ID)); - - final long jobId = jobCreator.createSyncJob( - SOURCE_CONNECTION, - DESTINATION_CONNECTION, - STANDARD_SYNC, - SOURCE_IMAGE_NAME, - SOURCE_PROTOCOL_VERSION, - DESTINATION_IMAGE_NAME, - DESTINATION_PROTOCOL_VERSION, - List.of(STANDARD_SYNC_OPERATION), - PERSISTED_WEBHOOK_CONFIGS, - STANDARD_SOURCE_DEFINITION, - STANDARD_DESTINATION_DEFINITION, WORKSPACE_ID).orElseThrow(); - assertEquals(JOB_ID, jobId); - } - - @Test - void testCreateSyncJobEnsureNoQueuing() throws IOException { - final JobSyncConfig jobSyncConfig = new JobSyncConfig() - .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) - .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) - .withPrefix(STANDARD_SYNC.getPrefix()) - .withSourceDockerImage(SOURCE_IMAGE_NAME) - .withDestinationProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withDestinationDockerImage(DESTINATION_IMAGE_NAME) - .withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withConfiguredAirbyteCatalog(STANDARD_SYNC.getCatalog()) - .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(workerResourceRequirements); - - final JobConfig jobConfig = new JobConfig() - .withConfigType(JobConfig.ConfigType.SYNC) - .withSync(jobSyncConfig); - - final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); - when(jobPersistence.enqueueJob(expectedScope, jobConfig)).thenReturn(Optional.empty()); - - assertTrue(jobCreator.createSyncJob( - SOURCE_CONNECTION, - DESTINATION_CONNECTION, - STANDARD_SYNC, - SOURCE_IMAGE_NAME, - 
SOURCE_PROTOCOL_VERSION, - DESTINATION_IMAGE_NAME, - DESTINATION_PROTOCOL_VERSION, - List.of(STANDARD_SYNC_OPERATION), - null, - STANDARD_SOURCE_DEFINITION, STANDARD_DESTINATION_DEFINITION, UUID.randomUUID()).isEmpty()); - } - - @Test - void testCreateSyncJobDefaultWorkerResourceReqs() throws IOException { - jobCreator.createSyncJob( - SOURCE_CONNECTION, - DESTINATION_CONNECTION, - STANDARD_SYNC, - SOURCE_IMAGE_NAME, - SOURCE_PROTOCOL_VERSION, - DESTINATION_IMAGE_NAME, - DESTINATION_PROTOCOL_VERSION, - List.of(STANDARD_SYNC_OPERATION), - null, - STANDARD_SOURCE_DEFINITION, STANDARD_DESTINATION_DEFINITION, WORKSPACE_ID); - - final JobSyncConfig expectedJobSyncConfig = new JobSyncConfig() - .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) - .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) - .withPrefix(STANDARD_SYNC.getPrefix()) - .withSourceDockerImage(SOURCE_IMAGE_NAME) - .withSourceProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withDestinationDockerImage(DESTINATION_IMAGE_NAME) - .withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withConfiguredAirbyteCatalog(STANDARD_SYNC.getCatalog()) - .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(workerResourceRequirements) - .withSourceResourceRequirements(workerResourceRequirements) - .withDestinationResourceRequirements(workerResourceRequirements) - .withIsSourceCustomConnector(false) - .withIsDestinationCustomConnector(false) - .withWorkspaceId(WORKSPACE_ID); - - final JobConfig expectedJobConfig = new JobConfig() - .withConfigType(JobConfig.ConfigType.SYNC) - .withSync(expectedJobSyncConfig); - - final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); - - verify(jobPersistence, times(1)).enqueueJob(expectedScope, expectedJobConfig); - } - - @Test - void testCreateSyncJobConnectionResourceReqs() throws IOException { - final ResourceRequirements standardSyncResourceRequirements = new ResourceRequirements() - .withCpuLimit("0.5") - 
.withCpuRequest("0.5") - .withMemoryLimit("500Mi") - .withMemoryRequest("500Mi"); - final StandardSync standardSync = Jsons.clone(STANDARD_SYNC).withResourceRequirements(standardSyncResourceRequirements); - - jobCreator.createSyncJob( - SOURCE_CONNECTION, - DESTINATION_CONNECTION, - standardSync, - SOURCE_IMAGE_NAME, - SOURCE_PROTOCOL_VERSION, - DESTINATION_IMAGE_NAME, - DESTINATION_PROTOCOL_VERSION, - List.of(STANDARD_SYNC_OPERATION), - null, - STANDARD_SOURCE_DEFINITION, STANDARD_DESTINATION_DEFINITION, WORKSPACE_ID); - - final JobSyncConfig expectedJobSyncConfig = new JobSyncConfig() - .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) - .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) - .withPrefix(STANDARD_SYNC.getPrefix()) - .withSourceDockerImage(SOURCE_IMAGE_NAME) - .withSourceProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withDestinationDockerImage(DESTINATION_IMAGE_NAME) - .withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withConfiguredAirbyteCatalog(STANDARD_SYNC.getCatalog()) - .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(standardSyncResourceRequirements) - .withSourceResourceRequirements(standardSyncResourceRequirements) - .withDestinationResourceRequirements(standardSyncResourceRequirements) - .withIsSourceCustomConnector(false) - .withIsDestinationCustomConnector(false) - .withWorkspaceId(WORKSPACE_ID); - - final JobConfig expectedJobConfig = new JobConfig() - .withConfigType(JobConfig.ConfigType.SYNC) - .withSync(expectedJobSyncConfig); - - final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); - - verify(jobPersistence, times(1)).enqueueJob(expectedScope, expectedJobConfig); - } - - @Test - void testCreateSyncJobSourceAndDestinationResourceReqs() throws IOException { - final ResourceRequirements sourceResourceRequirements = new ResourceRequirements() - .withCpuLimit("0.7") - .withCpuRequest("0.7") - .withMemoryLimit("700Mi") - .withMemoryRequest("700Mi"); - 
final ResourceRequirements destResourceRequirements = new ResourceRequirements() - .withCpuLimit("0.8") - .withCpuRequest("0.8") - .withMemoryLimit("800Mi") - .withMemoryRequest("800Mi"); - - jobCreator.createSyncJob( - SOURCE_CONNECTION, - DESTINATION_CONNECTION, - STANDARD_SYNC, - SOURCE_IMAGE_NAME, - SOURCE_PROTOCOL_VERSION, - DESTINATION_IMAGE_NAME, - DESTINATION_PROTOCOL_VERSION, - List.of(STANDARD_SYNC_OPERATION), - null, - new StandardSourceDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), - new StandardDestinationDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( - new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(destResourceRequirements)))), - WORKSPACE_ID); - - final JobSyncConfig expectedJobSyncConfig = new JobSyncConfig() - .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) - .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) - .withPrefix(STANDARD_SYNC.getPrefix()) - .withSourceDockerImage(SOURCE_IMAGE_NAME) - .withSourceProtocolVersion(SOURCE_PROTOCOL_VERSION) - .withDestinationDockerImage(DESTINATION_IMAGE_NAME) - .withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withConfiguredAirbyteCatalog(STANDARD_SYNC.getCatalog()) - .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(workerResourceRequirements) - .withSourceResourceRequirements(sourceResourceRequirements) - .withDestinationResourceRequirements(destResourceRequirements) - .withIsSourceCustomConnector(false) - .withIsDestinationCustomConnector(false) - .withWorkspaceId(WORKSPACE_ID); - - final JobConfig expectedJobConfig = new JobConfig() - .withConfigType(JobConfig.ConfigType.SYNC) - .withSync(expectedJobSyncConfig); - - final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); - - verify(jobPersistence, times(1)).enqueueJob(expectedScope, expectedJobConfig); - } 
- - @Test - void testCreateResetConnectionJob() throws IOException { - final List streamsToReset = List.of(STREAM1_DESCRIPTOR, STREAM2_DESCRIPTOR); - final ConfiguredAirbyteCatalog expectedCatalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM1_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.OVERWRITE), - new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM2_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.OVERWRITE), - // this stream is not being reset, so it should have APPEND destination sync mode - new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM3_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.APPEND))); - - final JobResetConnectionConfig jobResetConnectionConfig = new JobResetConnectionConfig() - .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) - .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) - .withPrefix(STANDARD_SYNC.getPrefix()) - .withDestinationDockerImage(DESTINATION_IMAGE_NAME) - .withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withConfiguredAirbyteCatalog(expectedCatalog) - .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(workerResourceRequirements) - .withResetSourceConfiguration(new ResetSourceConfiguration().withStreamsToReset(streamsToReset)) - .withIsSourceCustomConnector(false) - .withIsDestinationCustomConnector(false) - .withWorkspaceId(DESTINATION_CONNECTION.getWorkspaceId()); - - final JobConfig jobConfig = new JobConfig() - .withConfigType(ConfigType.RESET_CONNECTION) - 
.withResetConnection(jobResetConnectionConfig); - - final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); - when(jobPersistence.enqueueJob(expectedScope, jobConfig)).thenReturn(Optional.of(JOB_ID)); - - final Optional jobId = jobCreator.createResetConnectionJob( - DESTINATION_CONNECTION, - STANDARD_SYNC, - DESTINATION_IMAGE_NAME, - DESTINATION_PROTOCOL_VERSION, - false, - List.of(STANDARD_SYNC_OPERATION), - streamsToReset); - - verify(jobPersistence).enqueueJob(expectedScope, jobConfig); - assertTrue(jobId.isPresent()); - assertEquals(JOB_ID, jobId.get()); - } - - @Test - void testCreateResetConnectionJobEnsureNoQueuing() throws IOException { - final List streamsToReset = List.of(STREAM1_DESCRIPTOR, STREAM2_DESCRIPTOR); - final ConfiguredAirbyteCatalog expectedCatalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM1_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.OVERWRITE), - new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM2_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.OVERWRITE), - // this stream is not being reset, so it should have APPEND destination sync mode - new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM3_NAME, NAMESPACE, Field.of(FIELD_NAME, JsonSchemaType.STRING))) - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.APPEND))); - - final JobResetConnectionConfig jobResetConnectionConfig = new JobResetConnectionConfig() - .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) - .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) - .withPrefix(STANDARD_SYNC.getPrefix()) - .withDestinationDockerImage(DESTINATION_IMAGE_NAME) - 
.withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) - .withConfiguredAirbyteCatalog(expectedCatalog) - .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) - .withResourceRequirements(workerResourceRequirements) - .withResetSourceConfiguration(new ResetSourceConfiguration().withStreamsToReset(streamsToReset)) - .withIsSourceCustomConnector(false) - .withIsDestinationCustomConnector(false) - .withWorkspaceId(DESTINATION_CONNECTION.getWorkspaceId()); - - final JobConfig jobConfig = new JobConfig() - .withConfigType(ConfigType.RESET_CONNECTION) - .withResetConnection(jobResetConnectionConfig); - - final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); - when(jobPersistence.enqueueJob(expectedScope, jobConfig)).thenReturn(Optional.empty()); - - final Optional jobId = jobCreator.createResetConnectionJob( - DESTINATION_CONNECTION, - STANDARD_SYNC, - DESTINATION_IMAGE_NAME, - DESTINATION_PROTOCOL_VERSION, - false, - List.of(STANDARD_SYNC_OPERATION), - streamsToReset); - - verify(jobPersistence).enqueueJob(expectedScope, jobConfig); - assertTrue(jobId.isEmpty()); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java deleted file mode 100644 index d87f3a5f90a1..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java +++ /dev/null @@ -1,2228 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job; - -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.AIRBYTE_METADATA; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.ATTEMPTS; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.STREAM_STATS; -import static io.airbyte.db.instance.jobs.jooq.generated.Tables.SYNC_STATS; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.text.Sqls; -import io.airbyte.commons.version.AirbyteProtocolVersion; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.Version; -import io.airbyte.config.AttemptFailureSummary; -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.FailureReason; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobGetSpecConfig; -import io.airbyte.config.JobOutput; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.NormalizationSummary; -import 
io.airbyte.config.StandardSyncOutput; -import io.airbyte.config.StandardSyncSummary; -import io.airbyte.config.State; -import io.airbyte.config.StreamSyncStats; -import io.airbyte.config.SyncStats; -import io.airbyte.db.Database; -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.factory.DataSourceFactory; -import io.airbyte.db.instance.jobs.JobsDatabaseSchema; -import io.airbyte.db.instance.test.TestDatabaseProviders; -import io.airbyte.persistence.job.JobPersistence.AttemptStats; -import io.airbyte.persistence.job.JobPersistence.JobAttemptPair; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptStatus; -import io.airbyte.persistence.job.models.AttemptWithJobInfo; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.persistence.job.models.JobWithStatusAndTimestamp; -import io.airbyte.test.utils.DatabaseConnectionHelper; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.nio.file.Path; -import java.sql.SQLException; -import java.time.Instant; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import 
org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import org.testcontainers.containers.PostgreSQLContainer; - -@SuppressWarnings({"PMD.JUnitTestsShouldIncludeAssert", "PMD.AvoidDuplicateLiterals"}) -@DisplayName("DefaultJobPersistance") -class DefaultJobPersistenceTest { - - private static final Instant NOW = Instant.now(); - private static final Path LOG_PATH = Path.of("/tmp/logs/all/the/way/down"); - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final String SCOPE = CONNECTION_ID.toString(); - private static final String SPEC_SCOPE = SCOPE + "-spec"; - private static final String CHECK_SCOPE = SCOPE + "-check"; - private static final String SYNC_SCOPE = SCOPE + "-sync"; - private static final UUID CONNECTION_ID2 = UUID.randomUUID(); - private static final JobConfig SPEC_JOB_CONFIG = new JobConfig() - .withConfigType(ConfigType.GET_SPEC) - .withGetSpec(new JobGetSpecConfig()); - private static final JobConfig CHECK_JOB_CONFIG = new JobConfig() - .withConfigType(ConfigType.CHECK_CONNECTION_DESTINATION) - .withGetSpec(new JobGetSpecConfig()); - private static final JobConfig SYNC_JOB_CONFIG = new JobConfig() - .withConfigType(ConfigType.SYNC) - .withSync(new JobSyncConfig()); - - private static final JobConfig RESET_JOB_CONFIG = new JobConfig() - .withConfigType(ConfigType.RESET_CONNECTION) - .withSync(new JobSyncConfig()); - - private static final int DEFAULT_MINIMUM_AGE_IN_DAYS = 30; - private static final int DEFAULT_EXCESSIVE_NUMBER_OF_JOBS = 500; - private static final int DEFAULT_MINIMUM_RECENCY_COUNT = 10; - - private static PostgreSQLContainer container; - private Database jobDatabase; - private Supplier timeSupplier; - private JobPersistence jobPersistence; - private DataSource dataSource; - private DSLContext dslContext; - - @BeforeAll - static void dbSetup() { - container = new PostgreSQLContainer<>("postgres:13-alpine") - 
.withDatabaseName("airbyte") - .withUsername("docker") - .withPassword("docker"); - container.start(); - } - - @AfterAll - static void dbDown() { - container.close(); - } - - private static Attempt createAttempt(final int id, final long jobId, final AttemptStatus status, final Path logPath) { - return new Attempt( - id, - jobId, - logPath, - null, - null, - status, - null, - null, - NOW.getEpochSecond(), - NOW.getEpochSecond(), - NOW.getEpochSecond()); - } - - private static Attempt createUnfinishedAttempt(final int id, final long jobId, final AttemptStatus status, final Path logPath) { - return new Attempt( - id, - jobId, - logPath, - null, - null, - status, - null, - null, - NOW.getEpochSecond(), - NOW.getEpochSecond(), - null); - } - - private static Job createJob(final long id, final JobConfig jobConfig, final JobStatus status, final List attempts, final long time) { - return createJob(id, jobConfig, status, attempts, time, SCOPE); - } - - private static Job createJob( - final long id, - final JobConfig jobConfig, - final JobStatus status, - final List attempts, - final long time, - final String scope) { - return new Job( - id, - jobConfig.getConfigType(), - scope, - jobConfig, - attempts, - status, - null, - time, - time); - } - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() throws Exception { - dataSource = DatabaseConnectionHelper.createDataSource(container); - dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); - final TestDatabaseProviders databaseProviders = new TestDatabaseProviders(dataSource, dslContext); - jobDatabase = databaseProviders.createNewJobsDatabase(); - resetDb(); - - timeSupplier = mock(Supplier.class); - when(timeSupplier.get()).thenReturn(NOW); - - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, - DEFAULT_MINIMUM_RECENCY_COUNT); - } - - @AfterEach - void tearDown() throws Exception { - dslContext.close(); - 
DataSourceFactory.close(dataSource); - } - - private void resetDb() throws SQLException { - // todo (cgardens) - truncate whole db. - jobDatabase.query(ctx -> ctx.truncateTable(JOBS).cascade().execute()); - jobDatabase.query(ctx -> ctx.truncateTable(ATTEMPTS).cascade().execute()); - jobDatabase.query(ctx -> ctx.truncateTable(AIRBYTE_METADATA).cascade().execute()); - jobDatabase.query(ctx -> ctx.truncateTable(SYNC_STATS)); - } - - private Result getJobRecord(final long jobId) throws SQLException { - return jobDatabase.query(ctx -> ctx.fetch(DefaultJobPersistence.BASE_JOB_SELECT_AND_JOIN + "WHERE jobs.id = ?", jobId)); - } - - @Test - @DisplayName("Should set a job to incomplete if an attempt fails") - void testCompleteAttemptFailed() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - - jobPersistence.failAttempt(jobId, attemptNumber); - - final Job actual = jobPersistence.getJob(jobId); - final Job expected = createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.INCOMPLETE, - Lists.newArrayList(createAttempt(0, jobId, AttemptStatus.FAILED, LOG_PATH)), - NOW.getEpochSecond()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should set a job to succeeded if an attempt succeeds") - void testCompleteAttemptSuccess() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - - jobPersistence.succeedAttempt(jobId, attemptNumber); - - final Job actual = jobPersistence.getJob(jobId); - final Job expected = createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.SUCCEEDED, - Lists.newArrayList(createAttempt(0, jobId, AttemptStatus.SUCCEEDED, LOG_PATH)), - NOW.getEpochSecond()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should be able to read what is written") - void testWriteOutput() throws 
IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - final Job created = jobPersistence.getJob(jobId); - final SyncStats syncStats = - new SyncStats().withBytesEmitted(100L).withRecordsEmitted(9L).withRecordsCommitted(10L).withDestinationStateMessagesEmitted(1L) - .withSourceStateMessagesEmitted(4L).withMaxSecondsBeforeSourceStateMessageEmitted(5L).withMeanSecondsBeforeSourceStateMessageEmitted(2L) - .withMaxSecondsBetweenStateMessageEmittedandCommitted(10L).withMeanSecondsBetweenStateMessageEmittedandCommitted(3L); - final FailureReason failureReason1 = new FailureReason().withFailureOrigin(FailureOrigin.DESTINATION).withFailureType(FailureType.SYSTEM_ERROR) - .withExternalMessage("There was a normalization error"); - final FailureReason failureReason2 = new FailureReason().withFailureOrigin(FailureOrigin.SOURCE).withFailureType(FailureType.CONFIG_ERROR) - .withExternalMessage("There was another normalization error"); - - final NormalizationSummary normalizationSummary = - new NormalizationSummary().withStartTime(10L).withEndTime(500L).withFailures(List.of(failureReason1, failureReason2)); - final StandardSyncOutput standardSyncOutput = - new StandardSyncOutput().withStandardSyncSummary(new StandardSyncSummary().withTotalStats(syncStats)) - .withNormalizationSummary(normalizationSummary); - final JobOutput jobOutput = new JobOutput().withOutputType(JobOutput.OutputType.DISCOVER_CATALOG).withSync(standardSyncOutput); - - when(timeSupplier.get()).thenReturn(Instant.ofEpochMilli(4242)); - jobPersistence.writeOutput(jobId, attemptNumber, jobOutput); - - final Job updated = jobPersistence.getJob(jobId); - - assertEquals(Optional.of(jobOutput), updated.getAttempts().get(0).getOutput()); - assertNotEquals(created.getAttempts().get(0).getUpdatedAtInSecond(), updated.getAttempts().get(0).getUpdatedAtInSecond()); - - final SyncStats storedSyncStats = 
jobPersistence.getAttemptStats(jobId, attemptNumber).combinedStats(); - assertEquals(100L, storedSyncStats.getBytesEmitted()); - assertEquals(9L, storedSyncStats.getRecordsEmitted()); - assertEquals(10L, storedSyncStats.getRecordsCommitted()); - assertEquals(4L, storedSyncStats.getSourceStateMessagesEmitted()); - assertEquals(1L, storedSyncStats.getDestinationStateMessagesEmitted()); - assertEquals(5L, storedSyncStats.getMaxSecondsBeforeSourceStateMessageEmitted()); - assertEquals(2L, storedSyncStats.getMeanSecondsBeforeSourceStateMessageEmitted()); - assertEquals(10L, storedSyncStats.getMaxSecondsBetweenStateMessageEmittedandCommitted()); - assertEquals(3L, storedSyncStats.getMeanSecondsBetweenStateMessageEmittedandCommitted()); - - final NormalizationSummary storedNormalizationSummary = jobPersistence.getNormalizationSummary(jobId, attemptNumber).stream().findFirst().get(); - assertEquals(10L, storedNormalizationSummary.getStartTime()); - assertEquals(500L, storedNormalizationSummary.getEndTime()); - assertEquals(List.of(failureReason1, failureReason2), storedNormalizationSummary.getFailures()); - } - - @Test - @DisplayName("Should be able to read AttemptSyncConfig that was written") - void testWriteAttemptSyncConfig() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - final Job created = jobPersistence.getJob(jobId); - final AttemptSyncConfig attemptSyncConfig = new AttemptSyncConfig() - .withSourceConfiguration(Jsons.jsonNode(Map.of("source", "s_config_value"))) - .withDestinationConfiguration(Jsons.jsonNode(Map.of("destination", "d_config_value"))) - .withState(new State().withState(Jsons.jsonNode(ImmutableMap.of("state_key", "state_value")))); - - when(timeSupplier.get()).thenReturn(Instant.ofEpochMilli(4242)); - jobPersistence.writeAttemptSyncConfig(jobId, attemptNumber, attemptSyncConfig); - - final Job updated = 
jobPersistence.getJob(jobId); - assertEquals(Optional.of(attemptSyncConfig), updated.getAttempts().get(0).getSyncConfig()); - assertNotEquals(created.getAttempts().get(0).getUpdatedAtInSecond(), updated.getAttempts().get(0).getUpdatedAtInSecond()); - } - - @Test - @DisplayName("Should be able to read attemptFailureSummary that was written") - void testWriteAttemptFailureSummary() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - final Job created = jobPersistence.getJob(jobId); - final AttemptFailureSummary failureSummary = new AttemptFailureSummary().withFailures( - Collections.singletonList(new FailureReason().withFailureOrigin(FailureOrigin.SOURCE))); - - when(timeSupplier.get()).thenReturn(Instant.ofEpochMilli(4242)); - jobPersistence.writeAttemptFailureSummary(jobId, attemptNumber, failureSummary); - - final Job updated = jobPersistence.getJob(jobId); - assertEquals(Optional.of(failureSummary), updated.getAttempts().get(0).getFailureSummary()); - assertNotEquals(created.getAttempts().get(0).getUpdatedAtInSecond(), updated.getAttempts().get(0).getUpdatedAtInSecond()); - } - - @Nested - @DisplayName("Stats Related Tests") - class Stats { - - @Test - @DisplayName("Writing stats the first time should only write record and bytes information correctly") - void testWriteStatsFirst() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - final var streamStats = List.of( - new StreamSyncStats().withStreamName("name1").withStreamNamespace("ns") - .withStats(new SyncStats().withBytesEmitted(500L).withRecordsEmitted(500L).withEstimatedBytes(10000L).withEstimatedRecords(2000L)), - new StreamSyncStats().withStreamName("name2").withStreamNamespace("ns") - .withStats(new 
SyncStats().withBytesEmitted(500L).withRecordsEmitted(500L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobId, attemptNumber, 1000, 1000, 1000, 1000, streamStats); - - final AttemptStats stats = jobPersistence.getAttemptStats(jobId, attemptNumber); - final var combined = stats.combinedStats(); - assertEquals(1000, combined.getBytesEmitted()); - assertEquals(1000, combined.getRecordsEmitted()); - assertEquals(1000, combined.getEstimatedBytes()); - assertEquals(1000, combined.getEstimatedRecords()); - - // As of this writing, committed and state messages are not expected. - assertEquals(null, combined.getRecordsCommitted()); - assertEquals(null, combined.getDestinationStateMessagesEmitted()); - - final var actStreamStats = stats.perStreamStats(); - assertEquals(2, actStreamStats.size()); - assertEquals(streamStats, actStreamStats); - } - - @Test - @DisplayName("Writing stats multiple times should write record and bytes information correctly without exceptions") - void testWriteStatsRepeated() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - - // First write. - var streamStats = List.of( - new StreamSyncStats().withStreamName("name1").withStreamNamespace("ns") - .withStats(new SyncStats().withBytesEmitted(500L).withRecordsEmitted(500L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobId, attemptNumber, 1000, 1000, 1000, 1000, streamStats); - - // Second write. 
- when(timeSupplier.get()).thenReturn(Instant.now()); - streamStats = List.of( - new StreamSyncStats().withStreamName("name1").withStreamNamespace("ns") - .withStats(new SyncStats().withBytesEmitted(1000L).withRecordsEmitted(1000L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobId, attemptNumber, 2000, 2000, 2000, 2000, streamStats); - - final AttemptStats stats = jobPersistence.getAttemptStats(jobId, attemptNumber); - final var combined = stats.combinedStats(); - assertEquals(2000, combined.getBytesEmitted()); - assertEquals(2000, combined.getRecordsEmitted()); - assertEquals(2000, combined.getEstimatedBytes()); - assertEquals(2000, combined.getEstimatedRecords()); - - final var actStreamStats = stats.perStreamStats(); - assertEquals(1, actStreamStats.size()); - assertEquals(streamStats, actStreamStats); - - } - - @Test - @DisplayName("Writing multiple stats of the same attempt id, stream name and namespace should update the previous record") - void testWriteStatsUpsert() throws IOException, SQLException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - - // First write. - var streamStats = List.of( - new StreamSyncStats().withStreamName("name1").withStreamNamespace("ns") - .withStats(new SyncStats().withBytesEmitted(500L).withRecordsEmitted(500L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobId, attemptNumber, 1000, 1000, 1000, 1000, streamStats); - - // Second write. 
- when(timeSupplier.get()).thenReturn(Instant.now()); - streamStats = List.of( - new StreamSyncStats().withStreamName("name1").withStreamNamespace("ns") - .withStats(new SyncStats().withBytesEmitted(1000L).withRecordsEmitted(1000L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobId, attemptNumber, 2000, 2000, 2000, 2000, streamStats); - - final var syncStatsRec = jobDatabase.query(ctx -> { - final var attemptId = DefaultJobPersistence.getAttemptId(jobId, attemptNumber, ctx); - return ctx.fetch("SELECT * from sync_stats where attempt_id = ?", attemptId).stream().findFirst().get(); - }); - - // Check time stamps to confirm upsert. - assertNotEquals(syncStatsRec.get(SYNC_STATS.CREATED_AT), syncStatsRec.get(SYNC_STATS.UPDATED_AT)); - - final var streamStatsRec = jobDatabase.query(ctx -> { - final var attemptId = DefaultJobPersistence.getAttemptId(jobId, attemptNumber, ctx); - return ctx.fetch("SELECT * from stream_stats where attempt_id = ?", attemptId).stream().findFirst().get(); - }); - // Check time stamps to confirm upsert. - assertNotEquals(streamStatsRec.get(STREAM_STATS.CREATED_AT), streamStatsRec.get(STREAM_STATS.UPDATED_AT)); - } - - @Test - @DisplayName("Writing multiple stats a stream with null namespace should write correctly without exceptions") - void testWriteNullNamespace() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - - // First write. - var streamStats = List.of( - new StreamSyncStats().withStreamName("name1") - .withStats(new SyncStats().withBytesEmitted(500L).withRecordsEmitted(500L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobId, attemptNumber, 1000, 1000, 1000, 1000, streamStats); - - // Second write. 
- when(timeSupplier.get()).thenReturn(Instant.now()); - streamStats = List.of( - new StreamSyncStats().withStreamName("name1") - .withStats(new SyncStats().withBytesEmitted(1000L).withRecordsEmitted(1000L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobId, attemptNumber, 2000, 2000, 2000, 2000, streamStats); - - final AttemptStats stats = jobPersistence.getAttemptStats(jobId, attemptNumber); - final var combined = stats.combinedStats(); - assertEquals(2000, combined.getBytesEmitted()); - assertEquals(2000, combined.getRecordsEmitted()); - assertEquals(2000, combined.getEstimatedBytes()); - assertEquals(2000, combined.getEstimatedRecords()); - - final var actStreamStats = stats.perStreamStats(); - assertEquals(1, actStreamStats.size()); - assertEquals(streamStats, actStreamStats); - } - - @Test - @DisplayName("Writing multiple stats a stream with null namespace should write correctly without exceptions") - void testGetStatsNoResult() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - - final AttemptStats stats = jobPersistence.getAttemptStats(jobId, attemptNumber); - assertNull(stats.combinedStats()); - assertEquals(0, stats.perStreamStats().size()); - - } - - @Test - @DisplayName("Retrieving all attempts stats for a job should return the right information") - void testGetMultipleStats() throws IOException { - final long jobOneId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int jobOneAttemptNumberOne = jobPersistence.createAttempt(jobOneId, LOG_PATH); - - // First write for first attempt. 
- var streamStats = List.of( - new StreamSyncStats().withStreamName("name1") - .withStats(new SyncStats().withBytesEmitted(500L).withRecordsEmitted(500L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobOneId, jobOneAttemptNumberOne, 1000, 1000, 1000, 1000, streamStats); - - // Second write for first attempt. This is the record that should be returned. - when(timeSupplier.get()).thenReturn(Instant.now()); - streamStats = List.of( - new StreamSyncStats().withStreamName("name1") - .withStats(new SyncStats().withBytesEmitted(1000L).withRecordsEmitted(1000L).withEstimatedBytes(10000L).withEstimatedRecords(2000L))); - jobPersistence.writeStats(jobOneId, jobOneAttemptNumberOne, 2000, 2000, 2000, 2000, streamStats); - jobPersistence.failAttempt(jobOneId, jobOneAttemptNumberOne); - - // Second attempt for first job. - final int jobOneAttemptNumberTwo = jobPersistence.createAttempt(jobOneId, LOG_PATH); - jobPersistence.writeStats(jobOneId, jobOneAttemptNumberTwo, 1000, 1000, 1000, 1000, streamStats); - - // First attempt for second job. 
- final long jobTwoId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int jobTwoAttemptNumberOne = jobPersistence.createAttempt(jobTwoId, LOG_PATH); - jobPersistence.writeStats(jobTwoId, jobTwoAttemptNumberOne, 1000, 1000, 1000, 1000, streamStats); - - final var stats = jobPersistence.getAttemptStats(List.of(jobOneId, jobTwoId)); - final var exp = Map.of( - new JobAttemptPair(jobOneId, jobOneAttemptNumberOne), - new AttemptStats( - new SyncStats().withRecordsEmitted(2000L).withBytesEmitted(2000L).withEstimatedBytes(2000L).withEstimatedRecords(2000L), - List.of(new StreamSyncStats().withStreamName("name1").withStats( - new SyncStats().withEstimatedBytes(10000L).withEstimatedRecords(2000L).withBytesEmitted(1000L).withRecordsEmitted(1000L)))), - new JobAttemptPair(jobOneId, jobOneAttemptNumberTwo), - new AttemptStats( - new SyncStats().withRecordsEmitted(1000L).withBytesEmitted(1000L).withEstimatedBytes(1000L).withEstimatedRecords(1000L), - List.of(new StreamSyncStats().withStreamName("name1").withStats( - new SyncStats().withEstimatedBytes(10000L).withEstimatedRecords(2000L).withBytesEmitted(1000L).withRecordsEmitted(1000L)))), - new JobAttemptPair(jobTwoId, jobTwoAttemptNumberOne), - new AttemptStats( - new SyncStats().withRecordsEmitted(1000L).withBytesEmitted(1000L).withEstimatedBytes(1000L).withEstimatedRecords(1000L), - List.of(new StreamSyncStats().withStreamName("name1").withStats( - new SyncStats().withEstimatedBytes(10000L).withEstimatedRecords(2000L).withBytesEmitted(1000L).withRecordsEmitted(1000L))))); - - assertEquals(exp, stats); - - } - - @Test - @DisplayName("Retrieving stats for an empty list should not cause an exception.") - void testGetStatsForEmptyJobList() throws IOException { - assertNotNull(jobPersistence.getAttemptStats(List.of())); - } - - @Test - @DisplayName("Retrieving stats for a bad job attempt input should not cause an exception.") - void testGetStatsForBadJobAttemptInput() throws IOException { - 
assertNotNull(jobPersistence.getAttemptStats(-1, -1)); - } - - } - - @Test - @DisplayName("When getting the last replication job should return the most recently created job") - void testGetLastSyncJobWithMultipleAttempts() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.failAttempt(jobId, jobPersistence.createAttempt(jobId, LOG_PATH)); - jobPersistence.failAttempt(jobId, jobPersistence.createAttempt(jobId, LOG_PATH)); - - final Optional actual = jobPersistence.getLastReplicationJob(UUID.fromString(SCOPE)); - - final Job expected = createJob( - jobId, - SYNC_JOB_CONFIG, - JobStatus.INCOMPLETE, - Lists.newArrayList( - createAttempt(0, jobId, AttemptStatus.FAILED, LOG_PATH), - createAttempt(1, jobId, AttemptStatus.FAILED, LOG_PATH)), - NOW.getEpochSecond()); - - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should extract a Job model from a JOOQ result set") - void testGetJobFromRecord() throws IOException, SQLException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - - final Optional actual = DefaultJobPersistence.getJobFromResult(getJobRecord(jobId)); - - final Job expected = createJob(jobId, SPEC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()); - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should be able to import database that was exported") - void testExportImport() throws IOException, SQLException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber0 = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.failAttempt(jobId, attemptNumber0); - final Path secondAttemptLogPath = LOG_PATH.resolve("2"); - final int attemptNumber1 = jobPersistence.createAttempt(jobId, secondAttemptLogPath); - jobPersistence.succeedAttempt(jobId, attemptNumber1); - - final Map> inputStreams = 
jobPersistence.exportDatabase(); - - // Collect streams to memory for temporary storage - final Map> tempData = new HashMap<>(); - final Map> outputStreams = new HashMap<>(); - for (final Entry> entry : inputStreams.entrySet()) { - final List tableData = entry.getValue().collect(Collectors.toList()); - tempData.put(entry.getKey(), tableData); - outputStreams.put(entry.getKey(), tableData.stream()); - } - resetDb(); - - jobPersistence.importDatabase("test", outputStreams); - - final List actualList = jobPersistence.listJobs(SPEC_JOB_CONFIG.getConfigType(), CONNECTION_ID.toString(), 9999, 0); - final Job actual = actualList.get(0); - final Job expected = createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.SUCCEEDED, - Lists.newArrayList( - createAttempt(0, jobId, AttemptStatus.FAILED, LOG_PATH), - createAttempt(1, jobId, AttemptStatus.SUCCEEDED, secondAttemptLogPath)), - NOW.getEpochSecond()); - - assertEquals(1, actualList.size()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should return correct set of jobs when querying on end timestamp") - void testListJobsWithTimestamp() throws IOException { - // TODO : Once we fix the problem of precision loss in DefaultJobPersistence, change the test value - // to contain milliseconds as well - final Instant now = Instant.parse("2021-01-01T00:00:00Z"); - final Supplier timeSupplier = incrementingSecondSupplier(now); - - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, - DEFAULT_MINIMUM_RECENCY_COUNT); - final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); - final Path syncJobSecondAttemptLogPath = LOG_PATH.resolve("2"); - final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId, syncJobSecondAttemptLogPath); - 
jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber1); - - final long specJobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int specJobAttemptNumber0 = jobPersistence.createAttempt(specJobId, LOG_PATH); - jobPersistence.failAttempt(specJobId, specJobAttemptNumber0); - final Path specJobSecondAttemptLogPath = LOG_PATH.resolve("2"); - final int specJobAttemptNumber1 = jobPersistence.createAttempt(specJobId, specJobSecondAttemptLogPath); - jobPersistence.succeedAttempt(specJobId, specJobAttemptNumber1); - - final List jobs = jobPersistence.listJobs(ConfigType.SYNC, Instant.EPOCH); - assertEquals(jobs.size(), 1); - assertEquals(jobs.get(0).getId(), syncJobId); - assertEquals(jobs.get(0).getAttempts().size(), 2); - assertEquals(jobs.get(0).getAttempts().get(0).getAttemptNumber(), 0); - assertEquals(jobs.get(0).getAttempts().get(1).getAttemptNumber(), 1); - - final Path syncJobThirdAttemptLogPath = LOG_PATH.resolve("3"); - final int syncJobAttemptNumber2 = jobPersistence.createAttempt(syncJobId, syncJobThirdAttemptLogPath); - jobPersistence.succeedAttempt(syncJobId, syncJobAttemptNumber2); - - final long newSyncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int newSyncJobAttemptNumber0 = jobPersistence.createAttempt(newSyncJobId, LOG_PATH); - jobPersistence.failAttempt(newSyncJobId, newSyncJobAttemptNumber0); - final Path newSyncJobSecondAttemptLogPath = LOG_PATH.resolve("2"); - final int newSyncJobAttemptNumber1 = jobPersistence.createAttempt(newSyncJobId, newSyncJobSecondAttemptLogPath); - jobPersistence.succeedAttempt(newSyncJobId, newSyncJobAttemptNumber1); - - final Long maxEndedAtTimestamp = - jobs.get(0).getAttempts().stream().map(c -> c.getEndedAtInSecond().orElseThrow()).max(Long::compareTo).orElseThrow(); - - final List secondQueryJobs = jobPersistence.listJobs(ConfigType.SYNC, Instant.ofEpochSecond(maxEndedAtTimestamp)); - assertEquals(secondQueryJobs.size(), 2); - 
assertEquals(secondQueryJobs.get(0).getId(), syncJobId); - assertEquals(secondQueryJobs.get(0).getAttempts().size(), 1); - assertEquals(secondQueryJobs.get(0).getAttempts().get(0).getAttemptNumber(), 2); - - assertEquals(secondQueryJobs.get(1).getId(), newSyncJobId); - assertEquals(secondQueryJobs.get(1).getAttempts().size(), 2); - assertEquals(secondQueryJobs.get(1).getAttempts().get(0).getAttemptNumber(), 0); - assertEquals(secondQueryJobs.get(1).getAttempts().get(1).getAttemptNumber(), 1); - - Long maxEndedAtTimestampAfterSecondQuery = -1L; - for (final Job c : secondQueryJobs) { - final List attempts = c.getAttempts(); - final Long maxEndedAtTimestampForJob = attempts.stream().map(attempt -> attempt.getEndedAtInSecond().orElseThrow()) - .max(Long::compareTo).orElseThrow(); - if (maxEndedAtTimestampForJob > maxEndedAtTimestampAfterSecondQuery) { - maxEndedAtTimestampAfterSecondQuery = maxEndedAtTimestampForJob; - } - } - - assertEquals(0, jobPersistence.listJobs(ConfigType.SYNC, Instant.ofEpochSecond(maxEndedAtTimestampAfterSecondQuery)).size()); - } - - @Test - @DisplayName("Should return correct list of AttemptWithJobInfo when querying on end timestamp, sorted by attempt end time") - void testListAttemptsWithJobInfo() throws IOException { - final Instant now = Instant.parse("2021-01-01T00:00:00Z"); - final Supplier timeSupplier = incrementingSecondSupplier(now); - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, - DEFAULT_MINIMUM_RECENCY_COUNT); - - final long job1 = jobPersistence.enqueueJob(SCOPE + "-1", SYNC_JOB_CONFIG).orElseThrow(); - final long job2 = jobPersistence.enqueueJob(SCOPE + "-2", SYNC_JOB_CONFIG).orElseThrow(); - - final int job1Attempt1 = jobPersistence.createAttempt(job1, LOG_PATH.resolve("1")); - final int job2Attempt1 = jobPersistence.createAttempt(job2, LOG_PATH.resolve("2")); - jobPersistence.failAttempt(job1, job1Attempt1); - 
jobPersistence.failAttempt(job2, job2Attempt1); - - final int job1Attempt2 = jobPersistence.createAttempt(job1, LOG_PATH.resolve("3")); - final int job2Attempt2 = jobPersistence.createAttempt(job2, LOG_PATH.resolve("4")); - jobPersistence.failAttempt(job2, job2Attempt2); // job 2 attempt 2 fails before job 1 attempt 2 fails - jobPersistence.failAttempt(job1, job1Attempt2); - - final int job1Attempt3 = jobPersistence.createAttempt(job1, LOG_PATH.resolve("5")); - final int job2Attempt3 = jobPersistence.createAttempt(job2, LOG_PATH.resolve("6")); - jobPersistence.succeedAttempt(job1, job1Attempt3); - jobPersistence.succeedAttempt(job2, job2Attempt3); - - final List allAttempts = jobPersistence.listAttemptsWithJobInfo(ConfigType.SYNC, Instant.ofEpochSecond(0)); - assertEquals(6, allAttempts.size()); - - assertEquals(job1, allAttempts.get(0).getJobInfo().getId()); - assertEquals(job1Attempt1, allAttempts.get(0).getAttempt().getAttemptNumber()); - - assertEquals(job2, allAttempts.get(1).getJobInfo().getId()); - assertEquals(job2Attempt1, allAttempts.get(1).getAttempt().getAttemptNumber()); - - assertEquals(job2, allAttempts.get(2).getJobInfo().getId()); - assertEquals(job2Attempt2, allAttempts.get(2).getAttempt().getAttemptNumber()); - - assertEquals(job1, allAttempts.get(3).getJobInfo().getId()); - assertEquals(job1Attempt2, allAttempts.get(3).getAttempt().getAttemptNumber()); - - assertEquals(job1, allAttempts.get(4).getJobInfo().getId()); - assertEquals(job1Attempt3, allAttempts.get(4).getAttempt().getAttemptNumber()); - - assertEquals(job2, allAttempts.get(5).getJobInfo().getId()); - assertEquals(job2Attempt3, allAttempts.get(5).getAttempt().getAttemptNumber()); - - final List attemptsAfterTimestamp = jobPersistence.listAttemptsWithJobInfo(ConfigType.SYNC, - Instant.ofEpochSecond(allAttempts.get(2).getAttempt().getEndedAtInSecond().orElseThrow())); - assertEquals(3, attemptsAfterTimestamp.size()); - - assertEquals(job1, 
attemptsAfterTimestamp.get(0).getJobInfo().getId()); - assertEquals(job1Attempt2, attemptsAfterTimestamp.get(0).getAttempt().getAttemptNumber()); - - assertEquals(job1, attemptsAfterTimestamp.get(1).getJobInfo().getId()); - assertEquals(job1Attempt3, attemptsAfterTimestamp.get(1).getAttempt().getAttemptNumber()); - - assertEquals(job2, attemptsAfterTimestamp.get(2).getJobInfo().getId()); - assertEquals(job2Attempt3, attemptsAfterTimestamp.get(2).getAttempt().getAttemptNumber()); - } - - private static Supplier incrementingSecondSupplier(final Instant startTime) { - // needs to be an array to work with lambda - final int[] intArray = {0}; - - final Supplier timeSupplier = () -> startTime.plusSeconds(intArray[0]++); - return timeSupplier; - } - - @Test - @DisplayName("Should have valid yaml schemas in exported database") - void testYamlSchemas() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber0 = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.failAttempt(jobId, attemptNumber0); - final Path secondAttemptLogPath = LOG_PATH.resolve("2"); - final int attemptNumber1 = jobPersistence.createAttempt(jobId, secondAttemptLogPath); - jobPersistence.succeedAttempt(jobId, attemptNumber1); - final JsonSchemaValidator jsonSchemaValidator = new JsonSchemaValidator(); - - final Map> inputStreams = jobPersistence.exportDatabase(); - inputStreams.forEach((tableSchema, tableStream) -> { - final String tableName = tableSchema.name(); - final JsonNode schema = tableSchema.getTableDefinition(); - assertNotNull(schema, - "Json schema files should be created in airbyte-persistence/job-persistence/src/main/resources/tables for every table in the Database to validate its content"); - tableStream.forEach(row -> { - try { - jsonSchemaValidator.ensure(schema, row); - } catch (final JsonValidationException e) { - fail(String.format("JSON Schema validation failed for %s with record %s", tableName, 
row.toPrettyString())); - } - }); - }); - } - - @Test - void testSecretMigrationMetadata() throws IOException { - boolean isMigrated = jobPersistence.isSecretMigrated(); - assertFalse(isMigrated); - jobPersistence.setSecretMigrationDone(); - isMigrated = jobPersistence.isSecretMigrated(); - assertTrue(isMigrated); - } - - @Test - void testAirbyteProtocolVersionMaxMetadata() throws IOException { - assertTrue(jobPersistence.getAirbyteProtocolVersionMax().isEmpty()); - - final Version maxVersion1 = new Version("0.1.0"); - jobPersistence.setAirbyteProtocolVersionMax(maxVersion1); - final Optional maxVersion1read = jobPersistence.getAirbyteProtocolVersionMax(); - assertEquals(maxVersion1, maxVersion1read.orElseThrow()); - - final Version maxVersion2 = new Version("1.2.1"); - jobPersistence.setAirbyteProtocolVersionMax(maxVersion2); - final Optional maxVersion2read = jobPersistence.getAirbyteProtocolVersionMax(); - assertEquals(maxVersion2, maxVersion2read.orElseThrow()); - } - - @Test - void testAirbyteProtocolVersionMinMetadata() throws IOException { - assertTrue(jobPersistence.getAirbyteProtocolVersionMin().isEmpty()); - - final Version minVersion1 = new Version("1.1.0"); - jobPersistence.setAirbyteProtocolVersionMin(minVersion1); - final Optional minVersion1read = jobPersistence.getAirbyteProtocolVersionMin(); - assertEquals(minVersion1, minVersion1read.orElseThrow()); - - final Version minVersion2 = new Version("3.0.1"); - jobPersistence.setAirbyteProtocolVersionMin(minVersion2); - final Optional minVersion2read = jobPersistence.getAirbyteProtocolVersionMin(); - assertEquals(minVersion2, minVersion2read.orElseThrow()); - } - - @Test - void testAirbyteProtocolVersionRange() throws IOException { - final Version v1 = new Version("1.5.0"); - final Version v2 = new Version("2.5.0"); - final Optional range = jobPersistence.getCurrentProtocolVersionRange(); - assertEquals(Optional.empty(), range); - - jobPersistence.setAirbyteProtocolVersionMax(v2); - final Optional range2 
= jobPersistence.getCurrentProtocolVersionRange(); - assertEquals(Optional.of(new AirbyteProtocolVersionRange(AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION, v2)), range2); - - jobPersistence.setAirbyteProtocolVersionMin(v1); - final Optional range3 = jobPersistence.getCurrentProtocolVersionRange(); - assertEquals(Optional.of(new AirbyteProtocolVersionRange(v1, v2)), range3); - } - - private long createJobAt(final Instant created_at) throws IOException { - when(timeSupplier.get()).thenReturn(created_at); - return jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - } - - @Nested - class TemporalWorkflowInfo { - - @Test - void testSuccessfulGet() throws IOException, SQLException { - final var jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final var attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - - final var defaultWorkflowId = jobPersistence.getAttemptTemporalWorkflowId(jobId, attemptNumber); - assertTrue(defaultWorkflowId.isEmpty()); - - jobDatabase.query(ctx -> ctx.execute( - "UPDATE attempts SET temporal_workflow_id = '56a81f3a-006c-42d7-bce2-29d675d08ea4' WHERE job_id = ? 
AND attempt_number =?", jobId, - attemptNumber)); - final var workflowId = jobPersistence.getAttemptTemporalWorkflowId(jobId, attemptNumber).get(); - assertEquals(workflowId, "56a81f3a-006c-42d7-bce2-29d675d08ea4"); - } - - @Test - void testGetMissingAttempt() throws IOException { - assertTrue(jobPersistence.getAttemptTemporalWorkflowId(0, 0).isEmpty()); - } - - @Test - void testSuccessfulSet() throws IOException, SQLException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final var attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - final var temporalWorkflowId = "test-id-usually-uuid"; - final var syncQueue = "SYNC"; - - jobPersistence.setAttemptTemporalWorkflowInfo(jobId, attemptNumber, temporalWorkflowId, syncQueue); - - final var workflowId = jobPersistence.getAttemptTemporalWorkflowId(jobId, attemptNumber).get(); - assertEquals(workflowId, temporalWorkflowId); - - final var taskQueue = jobDatabase.query(ctx -> ctx.fetch( - "SELECT processing_task_queue FROM attempts WHERE job_id = ? 
AND attempt_number =?", jobId, - attemptNumber)).stream().findFirst().get().get("processing_task_queue", String.class); - assertEquals(syncQueue, taskQueue); - } - - } - - @Nested - class GetAndSetVersion { - - @Test - void testSetVersion() throws IOException { - final String version = UUID.randomUUID().toString(); - jobPersistence.setVersion(version); - assertEquals(version, jobPersistence.getVersion().orElseThrow()); - } - - @Test - void testSetVersionReplacesExistingId() throws IOException { - final String deploymentId1 = UUID.randomUUID().toString(); - final String deploymentId2 = UUID.randomUUID().toString(); - jobPersistence.setVersion(deploymentId1); - jobPersistence.setVersion(deploymentId2); - assertEquals(deploymentId2, jobPersistence.getVersion().orElseThrow()); - } - - } - - @Nested - class GetAndSetDeployment { - - @Test - void testSetDeployment() throws IOException { - final UUID deploymentId = UUID.randomUUID(); - jobPersistence.setDeployment(deploymentId); - assertEquals(deploymentId, jobPersistence.getDeployment().orElseThrow()); - } - - @Test - void testSetDeploymentIdDoesNotReplaceExistingId() throws IOException { - final UUID deploymentId1 = UUID.randomUUID(); - final UUID deploymentId2 = UUID.randomUUID(); - jobPersistence.setDeployment(deploymentId1); - jobPersistence.setDeployment(deploymentId2); - assertEquals(deploymentId1, jobPersistence.getDeployment().orElseThrow()); - } - - } - - @Nested - @DisplayName("When cancelling job") - class CancelJob { - - @Test - @DisplayName("Should cancel job and leave job in cancelled state") - void testCancelJob() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final Job created = jobPersistence.getJob(jobId); - - when(timeSupplier.get()).thenReturn(Instant.ofEpochMilli(4242)); - jobPersistence.cancelJob(jobId); - - final Job updated = jobPersistence.getJob(jobId); - assertEquals(JobStatus.CANCELLED, updated.getStatus()); - 
assertNotEquals(created.getUpdatedAtInSecond(), updated.getUpdatedAtInSecond()); - } - - @Test - @DisplayName("Should not raise an exception if job is already succeeded") - void testCancelJobAlreadySuccessful() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.succeedAttempt(jobId, attemptNumber); - - assertDoesNotThrow(() -> jobPersistence.cancelJob(jobId)); - - final Job updated = jobPersistence.getJob(jobId); - assertEquals(JobStatus.SUCCEEDED, updated.getStatus()); - } - - } - - @Nested - @DisplayName("When creating attempt") - class CreateAttempt { - - @Test - @DisplayName("Should create an attempt") - void testCreateAttempt() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - jobPersistence.createAttempt(jobId, LOG_PATH); - - final Job actual = jobPersistence.getJob(jobId); - final Job expected = createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.RUNNING, - Lists.newArrayList(createUnfinishedAttempt(0, jobId, AttemptStatus.RUNNING, LOG_PATH)), - NOW.getEpochSecond()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should increment attempt id if creating multiple attemps") - void testCreateAttemptAttemptId() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber1 = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.failAttempt(jobId, attemptNumber1); - - final Job jobAfterOneAttempts = jobPersistence.getJob(jobId); - assertEquals(0, attemptNumber1); - assertEquals(0, jobAfterOneAttempts.getAttempts().get(0).getAttemptNumber()); - - final int attemptNumber2 = jobPersistence.createAttempt(jobId, LOG_PATH); - final Job jobAfterTwoAttempts = jobPersistence.getJob(jobId); - assertEquals(1, attemptNumber2); - assertEquals(Sets.newHashSet(0, 1), 
jobAfterTwoAttempts.getAttempts().stream().map(Attempt::getAttemptNumber).collect(Collectors.toSet())); - } - - @Test - @DisplayName("Should not create an attempt if an attempt is running") - void testCreateAttemptWhileAttemptAlreadyRunning() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - jobPersistence.createAttempt(jobId, LOG_PATH); - - assertThrows(IllegalStateException.class, () -> jobPersistence.createAttempt(jobId, LOG_PATH)); - - final Job actual = jobPersistence.getJob(jobId); - final Job expected = createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.RUNNING, - Lists.newArrayList(createUnfinishedAttempt(0, jobId, AttemptStatus.RUNNING, LOG_PATH)), - NOW.getEpochSecond()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should not create an attempt if job is in terminal state") - void testCreateAttemptTerminal() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.succeedAttempt(jobId, attemptNumber); - - assertThrows(IllegalStateException.class, () -> jobPersistence.createAttempt(jobId, LOG_PATH)); - - final Job actual = jobPersistence.getJob(jobId); - final Job expected = createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.SUCCEEDED, - Lists.newArrayList(createAttempt(0, jobId, AttemptStatus.SUCCEEDED, LOG_PATH)), - NOW.getEpochSecond()); - assertEquals(expected, actual); - } - - } - - @Nested - @DisplayName("When enqueueing job") - class EnqueueJob { - - @Test - @DisplayName("Should create initial job without attempt") - void testCreateJobAndGetWithoutAttemptJob() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - - final Job actual = jobPersistence.getJob(jobId); - final Job expected = createJob(jobId, SPEC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), 
NOW.getEpochSecond()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should not create a second job if a job under the same scope is in a non-terminal state") - void testCreateJobNoQueueing() throws IOException { - final Optional jobId1 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG); - final Optional jobId2 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG); - - assertTrue(jobId1.isPresent()); - assertTrue(jobId2.isEmpty()); - - final Job actual = jobPersistence.getJob(jobId1.get()); - final Job expected = createJob(jobId1.get(), SYNC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should create a second job if a previous job under the same scope has failed") - void testCreateJobIfPrevJobFailed() throws IOException { - final Optional jobId1 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG); - assertTrue(jobId1.isPresent()); - - jobPersistence.failJob(jobId1.get()); - final Optional jobId2 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG); - assertTrue(jobId2.isPresent()); - - final Job actual = jobPersistence.getJob(jobId2.get()); - final Job expected = createJob(jobId2.get(), SYNC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()); - assertEquals(expected, actual); - } - - } - - @Nested - @DisplayName("When failing job") - class FailJob { - - @Test - @DisplayName("Should set job status to failed") - void failJob() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final Job created = jobPersistence.getJob(jobId); - - when(timeSupplier.get()).thenReturn(Instant.ofEpochMilli(4242)); - jobPersistence.failJob(jobId); - - final Job updated = jobPersistence.getJob(jobId); - assertEquals(JobStatus.FAILED, updated.getStatus()); - assertNotEquals(created.getUpdatedAtInSecond(), updated.getUpdatedAtInSecond()); - } - - @Test - @DisplayName("Should not raise an exception 
if job is already succeeded") - void testFailJobAlreadySucceeded() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.succeedAttempt(jobId, attemptNumber); - - assertDoesNotThrow(() -> jobPersistence.failJob(jobId)); - - final Job updated = jobPersistence.getJob(jobId); - assertEquals(JobStatus.SUCCEEDED, updated.getStatus()); - } - - } - - @Nested - @DisplayName("When getting last replication job") - class GetLastReplicationJob { - - @Test - @DisplayName("Should return nothing if no job exists") - void testGetLastReplicationJobForConnectionIdEmpty() throws IOException { - final Optional actual = jobPersistence.getLastReplicationJob(CONNECTION_ID); - - assertTrue(actual.isEmpty()); - } - - @Test - @DisplayName("Should return the last sync job") - void testGetLastSyncJobForConnectionId() throws IOException { - final long jobId1 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(jobId1, jobPersistence.createAttempt(jobId1, LOG_PATH)); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - final long jobId2 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - - final Optional actual = jobPersistence.getLastReplicationJob(CONNECTION_ID); - final Job expected = createJob(jobId2, SYNC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), afterNow.getEpochSecond()); - - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should return the last reset job") - void testGetLastResetJobForConnectionId() throws IOException { - final long jobId1 = jobPersistence.enqueueJob(SCOPE, RESET_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(jobId1, jobPersistence.createAttempt(jobId1, LOG_PATH)); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); 
- final long jobId2 = jobPersistence.enqueueJob(SCOPE, RESET_JOB_CONFIG).orElseThrow(); - - final Optional actual = jobPersistence.getLastReplicationJob(CONNECTION_ID); - final Job expected = createJob(jobId2, RESET_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), afterNow.getEpochSecond()); - - assertEquals(Optional.of(expected), actual); - } - - } - - @Nested - @DisplayName("When getting last sync job") - class GetLastSyncJob { - - @Test - @DisplayName("Should return nothing if no job exists") - void testGetLastSyncJobForConnectionIdEmpty() throws IOException { - final Optional actual = jobPersistence.getLastSyncJob(CONNECTION_ID); - - assertTrue(actual.isEmpty()); - } - - @Test - @DisplayName("Should return the last enqueued sync job") - void testGetLastSyncJobForConnectionId() throws IOException { - final long jobId1 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(jobId1, jobPersistence.createAttempt(jobId1, LOG_PATH)); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - final long jobId2 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId2, LOG_PATH); - - // Return the latest sync job even if failed - jobPersistence.failAttempt(jobId2, attemptNumber); - final Attempt attempt = jobPersistence.getJob(jobId2).getAttempts().stream().findFirst().orElseThrow(); - jobPersistence.failJob(jobId2); - - final Optional actual = jobPersistence.getLastSyncJob(CONNECTION_ID); - final Job expected = createJob(jobId2, SYNC_JOB_CONFIG, JobStatus.FAILED, List.of(attempt), afterNow.getEpochSecond()); - - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should return nothing if only reset job exists") - void testGetLastSyncJobForConnectionIdEmptyBecauseOnlyReset() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, RESET_JOB_CONFIG).orElseThrow(); - 
jobPersistence.succeedAttempt(jobId, jobPersistence.createAttempt(jobId, LOG_PATH)); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - - final Optional actual = jobPersistence.getLastSyncJob(CONNECTION_ID); - - assertTrue(actual.isEmpty()); - } - - } - - @Nested - @DisplayName("When getting the last sync job for multiple connections") - class GetLastSyncJobForConnections { - - private static final UUID CONNECTION_ID_1 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_2 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_3 = UUID.randomUUID(); - private static final String SCOPE_1 = CONNECTION_ID_1.toString(); - private static final String SCOPE_2 = CONNECTION_ID_2.toString(); - private static final String SCOPE_3 = CONNECTION_ID_3.toString(); - private static final List CONNECTION_IDS = List.of(CONNECTION_ID_1, CONNECTION_ID_2, CONNECTION_ID_3); - - @Test - @DisplayName("Should return nothing if no sync job exists") - void testGetLastSyncJobsForConnectionsEmpty() throws IOException { - final List actual = jobPersistence.getLastSyncJobForConnections(CONNECTION_IDS); - - assertTrue(actual.isEmpty()); - } - - @Test - @DisplayName("Should return the last enqueued sync job for each connection") - void testGetLastSyncJobForConnections() throws IOException { - final long scope1Job1 = jobPersistence.enqueueJob(SCOPE_1, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(scope1Job1, jobPersistence.createAttempt(scope1Job1, LOG_PATH)); - - final long scope2Job1 = jobPersistence.enqueueJob(SCOPE_2, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(scope2Job1, jobPersistence.createAttempt(scope2Job1, LOG_PATH)); - - final long scope3Job1 = jobPersistence.enqueueJob(SCOPE_3, SYNC_JOB_CONFIG).orElseThrow(); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - - final long scope1Job2 = jobPersistence.enqueueJob(SCOPE_1, 
SYNC_JOB_CONFIG).orElseThrow(); - final int scope1Job2AttemptNumber = jobPersistence.createAttempt(scope1Job2, LOG_PATH); - - // should return the latest sync job even if failed - jobPersistence.failAttempt(scope1Job2, scope1Job2AttemptNumber); - final Attempt scope1Job2attempt = jobPersistence.getJob(scope1Job2).getAttempts().stream().findFirst().orElseThrow(); - jobPersistence.failJob(scope1Job2); - - // will leave this job running - final long scope2Job2 = jobPersistence.enqueueJob(SCOPE_2, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.createAttempt(scope2Job2, LOG_PATH); - final Attempt scope2Job2attempt = jobPersistence.getJob(scope2Job2).getAttempts().stream().findFirst().orElseThrow(); - - final List actual = jobPersistence.getLastSyncJobForConnections(CONNECTION_IDS); - final List expected = new ArrayList<>(); - expected.add(createJob(scope1Job2, SYNC_JOB_CONFIG, JobStatus.FAILED, List.of(scope1Job2attempt), afterNow.getEpochSecond(), SCOPE_1)); - expected.add(createJob(scope2Job2, SYNC_JOB_CONFIG, JobStatus.RUNNING, List.of(scope2Job2attempt), afterNow.getEpochSecond(), SCOPE_2)); - expected.add(createJob(scope3Job1, SYNC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond(), SCOPE_3)); - - assertTrue(expected.size() == actual.size() && expected.containsAll(actual) && actual.containsAll(expected)); - } - - @Test - @DisplayName("Should return nothing if only reset job exists") - void testGetLastSyncJobsForConnectionsEmptyBecauseOnlyReset() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE_1, RESET_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(jobId, jobPersistence.createAttempt(jobId, LOG_PATH)); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - - final List actual = jobPersistence.getLastSyncJobForConnections(CONNECTION_IDS); - - assertTrue(actual.isEmpty()); - } - - } - - @Nested - @DisplayName("When getting the last running sync job for 
multiple connections") - class GetRunningSyncJobForConnections { - - private static final UUID CONNECTION_ID_1 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_2 = UUID.randomUUID(); - private static final UUID CONNECTION_ID_3 = UUID.randomUUID(); - private static final String SCOPE_1 = CONNECTION_ID_1.toString(); - private static final String SCOPE_2 = CONNECTION_ID_2.toString(); - private static final String SCOPE_3 = CONNECTION_ID_3.toString(); - private static final List CONNECTION_IDS = List.of(CONNECTION_ID_1, CONNECTION_ID_2, CONNECTION_ID_3); - - @Test - @DisplayName("Should return nothing if no sync job exists") - void testGetRunningSyncJobsForConnectionsEmpty() throws IOException { - final List actual = jobPersistence.getRunningSyncJobForConnections(CONNECTION_IDS); - - assertTrue(actual.isEmpty()); - } - - @Test - @DisplayName("Should return the last running sync job for each connection") - void testGetRunningSyncJobsForConnections() throws IOException { - // succeeded jobs should not be present in the result - final long scope1Job1 = jobPersistence.enqueueJob(SCOPE_1, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(scope1Job1, jobPersistence.createAttempt(scope1Job1, LOG_PATH)); - - // fail scope2's first job, but later start a running job that should show up in the result - final long scope2Job1 = jobPersistence.enqueueJob(SCOPE_2, SYNC_JOB_CONFIG).orElseThrow(); - final int scope2Job1AttemptNumber = jobPersistence.createAttempt(scope2Job1, LOG_PATH); - jobPersistence.failAttempt(scope2Job1, scope2Job1AttemptNumber); - jobPersistence.failJob(scope2Job1); - - // pending jobs should be present in the result - final long scope3Job1 = jobPersistence.enqueueJob(SCOPE_3, SYNC_JOB_CONFIG).orElseThrow(); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - - // create a running job/attempt for scope2 - final long scope2Job2 = jobPersistence.enqueueJob(SCOPE_2, 
SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.createAttempt(scope2Job2, LOG_PATH); - final Attempt scope2Job2attempt = jobPersistence.getJob(scope2Job2).getAttempts().stream().findFirst().orElseThrow(); - - final List expected = new ArrayList<>(); - expected.add(createJob(scope2Job2, SYNC_JOB_CONFIG, JobStatus.RUNNING, List.of(scope2Job2attempt), afterNow.getEpochSecond(), SCOPE_2)); - expected.add(createJob(scope3Job1, SYNC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond(), SCOPE_3)); - - final List actual = jobPersistence.getRunningSyncJobForConnections(CONNECTION_IDS); - assertTrue(expected.size() == actual.size() && expected.containsAll(actual) && actual.containsAll(expected)); - } - - @Test - @DisplayName("Should return nothing if only a running reset job exists") - void testGetRunningSyncJobsForConnectionsEmptyBecauseOnlyReset() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE_1, RESET_JOB_CONFIG).orElseThrow(); - jobPersistence.createAttempt(jobId, LOG_PATH); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - - final List actual = jobPersistence.getRunningSyncJobForConnections(CONNECTION_IDS); - - assertTrue(actual.isEmpty()); - } - - } - - @Nested - @DisplayName("When getting first replication job") - class GetFirstReplicationJob { - - @Test - @DisplayName("Should return nothing if no job exists") - void testGetFirstSyncJobForConnectionIdEmpty() throws IOException { - final Optional actual = jobPersistence.getFirstReplicationJob(CONNECTION_ID); - - assertTrue(actual.isEmpty()); - } - - @Test - @DisplayName("Should return the first job") - void testGetFirstSyncJobForConnectionId() throws IOException { - final long jobId1 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(jobId1, jobPersistence.createAttempt(jobId1, LOG_PATH)); - final List attemptsWithJobInfo = 
jobPersistence.listAttemptsWithJobInfo(SYNC_JOB_CONFIG.getConfigType(), Instant.EPOCH); - final List attempts = Collections.singletonList(attemptsWithJobInfo.get(0).getAttempt()); - - final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - - final Optional actual = jobPersistence.getFirstReplicationJob(CONNECTION_ID); - final Job expected = createJob(jobId1, SYNC_JOB_CONFIG, JobStatus.SUCCEEDED, attempts, NOW.getEpochSecond()); - - assertEquals(Optional.of(expected), actual); - } - - } - - @Nested - @DisplayName("When getting next job") - class GetNextJob { - - @Test - @DisplayName("Should always return oldest pending job") - void testGetOldestPendingJob() throws IOException { - final long jobId = createJobAt(NOW); - createJobAt(NOW.plusSeconds(1000)); - - final Optional actual = jobPersistence.getNextJob(); - - final Job expected = createJob(jobId, SPEC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()); - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should return nothing if no jobs pending") - void testGetOldestPendingJobOnlyPendingJobs() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - jobPersistence.cancelJob(jobId); - - final Optional actual = jobPersistence.getNextJob(); - - assertTrue(actual.isEmpty()); - } - - @Test - @DisplayName("Should return job if job is pending even if it has multiple failed attempts") - void testGetNextJobWithMultipleAttempts() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - jobPersistence.failAttempt(jobId, jobPersistence.createAttempt(jobId, LOG_PATH)); - jobPersistence.failAttempt(jobId, jobPersistence.createAttempt(jobId, LOG_PATH)); - jobPersistence.resetJob(jobId); - - final Optional actual = jobPersistence.getNextJob(); - - final Job expected = 
createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.PENDING, - Lists.newArrayList( - createAttempt(0, jobId, AttemptStatus.FAILED, LOG_PATH), - createAttempt(1, jobId, AttemptStatus.FAILED, LOG_PATH)), - NOW.getEpochSecond()); - - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should return oldest pending job even if another job with same scope failed") - void testGetOldestPendingJobWithOtherJobWithSameScopeFailed() throws IOException { - // create a job and set it to incomplete. - final long jobId = createJobAt(NOW.minusSeconds(1000)); - jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.failJob(jobId); - - // create a pending job. - final long jobId2 = createJobAt(NOW); - - final Optional actual = jobPersistence.getNextJob(); - - final Job expected = createJob(jobId2, SPEC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()); - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should return oldest pending job even if another job with same scope cancelled") - void testGetOldestPendingJobWithOtherJobWithSameScopeCancelled() throws IOException { - // create a job and set it to incomplete. - final long jobId = createJobAt(NOW.minusSeconds(1000)); - jobPersistence.cancelJob(jobId); - - // create a pending job. - final long jobId2 = createJobAt(NOW); - - final Optional actual = jobPersistence.getNextJob(); - - final Job expected = createJob(jobId2, SPEC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()); - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should return oldest pending job even if another job with same scope succeeded") - void testGetOldestPendingJobWithOtherJobWithSameScopeSucceeded() throws IOException { - // create a job and set it to incomplete. 
- final long jobId = createJobAt(NOW.minusSeconds(1000)); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.succeedAttempt(jobId, attemptNumber); - - // create a pending job. - final long jobId2 = createJobAt(NOW); - - final Optional actual = jobPersistence.getNextJob(); - - final Job expected = createJob(jobId2, SPEC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()); - assertEquals(Optional.of(expected), actual); - } - - @Test - @DisplayName("Should not return pending job if job with same scope is running") - void testGetOldestPendingJobWithOtherJobWithSameScopeRunning() throws IOException { - // create a job and set it to running. - final long jobId = createJobAt(NOW.minusSeconds(1000)); - jobPersistence.createAttempt(jobId, LOG_PATH); - - // create a pending job. - createJobAt(NOW); - - final Optional actual = jobPersistence.getNextJob(); - - assertTrue(actual.isEmpty()); - } - - @Test - @DisplayName("Should not return pending job if job with same scope is incomplete") - void testGetOldestPendingJobWithOtherJobWithSameScopeIncomplete() throws IOException { - // create a job and set it to incomplete. - final long jobId = createJobAt(NOW.minusSeconds(1000)); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.failAttempt(jobId, attemptNumber); - - // create a pending job. 
- final Instant afterNow = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(afterNow); - createJobAt(NOW); - - final Optional actual = jobPersistence.getNextJob(); - - assertTrue(actual.isEmpty()); - } - - } - - @Nested - @DisplayName("When getting the count of jobs") - class GetJobCount { - - @Test - @DisplayName("Should return the total job count for the connection") - void testGetJobCount() throws IOException { - final int numJobsToCreate = 10; - for (int i = 0; i < numJobsToCreate; i++) { - jobPersistence.enqueueJob(CONNECTION_ID.toString(), SPEC_JOB_CONFIG); - } - - final Long actualJobCount = jobPersistence.getJobCount(Set.of(SPEC_JOB_CONFIG.getConfigType()), CONNECTION_ID.toString()); - - assertEquals(numJobsToCreate, actualJobCount); - } - - @Test - @DisplayName("Should return 0 if there are no jobs for this connection") - void testGetJobCountNoneForConnection() throws IOException { - final UUID otherConnectionId1 = UUID.randomUUID(); - final UUID otherConnectionId2 = UUID.randomUUID(); - - jobPersistence.enqueueJob(otherConnectionId1.toString(), SPEC_JOB_CONFIG); - jobPersistence.enqueueJob(otherConnectionId2.toString(), SPEC_JOB_CONFIG); - - final Long actualJobCount = jobPersistence.getJobCount(Set.of(SPEC_JOB_CONFIG.getConfigType()), CONNECTION_ID.toString()); - - assertEquals(0, actualJobCount); - } - - } - - @Nested - @DisplayName("When listing jobs, use paged results") - class ListJobs { - - @Test - @DisplayName("Should return the correct page of results with multiple pages of history") - void testListJobsByPage() throws IOException { - final List ids = new ArrayList(); - for (int i = 0; i < 50; i++) { - final long jobId = jobPersistence.enqueueJob(CONNECTION_ID.toString(), SPEC_JOB_CONFIG).orElseThrow(); - ids.add(jobId); - - // create two attempts per job to verify pagination is applied at the job record level - final int attemptNum1 = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.failAttempt(jobId, attemptNum1); - 
jobPersistence.createAttempt(jobId, LOG_PATH); - - // also create a job for another connection, to verify the query is properly filtering down to only - // jobs for the desired connection - jobPersistence.enqueueJob(CONNECTION_ID2.toString(), SPEC_JOB_CONFIG).orElseThrow(); - } - final int pagesize = 10; - final int offset = 3; - - final List actualList = jobPersistence.listJobs(SPEC_JOB_CONFIG.getConfigType(), CONNECTION_ID.toString(), pagesize, offset); - assertEquals(pagesize, actualList.size()); - assertEquals(ids.get(ids.size() - 1 - offset), actualList.get(0).getId()); - } - - @Test - @DisplayName("Should return the results in the correct sort order") - void testListJobsSortsDescending() throws IOException { - final List ids = new ArrayList(); - for (int i = 0; i < 100; i++) { - // These have strictly the same created_at due to the setup() above, so should come back sorted by - // id desc instead. - final long jobId = jobPersistence.enqueueJob(CONNECTION_ID.toString(), SPEC_JOB_CONFIG).orElseThrow(); - ids.add(jobId); - } - final int pagesize = 200; - final int offset = 0; - final List actualList = jobPersistence.listJobs(SPEC_JOB_CONFIG.getConfigType(), CONNECTION_ID.toString(), pagesize, offset); - for (int i = 0; i < 100; i++) { - assertEquals(ids.get(ids.size() - (i + 1)), actualList.get(i).getId(), "Job ids should have been in order but weren't."); - } - } - - @Test - @DisplayName("Should list all jobs") - void testListJobs() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - - final List actualList = jobPersistence.listJobs(SPEC_JOB_CONFIG.getConfigType(), CONNECTION_ID.toString(), 9999, 0); - - final Job actual = actualList.get(0); - final Job expected = createJob(jobId, SPEC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()); - - assertEquals(1, actualList.size()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should list all jobs matching multiple 
config types") - void testListJobsMultipleConfigTypes() throws IOException { - final long specJobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final long checkJobId = jobPersistence.enqueueJob(SCOPE, CHECK_JOB_CONFIG).orElseThrow(); - // add a third config type that is not added in the listJobs request, to verify that it is not - // included in the results - jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - - final List actualList = - jobPersistence.listJobs(Set.of(SPEC_JOB_CONFIG.getConfigType(), CHECK_JOB_CONFIG.getConfigType()), CONNECTION_ID.toString(), 9999, 0); - - final List expectedList = - List.of(createJob(checkJobId, CHECK_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond()), - createJob(specJobId, SPEC_JOB_CONFIG, JobStatus.PENDING, Collections.emptyList(), NOW.getEpochSecond())); - - assertEquals(expectedList, actualList); - } - - @Test - @DisplayName("Should list all jobs with all attempts") - void testListJobsWithMultipleAttempts() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber0 = jobPersistence.createAttempt(jobId, LOG_PATH); - - jobPersistence.failAttempt(jobId, attemptNumber0); - - final Path secondAttemptLogPath = LOG_PATH.resolve("2"); - final int attemptNumber1 = jobPersistence.createAttempt(jobId, secondAttemptLogPath); - - jobPersistence.succeedAttempt(jobId, attemptNumber1); - - final List actualList = jobPersistence.listJobs(SPEC_JOB_CONFIG.getConfigType(), CONNECTION_ID.toString(), 9999, 0); - - final Job actual = actualList.get(0); - final Job expected = createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.SUCCEEDED, - Lists.newArrayList( - createAttempt(0, jobId, AttemptStatus.FAILED, LOG_PATH), - createAttempt(1, jobId, AttemptStatus.SUCCEEDED, secondAttemptLogPath)), - NOW.getEpochSecond()); - - assertEquals(1, actualList.size()); - assertEquals(expected, actual); - } - - @Test - 
@DisplayName("Should list all jobs with all attempts in descending order") - void testListJobsWithMultipleAttemptsInDescOrder() throws IOException { - // create first job with multiple attempts - final var jobId1 = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final var job1Attempt1 = jobPersistence.createAttempt(jobId1, LOG_PATH); - jobPersistence.failAttempt(jobId1, job1Attempt1); - final var job1Attempt2LogPath = LOG_PATH.resolve("2"); - final int job1Attempt2 = jobPersistence.createAttempt(jobId1, job1Attempt2LogPath); - jobPersistence.succeedAttempt(jobId1, job1Attempt2); - - // create second job with multiple attempts - final var laterTime = NOW.plusSeconds(1000); - when(timeSupplier.get()).thenReturn(laterTime); - final var jobId2 = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final var job2Attempt1LogPath = LOG_PATH.resolve("3"); - final var job2Attempt1 = jobPersistence.createAttempt(jobId2, job2Attempt1LogPath); - jobPersistence.succeedAttempt(jobId2, job2Attempt1); - - final List actualList = jobPersistence.listJobs(SPEC_JOB_CONFIG.getConfigType(), CONNECTION_ID.toString(), 9999, 0); - - assertEquals(2, actualList.size()); - assertEquals(jobId2, actualList.get(0).getId()); - } - - @Test - @DisplayName("Should list jobs including the specified job") - void testListJobsIncludingId() throws IOException { - final List ids = new ArrayList<>(); - for (int i = 0; i < 100; i++) { - // This makes each enqueued job have an increasingly higher createdAt time - when(timeSupplier.get()).thenReturn(Instant.ofEpochSecond(i)); - // Alternate between spec and check job config types to verify that both config types are fetched - // properly - final JobConfig jobConfig = i % 2 == 0 ? 
SPEC_JOB_CONFIG : CHECK_JOB_CONFIG; - final long jobId = jobPersistence.enqueueJob(CONNECTION_ID.toString(), jobConfig).orElseThrow(); - ids.add(jobId); - // also create an attempt for each job to verify that joining with attempts does not cause failures - jobPersistence.createAttempt(jobId, LOG_PATH); - } - - final int includingIdIndex = 90; - final int pageSize = 25; - final List actualList = jobPersistence.listJobsIncludingId(Set.of(SPEC_JOB_CONFIG.getConfigType(), CHECK_JOB_CONFIG.getConfigType()), - CONNECTION_ID.toString(), ids.get(includingIdIndex), pageSize); - final List expectedJobIds = Lists.reverse(ids.subList(ids.size() - pageSize, ids.size())); - assertEquals(expectedJobIds, actualList.stream().map(Job::getId).toList()); - } - - @Test - @DisplayName("Should list jobs including the specified job, including multiple pages if necessary") - void testListJobsIncludingIdMultiplePages() throws IOException { - final List ids = new ArrayList<>(); - for (int i = 0; i < 100; i++) { - // This makes each enqueued job have an increasingly higher createdAt time - when(timeSupplier.get()).thenReturn(Instant.ofEpochSecond(i)); - // Alternate between spec and check job config types to verify that both config types are fetched - // properly - final JobConfig jobConfig = i % 2 == 0 ? 
SPEC_JOB_CONFIG : CHECK_JOB_CONFIG; - final long jobId = jobPersistence.enqueueJob(CONNECTION_ID.toString(), jobConfig).orElseThrow(); - ids.add(jobId); - // also create an attempt for each job to verify that joining with attempts does not cause failures - jobPersistence.createAttempt(jobId, LOG_PATH); - } - - // including id is on the second page, so response should contain two pages of jobs - final int includingIdIndex = 60; - final int pageSize = 25; - final List actualList = jobPersistence.listJobsIncludingId(Set.of(SPEC_JOB_CONFIG.getConfigType(), CHECK_JOB_CONFIG.getConfigType()), - CONNECTION_ID.toString(), ids.get(includingIdIndex), pageSize); - final List expectedJobIds = Lists.reverse(ids.subList(ids.size() - (pageSize * 2), ids.size())); - assertEquals(expectedJobIds, actualList.stream().map(Job::getId).toList()); - } - - @Test - @DisplayName("Should return an empty list if there is no job with the includingJob ID for this connection") - void testListJobsIncludingIdFromWrongConnection() throws IOException { - for (int i = 0; i < 10; i++) { - jobPersistence.enqueueJob(CONNECTION_ID.toString(), SPEC_JOB_CONFIG); - } - - final long otherConnectionJobId = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SPEC_JOB_CONFIG).orElseThrow(); - - final List actualList = - jobPersistence.listJobsIncludingId(Set.of(SPEC_JOB_CONFIG.getConfigType()), CONNECTION_ID.toString(), otherConnectionJobId, 25); - assertEquals(List.of(), actualList); - } - - } - - @Nested - @DisplayName("When listing job with status") - class ListJobsWithStatus { - - @Test - @DisplayName("Should only list jobs with requested status") - void testListJobsWithStatus() throws IOException { - // not failed. 
- jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG); - // failed - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - jobPersistence.failAttempt(jobId, attemptNumber); - - final List actualList = jobPersistence.listJobsWithStatus(JobStatus.INCOMPLETE); - - final Job actual = actualList.get(0); - final Job expected = createJob( - jobId, - SPEC_JOB_CONFIG, - JobStatus.INCOMPLETE, - Lists.newArrayList( - createAttempt(0, jobId, AttemptStatus.FAILED, LOG_PATH)), - NOW.getEpochSecond()); - - assertEquals(1, actualList.size()); - assertEquals(expected, actual); - } - - @Test - @DisplayName("Should only list jobs with requested status and config type") - void testListJobsWithStatusAndConfigType() throws IOException, InterruptedException { - // not failed. - final long pendingSpecJobId = jobPersistence.enqueueJob(SPEC_SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final long pendingSyncJobId = jobPersistence.enqueueJob(SYNC_SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final long pendingCheckJobId = jobPersistence.enqueueJob(CHECK_SCOPE, CHECK_JOB_CONFIG).orElseThrow(); - - // failed - final long failedSpecJobId = jobPersistence.enqueueJob(SPEC_SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(failedSpecJobId, LOG_PATH); - jobPersistence.failAttempt(failedSpecJobId, attemptNumber); - - final List allPendingJobs = jobPersistence.listJobsWithStatus(JobStatus.PENDING); - - final Job expectedPendingSpecJob = - createJob(pendingSpecJobId, SPEC_JOB_CONFIG, JobStatus.PENDING, Lists.newArrayList(), NOW.getEpochSecond(), SPEC_SCOPE); - final Job expectedPendingCheckJob = - createJob(pendingCheckJobId, CHECK_JOB_CONFIG, JobStatus.PENDING, Lists.newArrayList(), NOW.getEpochSecond(), CHECK_SCOPE); - final Job expectedPendingSyncJob = - createJob(pendingSyncJobId, SYNC_JOB_CONFIG, JobStatus.PENDING, Lists.newArrayList(), NOW.getEpochSecond(), 
SYNC_SCOPE); - - final List allPendingSyncAndSpecJobs = jobPersistence.listJobsWithStatus(Set.of(ConfigType.GET_SPEC, ConfigType.SYNC), JobStatus.PENDING); - - final List incompleteJobs = jobPersistence.listJobsWithStatus(SPEC_JOB_CONFIG.getConfigType(), JobStatus.INCOMPLETE); - final Job actualIncompleteJob = incompleteJobs.get(0); - final Job expectedIncompleteJob = createJob( - failedSpecJobId, - SPEC_JOB_CONFIG, - JobStatus.INCOMPLETE, - Lists.newArrayList( - createAttempt(0, failedSpecJobId, AttemptStatus.FAILED, LOG_PATH)), - NOW.getEpochSecond(), - SPEC_SCOPE); - - assertEquals(Sets.newHashSet(expectedPendingCheckJob, expectedPendingSpecJob, expectedPendingSyncJob), Sets.newHashSet(allPendingJobs)); - assertEquals(Sets.newHashSet(expectedPendingSpecJob, expectedPendingSyncJob), Sets.newHashSet(allPendingSyncAndSpecJobs)); - - assertEquals(1, incompleteJobs.size()); - assertEquals(expectedIncompleteJob, actualIncompleteJob); - } - - @Test - @DisplayName("Should only list jobs for the requested connection and with the requested statuses and config types") - void testListJobsWithStatusesAndConfigTypesForConnection() throws IOException, InterruptedException { - final UUID desiredConnectionId = UUID.randomUUID(); - final UUID otherConnectionId = UUID.randomUUID(); - - // desired connection, statuses, and config types - final long desiredJobId1 = jobPersistence.enqueueJob(desiredConnectionId.toString(), SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(desiredJobId1, jobPersistence.createAttempt(desiredJobId1, LOG_PATH)); - final long desiredJobId2 = jobPersistence.enqueueJob(desiredConnectionId.toString(), SYNC_JOB_CONFIG).orElseThrow(); - final long desiredJobId3 = jobPersistence.enqueueJob(desiredConnectionId.toString(), CHECK_JOB_CONFIG).orElseThrow(); - jobPersistence.succeedAttempt(desiredJobId3, jobPersistence.createAttempt(desiredJobId3, LOG_PATH)); - final long desiredJobId4 = jobPersistence.enqueueJob(desiredConnectionId.toString(), 
CHECK_JOB_CONFIG).orElseThrow(); - - // right connection id and status, wrong config type - jobPersistence.enqueueJob(desiredConnectionId.toString(), SPEC_JOB_CONFIG).orElseThrow(); - // right config type and status, wrong connection id - jobPersistence.enqueueJob(otherConnectionId.toString(), SYNC_JOB_CONFIG).orElseThrow(); - // right connection id and config type, wrong status - final long otherJobId3 = jobPersistence.enqueueJob(desiredConnectionId.toString(), CHECK_JOB_CONFIG).orElseThrow(); - jobPersistence.failAttempt(otherJobId3, jobPersistence.createAttempt(otherJobId3, LOG_PATH)); - - final List actualJobs = jobPersistence.listJobsForConnectionWithStatuses(desiredConnectionId, - Set.of(ConfigType.SYNC, ConfigType.CHECK_CONNECTION_DESTINATION), Set.of(JobStatus.PENDING, JobStatus.SUCCEEDED)); - - final Job expectedDesiredJob1 = createJob(desiredJobId1, SYNC_JOB_CONFIG, JobStatus.SUCCEEDED, - Lists.newArrayList(createAttempt(0, desiredJobId1, AttemptStatus.SUCCEEDED, LOG_PATH)), - NOW.getEpochSecond(), desiredConnectionId.toString()); - final Job expectedDesiredJob2 = - createJob(desiredJobId2, SYNC_JOB_CONFIG, JobStatus.PENDING, Lists.newArrayList(), NOW.getEpochSecond(), desiredConnectionId.toString()); - final Job expectedDesiredJob3 = createJob(desiredJobId3, CHECK_JOB_CONFIG, JobStatus.SUCCEEDED, - Lists.newArrayList(createAttempt(0, desiredJobId3, AttemptStatus.SUCCEEDED, LOG_PATH)), - NOW.getEpochSecond(), desiredConnectionId.toString()); - final Job expectedDesiredJob4 = - createJob(desiredJobId4, CHECK_JOB_CONFIG, JobStatus.PENDING, Lists.newArrayList(), NOW.getEpochSecond(), desiredConnectionId.toString()); - - assertEquals(Sets.newHashSet(expectedDesiredJob1, expectedDesiredJob2, expectedDesiredJob3, expectedDesiredJob4), Sets.newHashSet(actualJobs)); - } - - } - - @Nested - @DisplayName("When resetting job") - class ResetJob { - - @Test - @DisplayName("Should reset job and put job in pending state") - void testResetJob() throws IOException { - 
final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(jobId, LOG_PATH); - final Job created = jobPersistence.getJob(jobId); - - jobPersistence.failAttempt(jobId, attemptNumber); - when(timeSupplier.get()).thenReturn(Instant.ofEpochMilli(4242)); - jobPersistence.resetJob(jobId); - - final Job updated = jobPersistence.getJob(jobId); - assertEquals(JobStatus.PENDING, updated.getStatus()); - assertNotEquals(created.getUpdatedAtInSecond(), updated.getUpdatedAtInSecond()); - } - - @Test - @DisplayName("Should not be able to reset a cancelled job") - void testResetJobCancelled() throws IOException { - final long jobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - - jobPersistence.cancelJob(jobId); - assertDoesNotThrow(() -> jobPersistence.resetJob(jobId)); - - final Job updated = jobPersistence.getJob(jobId); - assertEquals(JobStatus.CANCELLED, updated.getStatus()); - } - - } - - @Nested - @DisplayName("When purging job history") - class PurgeJobHistory { - - private Job persistJobForJobHistoryTesting(final String scope, final JobConfig jobConfig, final JobStatus status, final LocalDateTime runDate) - throws IOException, SQLException { - final Optional id = jobDatabase.query( - ctx -> ctx.fetch( - "INSERT INTO jobs(config_type, scope, created_at, updated_at, status, config) " + - "SELECT CAST(? AS JOB_CONFIG_TYPE), ?, ?, ?, CAST(? AS JOB_STATUS), CAST(? 
as JSONB) " + - "RETURNING id ", - Sqls.toSqlName(jobConfig.getConfigType()), - scope, - runDate, - runDate, - Sqls.toSqlName(status), - Jsons.serialize(jobConfig))) - .stream() - .findFirst() - .map(r -> r.getValue("id", Long.class)); - return jobPersistence.getJob(id.get()); - } - - private void persistAttemptForJobHistoryTesting(final Job job, final String logPath, final LocalDateTime runDate, final boolean shouldHaveState) - throws IOException, SQLException { - final String attemptOutputWithState = "{\n" - + " \"sync\": {\n" - + " \"state\": {\n" - + " \"state\": {\n" - + " \"bookmarks\": {" - + "}}}}}"; - final String attemptOutputWithoutState = "{\n" - + " \"sync\": {\n" - + " \"output_catalog\": {" - + "}}}"; - jobDatabase.query(ctx -> ctx.fetch( - "INSERT INTO attempts(job_id, attempt_number, log_path, status, created_at, updated_at, output) " - + "VALUES(?, ?, ?, CAST(? AS ATTEMPT_STATUS), ?, ?, CAST(? as JSONB)) RETURNING attempt_number", - job.getId(), - job.getAttemptsCount(), - logPath, - Sqls.toSqlName(AttemptStatus.FAILED), - runDate, - runDate, - shouldHaveState ? attemptOutputWithState : attemptOutputWithoutState) - .stream() - .findFirst() - .map(r -> r.get("attempt_number", Integer.class)) - .orElseThrow(() -> new RuntimeException("This should not happen"))); - } - - /** - * Testing job history deletion is sensitive to exactly how the constants are configured for - * controlling deletion logic. Thus, the test case injects overrides for those constants, testing a - * comprehensive set of combinations to make sure that the logic is robust to reasonable - * configurations. Extreme configurations such as zero-day retention period are not covered. - * - * Business rules for deletions. 1. Job must be older than X days or its conn has excessive number - * of jobs 2. Job cannot be one of the last N jobs on that conn (last N jobs are always kept). 3. - * Job cannot be holding the most recent saved state (most recent saved state is always kept). 
 - *
 - * Testing Goal: Set up jobs according to the parameters passed in. Then delete according to the
 - * rules, and make sure the right number of jobs are left. Against one connection/scope,
 - * <ol>
 - * <li>Setup: create a history of jobs that goes back many days (but produces no more than one job a
 - * day)</li>
 - * <li>Setup: the most recent job with state in it should be at least N jobs back</li>
 - * <li>Assert: ensure that after purging, there are the right number of jobs left (and at least min
 - * recency), including the one with the most recent state.</li>
 - * <li>Assert: ensure that after purging, there are the right number of jobs left (and at least min
 - * recency), including the X most recent</li>
 - * <li>Assert: ensure that after purging, all other job history has been deleted.</li>
 - * </ol>
 - *
 - * @param numJobs How many test jobs to generate; make this enough that all other parameters are
 - *        fully included, for predictable results.
 - * @param tooManyJobs Takes the place of DefaultJobPersistence.JOB_HISTORY_EXCESSIVE_NUMBER_OF_JOBS
 - *        - how many jobs are needed before it ignores date-based age of job when doing deletions.
 - * @param ageCutoff Takes the place of DefaultJobPersistence.JOB_HISTORY_MINIMUM_AGE_IN_DAYS -
 - *        retention period in days for the most recent jobs; older than this gets deleted.
 - * @param recencyCutoff Takes the place of DefaultJobPersistence.JOB_HISTORY_MINIMUM_RECENCY -
 - *        retention period in number of jobs; at least this many jobs will be retained after
 - *        deletion (provided enough existed in the first place).
 - * @param lastStatePosition How far back in the list is the job with the latest saved state. This
 - *        can be manipulated to have the saved-state job inside or prior to the retention period.
 - * @param expectedAfterPurge How many matching jobs are expected after deletion, given the input
 - *        parameters. This was calculated by a human based on understanding the requirements.
 - * @param goalOfTestScenario Description of the purpose of that test scenario, so it's easier to
 - *        maintain and understand failures.
- * - */ - @DisplayName("Should purge older job history but maintain certain more recent ones") - @ParameterizedTest - // Cols: numJobs, tooManyJobsCutoff, ageCutoff, recencyCutoff, lastSavedStatePosition, - // expectedAfterPurge, description - @CsvSource({ - "50,100,10,5,9,10,'Validate age cutoff alone'", - "50,100,10,5,13,11,'Validate saved state after age cutoff'", - "50,100,10,15,9,15,'Validate recency cutoff alone'", - "50,100,10,15,17,16,'Validate saved state after recency cutoff'", - "50,20,30,10,9,10,'Validate excess jobs cutoff alone'", - "50,20,30,10,25,11,'Validate saved state after excess jobs cutoff'", - "50,20,30,20,9,20,'Validate recency cutoff with excess jobs cutoff'", - "50,20,30,20,25,21,'Validate saved state after recency and excess jobs cutoff but before age'", - "50,20,30,20,35,21,'Validate saved state after recency and excess jobs cutoff and after age'" - }) - void testPurgeJobHistory(final int numJobs, - final int tooManyJobs, - final int ageCutoff, - final int recencyCutoff, - final int lastStatePosition, - final int expectedAfterPurge, - final String goalOfTestScenario) - throws IOException, SQLException { - final String CURRENT_SCOPE = UUID.randomUUID().toString(); - - // Decoys - these jobs will help mess up bad sql queries, even though they shouldn't be deleted. - final String DECOY_SCOPE = UUID.randomUUID().toString(); - - // Reconfigure constants to test various combinations of tuning knobs and make sure all work. - final DefaultJobPersistence jobPersistence = - new DefaultJobPersistence(jobDatabase, timeSupplier, ageCutoff, tooManyJobs, recencyCutoff); - - final LocalDateTime fakeNow = LocalDateTime.of(2021, 6, 20, 0, 0); - - // Jobs are created in reverse chronological order; id order is the inverse of old-to-new date - // order. - // The most-recent job is in allJobs[0] which means keeping the 10 most recent is [0-9], simplifying - // testing math as we don't have to care how many jobs total existed and were deleted. 
- final List allJobs = new ArrayList<>(); - final List decoyJobs = new ArrayList<>(); - for (int i = 0; i < numJobs; i++) { - allJobs.add(persistJobForJobHistoryTesting(CURRENT_SCOPE, SYNC_JOB_CONFIG, JobStatus.FAILED, fakeNow.minusDays(i))); - decoyJobs.add(persistJobForJobHistoryTesting(DECOY_SCOPE, SYNC_JOB_CONFIG, JobStatus.FAILED, fakeNow.minusDays(i))); - } - - // At least one job should have state. Find the desired job and add state to it. - final Job lastJobWithState = addStateToJob(allJobs.get(lastStatePosition)); - addStateToJob(decoyJobs.get(lastStatePosition - 1)); - addStateToJob(decoyJobs.get(lastStatePosition + 1)); - - // An older job with state should also exist, so we ensure we picked the most-recent with queries. - addStateToJob(allJobs.get(lastStatePosition + 1)); - - // sanity check that the attempt does have saved state so the purge history sql detects it correctly - assertTrue(lastJobWithState.getAttempts().get(0).getOutput() != null, - goalOfTestScenario + " - missing saved state on job that was supposed to have it."); - - // Execute the job history purge and check what jobs are left. 
- ((DefaultJobPersistence) jobPersistence).purgeJobHistory(fakeNow); - final List afterPurge = jobPersistence.listJobs(ConfigType.SYNC, CURRENT_SCOPE, 9999, 0); - - // Test - contains expected number of jobs and no more than that - assertEquals(expectedAfterPurge, afterPurge.size(), goalOfTestScenario + " - Incorrect number of jobs remain after deletion."); - - // Test - most-recent are actually the most recent by date (see above, reverse order) - for (int i = 0; i < Math.min(ageCutoff, recencyCutoff); i++) { - assertEquals(allJobs.get(i).getId(), afterPurge.get(i).getId(), goalOfTestScenario + " - Incorrect sort order after deletion."); - } - - // Test - job with latest state is always kept despite being older than some cutoffs - assertTrue(afterPurge.contains(lastJobWithState), goalOfTestScenario + " - Missing last job with saved state after deletion."); - } - - private Job addStateToJob(final Job job) throws IOException, SQLException { - persistAttemptForJobHistoryTesting(job, LOG_PATH.toString(), - LocalDateTime.ofEpochSecond(job.getCreatedAtInSecond(), 0, ZoneOffset.UTC), true); - return jobPersistence.getJob(job.getId()); // reload job to include its attempts - } - - } - - @Nested - @DisplayName("When listing job statuses and timestamps with specified connection id and timestamp") - class ListJobStatusAndTimestampWithConnection { - - @Test - @DisplayName("Should list only job statuses and timestamps of specified connection id") - void testConnectionIdFiltering() throws IOException { - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, - DEFAULT_MINIMUM_RECENCY_COUNT); - - // create a connection with a non-relevant connection id that should be ignored for the duration of - // the test - final long wrongConnectionSyncJobId = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SYNC_JOB_CONFIG).orElseThrow(); - final int wrongSyncJobAttemptNumber0 = 
jobPersistence.createAttempt(wrongConnectionSyncJobId, LOG_PATH); - jobPersistence.failAttempt(wrongConnectionSyncJobId, wrongSyncJobAttemptNumber0); - assertEquals(0, jobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH).size()); - - // create a connection with relevant connection id - final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); - - // check to see current status of only relevantly scoped job - final List jobs = - jobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH); - assertEquals(jobs.size(), 1); - assertEquals(JobStatus.INCOMPLETE, jobs.get(0).getStatus()); - } - - @Test - @DisplayName("Should list jobs statuses filtered by different timestamps") - void testTimestampFiltering() throws IOException { - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, - DEFAULT_MINIMUM_RECENCY_COUNT); - - // Create and fail initial job - final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); - jobPersistence.failJob(syncJobId); - - // Check to see current status of all jobs from beginning of time, expecting only 1 job - final List initialJobs = - jobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH); - assertEquals(initialJobs.size(), 1); - assertEquals(JobStatus.FAILED, initialJobs.get(0).getStatus()); - - // Edit time supplier to return later time - final Instant timeAfterFirstJob = NOW.plusSeconds(60); - 
when(timeSupplier.get()).thenReturn(timeAfterFirstJob); - - // Create and succeed second job - final long newSyncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int newSyncJobAttemptNumber = jobPersistence.createAttempt(newSyncJobId, LOG_PATH); - jobPersistence.succeedAttempt(newSyncJobId, newSyncJobAttemptNumber); - - // Check to see current status of all jobs from beginning of time, expecting both jobs in createAt - // descending order (most recent first) - final List allQueryJobs = - jobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH); - assertEquals(2, allQueryJobs.size()); - assertEquals(JobStatus.SUCCEEDED, allQueryJobs.get(0).getStatus()); - assertEquals(JobStatus.FAILED, allQueryJobs.get(1).getStatus()); - - // Look up jobs with a timestamp after the first job. Expecting only the second job status - final List timestampFilteredJobs = - jobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), timeAfterFirstJob); - assertEquals(1, timestampFilteredJobs.size()); - assertEquals(JobStatus.SUCCEEDED, timestampFilteredJobs.get(0).getStatus()); - // TODO: issues will be fixed in scope of https://github.com/airbytehq/airbyte/issues/13192 - // assertTrue(timeAfterFirstJob.getEpochSecond() <= - // timestampFilteredJobs.get(0).getCreatedAtInSecond()); - // assertTrue(timeAfterFirstJob.getEpochSecond() <= - // timestampFilteredJobs.get(0).getUpdatedAtInSecond()); - - // Check to see if timestamp filtering is working by only looking up jobs with timestamp after - // second job. 
Expecting no job status output - final Instant timeAfterSecondJob = timeAfterFirstJob.plusSeconds(60); - assertEquals(0, - jobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), timeAfterSecondJob).size()); - } - - @Test - @DisplayName("Should list jobs statuses of differing status types") - void testMultipleJobStatusTypes() throws IOException { - final Supplier timeSupplier = incrementingSecondSupplier(NOW); - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, - DEFAULT_MINIMUM_RECENCY_COUNT); - - // Create and fail initial job - final long syncJobId1 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId1, LOG_PATH); - jobPersistence.failAttempt(syncJobId1, syncJobAttemptNumber1); - jobPersistence.failJob(syncJobId1); - - // Create and succeed second job - final long syncJobId2 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int syncJobAttemptNumber2 = jobPersistence.createAttempt(syncJobId2, LOG_PATH); - jobPersistence.succeedAttempt(syncJobId2, syncJobAttemptNumber2); - - // Create and cancel third job - final long syncJobId3 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - jobPersistence.createAttempt(syncJobId3, LOG_PATH); - jobPersistence.cancelJob(syncJobId3); - - // Check to see current status of all jobs from beginning of time, expecting all jobs in createAt - // descending order (most recent first) - final List allJobs = - jobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH); - assertEquals(3, allJobs.size()); - assertEquals(JobStatus.CANCELLED, allJobs.get(0).getStatus()); - assertEquals(JobStatus.SUCCEEDED, allJobs.get(1).getStatus()); - assertEquals(JobStatus.FAILED, allJobs.get(2).getStatus()); - } - - @Test - 
@DisplayName("Should list jobs statuses of differing job config types") - void testMultipleConfigTypes() throws IOException { - final Set configTypes = Sets.newHashSet(ConfigType.GET_SPEC, ConfigType.CHECK_CONNECTION_DESTINATION); - final Supplier timeSupplier = incrementingSecondSupplier(NOW); - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, - DEFAULT_MINIMUM_RECENCY_COUNT); - - // pending status - final long failedSpecJobId = jobPersistence.enqueueJob(SCOPE, CHECK_JOB_CONFIG).orElseThrow(); - jobPersistence.failJob(failedSpecJobId); - - // incomplete status - final long incompleteSpecJobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); - final int attemptNumber = jobPersistence.createAttempt(incompleteSpecJobId, LOG_PATH); - jobPersistence.failAttempt(incompleteSpecJobId, attemptNumber); - - // this job should be ignored since it's not in the configTypes we're querying for - jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - - // expect order to be from most recent to least recent - final List allJobs = - jobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, configTypes, Instant.EPOCH); - assertEquals(2, allJobs.size()); - assertEquals(JobStatus.INCOMPLETE, allJobs.get(0).getStatus()); - assertEquals(JobStatus.FAILED, allJobs.get(1).getStatus()); - } - - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/JobNotifierTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/JobNotifierTest.java deleted file mode 100644 index 4bdef113d422..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/JobNotifierTest.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job; - -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.Notification; -import io.airbyte.config.Notification.NotificationType; -import io.airbyte.config.SlackNotificationConfiguration; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.notification.NotificationClient; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.time.Instant; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.time.format.FormatStyle; -import java.util.Collections; -import java.util.List; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -class JobNotifierTest { - - private static final String WEBAPP_URL = "http://localhost:8000"; - private static final Instant NOW = Instant.now(); - private static final String TEST_DOCKER_REPO = "airbyte/test-image"; - private static final String TEST_DOCKER_TAG = "0.1.0"; - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - - private final WebUrlHelper webUrlHelper = new WebUrlHelper(WEBAPP_URL); - - private ConfigRepository configRepository; - private WorkspaceHelper workspaceHelper; - private JobNotifier jobNotifier; - private 
NotificationClient notificationClient; - private TrackingClient trackingClient; - - @BeforeEach - void setup() { - configRepository = mock(ConfigRepository.class); - workspaceHelper = mock(WorkspaceHelper.class); - trackingClient = mock(TrackingClient.class); - - jobNotifier = Mockito.spy(new JobNotifier(webUrlHelper, configRepository, workspaceHelper, trackingClient)); - notificationClient = mock(NotificationClient.class); - when(jobNotifier.getNotificationClient(getSlackNotification())).thenReturn(notificationClient); - } - - @Test - void testFailJob() throws IOException, InterruptedException, JsonValidationException, ConfigNotFoundException { - final Job job = createJob(); - final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() - .withName("source-test") - .withDockerRepository(TEST_DOCKER_REPO) - .withDockerImageTag(TEST_DOCKER_TAG) - .withSourceDefinitionId(UUID.randomUUID()); - final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() - .withName("destination-test") - .withDockerRepository(TEST_DOCKER_REPO) - .withDockerImageTag(TEST_DOCKER_TAG) - .withDestinationDefinitionId(UUID.randomUUID()); - when(configRepository.getSourceDefinitionFromConnection(any())).thenReturn(sourceDefinition); - when(configRepository.getDestinationDefinitionFromConnection(any())).thenReturn(destinationDefinition); - when(configRepository.getStandardSourceDefinition(any())).thenReturn(sourceDefinition); - when(configRepository.getStandardDestinationDefinition(any())).thenReturn(destinationDefinition); - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(getWorkspace()); - when(workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(job.getId())).thenReturn(WORKSPACE_ID); - when(notificationClient.notifyJobFailure(anyString(), anyString(), anyString(), anyString(), anyLong())).thenReturn(true); - - jobNotifier.failJob("JobNotifierTest was running", job); - final DateTimeFormatter 
formatter = DateTimeFormatter.ofLocalizedDateTime(FormatStyle.FULL).withZone(ZoneId.systemDefault()); - verify(notificationClient).notifyJobFailure( - "source-test", - "destination-test", - String.format("sync started on %s, running for 1 day 10 hours 17 minutes 36 seconds, as the JobNotifierTest was running.", - formatter.format(Instant.ofEpochSecond(job.getStartedAtInSecond().get()))), - String.format("http://localhost:8000/workspaces/%s/connections/%s", WORKSPACE_ID, job.getScope()), - job.getId()); - - final Builder metadata = ImmutableMap.builder(); - metadata.put("connection_id", UUID.fromString(job.getScope())); - metadata.put("connector_source_definition_id", sourceDefinition.getSourceDefinitionId()); - metadata.put("connector_source", "source-test"); - metadata.put("connector_source_version", TEST_DOCKER_TAG); - metadata.put("connector_source_docker_repository", sourceDefinition.getDockerRepository()); - metadata.put("connector_destination_definition_id", destinationDefinition.getDestinationDefinitionId()); - metadata.put("connector_destination", "destination-test"); - metadata.put("connector_destination_version", TEST_DOCKER_TAG); - metadata.put("connector_destination_docker_repository", destinationDefinition.getDockerRepository()); - metadata.put("notification_type", NotificationType.SLACK); - verify(trackingClient).track(WORKSPACE_ID, JobNotifier.FAILURE_NOTIFICATION, metadata.build()); - } - - private static StandardWorkspace getWorkspace() { - return new StandardWorkspace() - .withCustomerId(UUID.randomUUID()) - .withNotifications(List.of(getSlackNotification())); - } - - private static Job createJob() { - return new Job( - 10L, - ConfigType.SYNC, - UUID.randomUUID().toString(), - new JobConfig(), - Collections.emptyList(), - JobStatus.FAILED, - NOW.getEpochSecond(), - NOW.getEpochSecond(), - NOW.getEpochSecond() + 123456L); - } - - private static Notification getSlackNotification() { - return new Notification() - 
.withNotificationType(NotificationType.SLACK) - .withSlackConfiguration(new SlackNotificationConfiguration() - .withWebhook("http://random.webhook.url/hooks.slack.com/")); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/ResourceRequirementsUtilsTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/ResourceRequirementsUtilsTest.java deleted file mode 100644 index 477440ac1471..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/ResourceRequirementsUtilsTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.config.ActorDefinitionResourceRequirements; -import io.airbyte.config.JobTypeResourceLimit; -import io.airbyte.config.JobTypeResourceLimit.JobType; -import io.airbyte.config.ResourceRequirements; -import java.util.List; -import org.junit.jupiter.api.Test; - -class ResourceRequirementsUtilsTest { - - private static final String FIVE_HUNDRED_MEM = "500Mi"; - - @Test - void testNoReqsSet() { - final ResourceRequirements result = ResourceRequirementsUtils.getResourceRequirements( - null, - null, - null, - JobType.SYNC); - - assertEquals(new ResourceRequirements(), result); - } - - @Test - void testWorkerDefaultReqsSet() { - final ResourceRequirements workerDefaultReqs = new ResourceRequirements().withCpuRequest("1").withCpuLimit("1"); - final ResourceRequirements reqs = ResourceRequirementsUtils.getResourceRequirements( - null, - null, - workerDefaultReqs, - JobType.SYNC); - - assertEquals(workerDefaultReqs, reqs); - } - - @Test - void testDefinitionDefaultReqsOverrideWorker() { - final ResourceRequirements workerDefaultReqs = new ResourceRequirements().withCpuRequest("1").withCpuLimit("1"); - final ResourceRequirements definitionDefaultReqs = new 
ResourceRequirements().withCpuLimit("2").withMemoryRequest("100Mi"); - final ActorDefinitionResourceRequirements definitionReqs = new ActorDefinitionResourceRequirements().withDefault(definitionDefaultReqs); - - final ResourceRequirements result = ResourceRequirementsUtils.getResourceRequirements( - null, - definitionReqs, - workerDefaultReqs, - JobType.SYNC); - - final ResourceRequirements expectedReqs = new ResourceRequirements() - .withCpuRequest("1") - .withCpuLimit("2") - .withMemoryRequest("100Mi"); - - assertEquals(expectedReqs, result); - } - - @Test - void testJobSpecificReqsOverrideDefault() { - final ResourceRequirements workerDefaultReqs = new ResourceRequirements().withCpuRequest("1").withCpuLimit("1"); - final ResourceRequirements definitionDefaultReqs = new ResourceRequirements().withCpuLimit("2").withMemoryRequest("100Mi"); - final JobTypeResourceLimit jobTypeResourceLimit = new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements( - new ResourceRequirements().withCpuRequest("2").withMemoryRequest("200Mi").withMemoryLimit("300Mi")); - final ActorDefinitionResourceRequirements definitionReqs = new ActorDefinitionResourceRequirements() - .withDefault(definitionDefaultReqs) - .withJobSpecific(List.of(jobTypeResourceLimit)); - - final ResourceRequirements result = ResourceRequirementsUtils.getResourceRequirements( - null, - definitionReqs, - workerDefaultReqs, - JobType.SYNC); - - final ResourceRequirements expectedReqs = new ResourceRequirements() - .withCpuRequest("2") - .withCpuLimit("2") - .withMemoryRequest("200Mi") - .withMemoryLimit("300Mi"); - assertEquals(expectedReqs, result); - } - - @Test - void testConnectionResourceRequirementsOverrideDefault() { - final ResourceRequirements workerDefaultReqs = new ResourceRequirements().withCpuRequest("1"); - final ResourceRequirements definitionDefaultReqs = new ResourceRequirements().withCpuLimit("2").withCpuRequest("2"); - final JobTypeResourceLimit jobTypeResourceLimit = new 
JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements( - new ResourceRequirements().withCpuLimit("3").withMemoryRequest("200Mi")); - final ActorDefinitionResourceRequirements definitionReqs = new ActorDefinitionResourceRequirements() - .withDefault(definitionDefaultReqs) - .withJobSpecific(List.of(jobTypeResourceLimit)); - final ResourceRequirements connectionResourceRequirements = - new ResourceRequirements().withMemoryRequest("400Mi").withMemoryLimit(FIVE_HUNDRED_MEM); - - final ResourceRequirements result = ResourceRequirementsUtils.getResourceRequirements( - connectionResourceRequirements, - definitionReqs, - workerDefaultReqs, - JobType.SYNC); - - final ResourceRequirements expectedReqs = new ResourceRequirements() - .withCpuRequest("2") - .withCpuLimit("3") - .withMemoryRequest("400Mi") - .withMemoryLimit(FIVE_HUNDRED_MEM); - assertEquals(expectedReqs, result); - } - - @Test - void testConnectionResourceRequirementsOverrideWorker() { - final ResourceRequirements workerDefaultReqs = new ResourceRequirements().withCpuRequest("1").withCpuLimit("1"); - final ResourceRequirements connectionResourceRequirements = new ResourceRequirements().withCpuLimit("2").withMemoryLimit(FIVE_HUNDRED_MEM); - - final ResourceRequirements result = ResourceRequirementsUtils.getResourceRequirements(connectionResourceRequirements, workerDefaultReqs); - - final ResourceRequirements expectedReqs = new ResourceRequirements() - .withCpuRequest("1") - .withCpuLimit("2") - .withMemoryLimit(FIVE_HUNDRED_MEM); - assertEquals(expectedReqs, result); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WebUrlHelperTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WebUrlHelperTest.java deleted file mode 100644 index ad1095e41cd4..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WebUrlHelperTest.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 
(c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job; - -import java.util.UUID; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class WebUrlHelperTest { - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final String LOCALHOST_8000 = "http://localhost:8000"; - - @Test - void testGetBaseUrl() { - final WebUrlHelper webUrlHelper = new WebUrlHelper(LOCALHOST_8000); - Assertions.assertEquals(LOCALHOST_8000, webUrlHelper.getBaseUrl()); - } - - @Test - void testGetBaseUrlTrailingSlash() { - final WebUrlHelper webUrlHelper = new WebUrlHelper("http://localhost:8001/"); - Assertions.assertEquals("http://localhost:8001", webUrlHelper.getBaseUrl()); - } - - @Test - void testGetWorkspaceUrl() { - final WebUrlHelper webUrlHelper = new WebUrlHelper(LOCALHOST_8000); - final String workspaceUrl = webUrlHelper.getWorkspaceUrl(WORKSPACE_ID); - final String expectedUrl = String.format("http://localhost:8000/workspaces/%s", WORKSPACE_ID); - Assertions.assertEquals(expectedUrl, workspaceUrl); - } - - @Test - void testGetConnectionUrl() { - final WebUrlHelper webUrlHelper = new WebUrlHelper(LOCALHOST_8000); - final String connectionUrl = webUrlHelper.getConnectionUrl(WORKSPACE_ID, CONNECTION_ID); - final String expectedUrl = String.format("http://localhost:8000/workspaces/%s/connections/%s", WORKSPACE_ID, CONNECTION_ID); - Assertions.assertEquals(expectedUrl, connectionUrl); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WorkspaceHelperTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WorkspaceHelperTest.java deleted file mode 100644 index 1bcbff8b40f6..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WorkspaceHelperTest.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, 
Inc., all rights reserved. - */ - -package io.airbyte.persistence.job; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.AdditionalMatchers.not; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.models.JobStatus; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.ArrayList; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -class WorkspaceHelperTest { - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); - private static final UUID SOURCE_ID = UUID.randomUUID(); - private static final UUID DEST_DEFINITION_ID = UUID.randomUUID(); - private static final UUID DEST_ID = UUID.randomUUID(); - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final UUID OPERATION_ID = UUID.randomUUID(); - private static final SourceConnection SOURCE = 
new SourceConnection() - .withSourceId(SOURCE_ID) - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withWorkspaceId(WORKSPACE_ID) - .withConfiguration(Jsons.deserialize("{}")) - .withName("source") - .withTombstone(false); - private static final DestinationConnection DEST = new DestinationConnection() - .withDestinationId(DEST_ID) - .withDestinationDefinitionId(DEST_DEFINITION_ID) - .withWorkspaceId(WORKSPACE_ID) - .withConfiguration(Jsons.deserialize("{}")) - .withName("dest") - .withTombstone(false); - private static final StandardSync CONNECTION = new StandardSync() - .withName("a name") - .withConnectionId(CONNECTION_ID) - .withSourceId(SOURCE_ID) - .withDestinationId(DEST_ID).withCatalog(new ConfiguredAirbyteCatalog().withStreams(new ArrayList<>())) - .withManual(true); - private static final StandardSyncOperation OPERATION = new StandardSyncOperation() - .withOperationId(OPERATION_ID) - .withWorkspaceId(WORKSPACE_ID) - .withOperatorType(OperatorType.DBT) - .withName("the new normal") - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withTombstone(false); - - ConfigRepository configRepository; - JobPersistence jobPersistence; - WorkspaceHelper workspaceHelper; - - @BeforeEach - void setup() throws IOException, JsonValidationException, ConfigNotFoundException { - jobPersistence = mock(JobPersistence.class); - - configRepository = mock(ConfigRepository.class); - when(configRepository.getSourceConnection(SOURCE_ID)).thenReturn(SOURCE); - when(configRepository.getSourceConnection(not(eq(SOURCE_ID)))).thenThrow(ConfigNotFoundException.class); - when(configRepository.getDestinationConnection(DEST_ID)).thenReturn(DEST); - when(configRepository.getDestinationConnection(not(eq(DEST_ID)))).thenThrow(ConfigNotFoundException.class); - when(configRepository.getStandardSync(CONNECTION_ID)).thenReturn(CONNECTION); - when(configRepository.getStandardSync(not(eq(CONNECTION_ID)))).thenThrow(ConfigNotFoundException.class); - 
when(configRepository.getStandardSyncOperation(OPERATION_ID)).thenReturn(OPERATION); - when(configRepository.getStandardSyncOperation(not(eq(OPERATION_ID)))).thenThrow(ConfigNotFoundException.class); - - workspaceHelper = new WorkspaceHelper(configRepository, jobPersistence); - } - - @Test - void testMissingObjectsRuntimeException() { - assertThrows(RuntimeException.class, () -> workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(UUID.randomUUID())); - assertThrows(RuntimeException.class, () -> workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(UUID.randomUUID())); - assertThrows(RuntimeException.class, () -> workspaceHelper.getWorkspaceForConnectionIdIgnoreExceptions(UUID.randomUUID())); - assertThrows(RuntimeException.class, () -> workspaceHelper.getWorkspaceForConnectionIgnoreExceptions(UUID.randomUUID(), UUID.randomUUID())); - assertThrows(RuntimeException.class, () -> workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(UUID.randomUUID())); - assertThrows(RuntimeException.class, () -> workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(0L)); - } - - @Test - void testMissingObjectsProperException() { - assertThrows(ConfigNotFoundException.class, () -> workspaceHelper.getWorkspaceForSourceId(UUID.randomUUID())); - assertThrows(ConfigNotFoundException.class, () -> workspaceHelper.getWorkspaceForDestinationId(UUID.randomUUID())); - assertThrows(ConfigNotFoundException.class, () -> workspaceHelper.getWorkspaceForConnectionId(UUID.randomUUID())); - assertThrows(ConfigNotFoundException.class, () -> workspaceHelper.getWorkspaceForConnection(UUID.randomUUID(), UUID.randomUUID())); - assertThrows(ConfigNotFoundException.class, () -> workspaceHelper.getWorkspaceForOperationId(UUID.randomUUID())); - assertThrows(ConfigNotFoundException.class, () -> workspaceHelper.getWorkspaceForJobId(0L)); - } - - @Test - @DisplayName("Validate that source caching is working") - void testSource() throws IOException, JsonValidationException, ConfigNotFoundException { 
- final UUID retrievedWorkspace = workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(SOURCE_ID); - assertEquals(WORKSPACE_ID, retrievedWorkspace); - verify(configRepository, times(1)).getSourceConnection(SOURCE_ID); - - workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(SOURCE_ID); - // There should have been no other call to configRepository - verify(configRepository, times(1)).getSourceConnection(SOURCE_ID); - } - - @Test - @DisplayName("Validate that destination caching is working") - void testDestination() throws IOException, JsonValidationException, ConfigNotFoundException { - final UUID retrievedWorkspace = workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(DEST_ID); - assertEquals(WORKSPACE_ID, retrievedWorkspace); - verify(configRepository, times(1)).getDestinationConnection(DEST_ID); - - workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(DEST_ID); - // There should have been no other call to configRepository - verify(configRepository, times(1)).getDestinationConnection(DEST_ID); - } - - @Test - void testConnection() throws IOException, JsonValidationException, ConfigNotFoundException { - // test retrieving by connection id - final UUID retrievedWorkspace = workspaceHelper.getWorkspaceForConnectionIdIgnoreExceptions(CONNECTION_ID); - assertEquals(WORKSPACE_ID, retrievedWorkspace); - - // test retrieving by source and destination ids - final UUID retrievedWorkspaceBySourceAndDestination = workspaceHelper.getWorkspaceForConnectionIdIgnoreExceptions(CONNECTION_ID); - assertEquals(WORKSPACE_ID, retrievedWorkspaceBySourceAndDestination); - verify(configRepository, times(1)).getStandardSync(CONNECTION_ID); - - workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(DEST_ID); - // There should have been no other call to configRepository - verify(configRepository, times(1)).getStandardSync(CONNECTION_ID); - } - - @Test - void testOperation() throws IOException, JsonValidationException, ConfigNotFoundException { - // test 
retrieving by connection id - final UUID retrievedWorkspace = workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(OPERATION_ID); - assertEquals(WORKSPACE_ID, retrievedWorkspace); - verify(configRepository, times(1)).getStandardSyncOperation(OPERATION_ID); - - workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(OPERATION_ID); - verify(configRepository, times(1)).getStandardSyncOperation(OPERATION_ID); - } - - @Test - void testConnectionAndJobs() throws IOException { - // test jobs - final long jobId = 123; - final Job job = new Job( - jobId, - JobConfig.ConfigType.SYNC, - CONNECTION_ID.toString(), - new JobConfig().withConfigType(JobConfig.ConfigType.SYNC).withSync(new JobSyncConfig()), - new ArrayList<>(), - JobStatus.PENDING, - System.currentTimeMillis(), - System.currentTimeMillis(), - System.currentTimeMillis()); - when(jobPersistence.getJob(jobId)).thenReturn(job); - - final UUID jobWorkspace = workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(jobId); - assertEquals(WORKSPACE_ID, jobWorkspace); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReporterTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReporterTest.java deleted file mode 100644 index 96ff4545af61..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReporterTest.java +++ /dev/null @@ -1,540 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.errorreporter; - -import static org.mockito.Mockito.mock; - -import io.airbyte.config.AttemptFailureSummary; -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.FailureReason; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.config.Metadata; -import io.airbyte.config.NormalizationDestinationDefinitionConfig; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.WebUrlHelper; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -class JobErrorReporterTest { - - private static final UUID JOB_ID = UUID.randomUUID(); - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final String CONNECTION_URL = "http://localhost:8000/connection/my_connection"; - private static final String WORKSPACE_URL = "http://localhost:8000/workspace/my_workspace"; - private static final DeploymentMode DEPLOYMENT_MODE = DeploymentMode.OSS; - private static final String AIRBYTE_VERSION = "0.1.40"; - private static final String NORMALIZATION_IMAGE = "airbyte/normalization"; - private static final String NORMALIZATION_VERSION = "0.2.24"; - private static final String NORMALIZATION_INTEGRATION_TYPE = "snowflake"; - private static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); - private static final String SOURCE_DEFINITION_NAME = "stripe"; - private static 
final String SOURCE_DOCKER_REPOSITORY = "airbyte/source-stripe"; - private static final String SOURCE_DOCKER_IMAGE = "airbyte/source-stripe:1.2.3"; - private static final StandardSourceDefinition.ReleaseStage SOURCE_RELEASE_STAGE = StandardSourceDefinition.ReleaseStage.BETA; - private static final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID(); - private static final String DESTINATION_DEFINITION_NAME = "snowflake"; - private static final String DESTINATION_DOCKER_REPOSITORY = "airbyte/destination-snowflake"; - private static final String DESTINATION_DOCKER_IMAGE = "airbyte/destination-snowflake:1.2.3"; - private static final StandardDestinationDefinition.ReleaseStage DESTINATION_RELEASE_STAGE = StandardDestinationDefinition.ReleaseStage.BETA; - private static final String FROM_TRACE_MESSAGE = "from_trace_message"; - private static final String JOB_ID_KEY = "job_id"; - private static final String WORKSPACE_ID_KEY = "workspace_id"; - private static final String WORKSPACE_URL_KEY = "workspace_url"; - private static final String CONNECTION_ID_KEY = "connection_id"; - private static final String CONNECTION_URL_KEY = "connection_url"; - private static final String DEPLOYMENT_MODE_KEY = "deployment_mode"; - private static final String AIRBYTE_VERSION_KEY = "airbyte_version"; - private static final String FAILURE_ORIGIN_KEY = "failure_origin"; - private static final String SOURCE = "source"; - private static final String PREFIX_FORMAT_STRING = "%s_%s"; - private static final String FAILURE_TYPE_KEY = "failure_type"; - private static final String SYSTEM_ERROR = "system_error"; - private static final String CONNECTOR_DEFINITION_ID_KEY = "connector_definition_id"; - private static final String CONNECTOR_REPOSITORY_KEY = "connector_repository"; - private static final String CONNECTOR_NAME_KEY = "connector_name"; - private static final String CONNECTOR_RELEASE_STAGE_KEY = "connector_release_stage"; - private static final String CONNECTOR_COMMAND_KEY = "connector_command"; 
- private static final String NORMALIZATION_REPOSITORY_KEY = "normalization_repository"; - private static final String CHECK_COMMAND = "check"; - private static final String DISCOVER_COMMAND = "discover"; - private static final String SPEC_COMMAND = "spec"; - private static final String READ_COMMAND = "read"; - private static final String WRITE_COMMAND = "write"; - - private ConfigRepository configRepository; - private JobErrorReportingClient jobErrorReportingClient; - private WebUrlHelper webUrlHelper; - private JobErrorReporter jobErrorReporter; - - @BeforeEach - void setup() { - configRepository = mock(ConfigRepository.class); - jobErrorReportingClient = mock(JobErrorReportingClient.class); - webUrlHelper = mock(WebUrlHelper.class); - jobErrorReporter = new JobErrorReporter( - configRepository, DEPLOYMENT_MODE, AIRBYTE_VERSION, webUrlHelper, jobErrorReportingClient); - - Mockito.when(webUrlHelper.getConnectionUrl(WORKSPACE_ID, CONNECTION_ID)).thenReturn(CONNECTION_URL); - Mockito.when(webUrlHelper.getWorkspaceUrl(WORKSPACE_ID)).thenReturn(WORKSPACE_URL); - } - - @Test - void testReportSyncJobFailure() { - final AttemptFailureSummary mFailureSummary = Mockito.mock(AttemptFailureSummary.class); - - final FailureReason sourceFailureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, READ_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR); - - final FailureReason destinationFailureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, WRITE_COMMAND)) - .withFailureOrigin(FailureOrigin.DESTINATION) - .withFailureType(FailureType.SYSTEM_ERROR); - - final FailureReason normalizationFailureReason = new FailureReason() - .withMetadata(new Metadata().withAdditionalProperty(FROM_TRACE_MESSAGE, true)) - 
.withFailureOrigin(FailureOrigin.NORMALIZATION) - .withFailureType(FailureType.SYSTEM_ERROR); - - final FailureReason nonTraceMessageFailureReason = new FailureReason().withFailureOrigin(FailureOrigin.SOURCE); - final FailureReason replicationFailureReason = new FailureReason().withFailureOrigin(FailureOrigin.REPLICATION); - - Mockito.when(mFailureSummary.getFailures()).thenReturn(List.of( - sourceFailureReason, destinationFailureReason, normalizationFailureReason, nonTraceMessageFailureReason, replicationFailureReason)); - - final long syncJobId = 1L; - final SyncJobReportingContext jobReportingContext = new SyncJobReportingContext( - syncJobId, - SOURCE_DOCKER_IMAGE, - DESTINATION_DOCKER_IMAGE); - - Mockito.when(configRepository.getSourceDefinitionFromConnection(CONNECTION_ID)) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPOSITORY) - .withReleaseStage(SOURCE_RELEASE_STAGE) - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withName(SOURCE_DEFINITION_NAME)); - - Mockito.when(configRepository.getDestinationDefinitionFromConnection(CONNECTION_ID)) - .thenReturn(new StandardDestinationDefinition() - .withDockerRepository(DESTINATION_DOCKER_REPOSITORY) - .withReleaseStage(DESTINATION_RELEASE_STAGE) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID) - .withNormalizationConfig(new NormalizationDestinationDefinitionConfig() - .withNormalizationTag(NORMALIZATION_VERSION) - .withNormalizationRepository(NORMALIZATION_IMAGE) - .withNormalizationIntegrationType(NORMALIZATION_INTEGRATION_TYPE)) - .withName(DESTINATION_DEFINITION_NAME)); - - final StandardWorkspace mWorkspace = Mockito.mock(StandardWorkspace.class); - Mockito.when(mWorkspace.getWorkspaceId()).thenReturn(WORKSPACE_ID); - Mockito.when(configRepository.getStandardWorkspaceFromConnection(CONNECTION_ID, true)).thenReturn(mWorkspace); - - jobErrorReporter.reportSyncJobFailure(CONNECTION_ID, mFailureSummary, jobReportingContext); - - final Map expectedSourceMetadata = 
Map.ofEntries( - Map.entry(JOB_ID_KEY, String.valueOf(syncJobId)), - Map.entry(WORKSPACE_ID_KEY, WORKSPACE_ID.toString()), - Map.entry(WORKSPACE_URL_KEY, WORKSPACE_URL), - Map.entry(CONNECTION_ID_KEY, CONNECTION_ID.toString()), - Map.entry(CONNECTION_URL_KEY, CONNECTION_URL), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, SOURCE), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_COMMAND_KEY, READ_COMMAND), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, SOURCE_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, SOURCE_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, SOURCE_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, SOURCE_RELEASE_STAGE.toString())); - - final Map expectedDestinationMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, String.valueOf(syncJobId)), - Map.entry(WORKSPACE_ID_KEY, WORKSPACE_ID.toString()), - Map.entry(WORKSPACE_URL_KEY, WORKSPACE_URL), - Map.entry(CONNECTION_ID_KEY, CONNECTION_ID.toString()), - Map.entry(CONNECTION_URL_KEY, CONNECTION_URL), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, "destination"), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_COMMAND_KEY, WRITE_COMMAND), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, DESTINATION_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, DESTINATION_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, DESTINATION_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, DESTINATION_RELEASE_STAGE.toString())); - - final Map expectedNormalizationMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, String.valueOf(syncJobId)), - Map.entry(WORKSPACE_ID_KEY, WORKSPACE_ID.toString()), - Map.entry(WORKSPACE_URL_KEY, WORKSPACE_URL), - Map.entry(CONNECTION_ID_KEY, CONNECTION_ID.toString()), - Map.entry(CONNECTION_URL_KEY, CONNECTION_URL), - 
Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, "normalization"), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(NORMALIZATION_REPOSITORY_KEY, NORMALIZATION_IMAGE), - Map.entry(String.format(PREFIX_FORMAT_STRING, SOURCE, CONNECTOR_DEFINITION_ID_KEY), SOURCE_DEFINITION_ID.toString()), - Map.entry(String.format(PREFIX_FORMAT_STRING, SOURCE, CONNECTOR_REPOSITORY_KEY), SOURCE_DOCKER_REPOSITORY), - Map.entry(String.format(PREFIX_FORMAT_STRING, SOURCE, CONNECTOR_NAME_KEY), SOURCE_DEFINITION_NAME), - Map.entry(String.format(PREFIX_FORMAT_STRING, SOURCE, CONNECTOR_RELEASE_STAGE_KEY), SOURCE_RELEASE_STAGE.toString()), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, DESTINATION_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, DESTINATION_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, DESTINATION_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, DESTINATION_RELEASE_STAGE.toString())); - - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, sourceFailureReason, SOURCE_DOCKER_IMAGE, expectedSourceMetadata); - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, destinationFailureReason, DESTINATION_DOCKER_IMAGE, - expectedDestinationMetadata); - Mockito.verify(jobErrorReportingClient).reportJobFailureReason( - mWorkspace, normalizationFailureReason, String.format("%s:%s", NORMALIZATION_IMAGE, NORMALIZATION_VERSION), expectedNormalizationMetadata); - Mockito.verifyNoMoreInteractions(jobErrorReportingClient); - } - - @Test - void testReportSyncJobFailureDoesNotThrow() { - final AttemptFailureSummary mFailureSummary = Mockito.mock(AttemptFailureSummary.class); - final SyncJobReportingContext jobContext = new SyncJobReportingContext(1L, SOURCE_DOCKER_IMAGE, DESTINATION_DOCKER_IMAGE); - - final FailureReason sourceFailureReason = new FailureReason() - .withMetadata(new 
Metadata().withAdditionalProperty(FROM_TRACE_MESSAGE, true)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR); - - Mockito.when(mFailureSummary.getFailures()).thenReturn(List.of(sourceFailureReason)); - - Mockito.when(configRepository.getSourceDefinitionFromConnection(CONNECTION_ID)) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPOSITORY) - .withReleaseStage(SOURCE_RELEASE_STAGE) - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withName(SOURCE_DEFINITION_NAME)); - - final StandardWorkspace mWorkspace = Mockito.mock(StandardWorkspace.class); - Mockito.when(mWorkspace.getWorkspaceId()).thenReturn(WORKSPACE_ID); - Mockito.when(configRepository.getStandardWorkspaceFromConnection(CONNECTION_ID, true)).thenReturn(mWorkspace); - Mockito.when(webUrlHelper.getConnectionUrl(WORKSPACE_ID, CONNECTION_ID)).thenReturn(CONNECTION_URL); - - Mockito.doThrow(new RuntimeException("some exception")) - .when(jobErrorReportingClient) - .reportJobFailureReason(Mockito.any(), Mockito.eq(sourceFailureReason), Mockito.any(), Mockito.any()); - - Assertions.assertDoesNotThrow(() -> jobErrorReporter.reportSyncJobFailure(CONNECTION_ID, mFailureSummary, jobContext)); - Mockito.verify(jobErrorReportingClient, Mockito.times(1)) - .reportJobFailureReason(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); - } - - @Test - void testReportSourceCheckJobFailure() throws JsonValidationException, ConfigNotFoundException, IOException { - final FailureReason failureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(JOB_ID, SOURCE_DOCKER_IMAGE); - - 
Mockito.when(configRepository.getStandardSourceDefinition(SOURCE_DEFINITION_ID)) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPOSITORY) - .withReleaseStage(SOURCE_RELEASE_STAGE) - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withName(SOURCE_DEFINITION_NAME)); - - final StandardWorkspace mWorkspace = Mockito.mock(StandardWorkspace.class); - Mockito.when(mWorkspace.getWorkspaceId()).thenReturn(WORKSPACE_ID); - Mockito.when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(mWorkspace); - - jobErrorReporter.reportSourceCheckJobFailure(SOURCE_DEFINITION_ID, WORKSPACE_ID, failureReason, jobContext); - - final Map expectedMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, JOB_ID.toString()), - Map.entry(WORKSPACE_ID_KEY, WORKSPACE_ID.toString()), - Map.entry(WORKSPACE_URL_KEY, WORKSPACE_URL), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, SOURCE), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, SOURCE_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, SOURCE_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, SOURCE_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, SOURCE_RELEASE_STAGE.toString()), - Map.entry(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)); - - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, failureReason, SOURCE_DOCKER_IMAGE, expectedMetadata); - Mockito.verifyNoMoreInteractions(jobErrorReportingClient); - } - - @Test - void testReportSourceCheckJobFailureNullWorkspaceId() throws JsonValidationException, ConfigNotFoundException, IOException { - final FailureReason failureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - 
.withFailureType(FailureType.SYSTEM_ERROR); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(JOB_ID, SOURCE_DOCKER_IMAGE); - - Mockito.when(configRepository.getStandardSourceDefinition(SOURCE_DEFINITION_ID)) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPOSITORY) - .withReleaseStage(SOURCE_RELEASE_STAGE) - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withName(SOURCE_DEFINITION_NAME)); - - jobErrorReporter.reportSourceCheckJobFailure(SOURCE_DEFINITION_ID, null, failureReason, jobContext); - - final Map expectedMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, JOB_ID.toString()), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, SOURCE), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, SOURCE_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, SOURCE_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, SOURCE_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, SOURCE_RELEASE_STAGE.toString()), - Map.entry(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)); - - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(null, failureReason, SOURCE_DOCKER_IMAGE, expectedMetadata); - Mockito.verifyNoMoreInteractions(jobErrorReportingClient); - } - - @Test - void testReportDestinationCheckJobFailure() throws JsonValidationException, ConfigNotFoundException, IOException { - final FailureReason failureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)) - .withFailureOrigin(FailureOrigin.DESTINATION) - .withFailureType(FailureType.SYSTEM_ERROR); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(JOB_ID, DESTINATION_DOCKER_IMAGE); - - 
Mockito.when(configRepository.getStandardDestinationDefinition(DESTINATION_DEFINITION_ID)) - .thenReturn(new StandardDestinationDefinition() - .withDockerRepository(DESTINATION_DOCKER_REPOSITORY) - .withReleaseStage(DESTINATION_RELEASE_STAGE) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID) - .withName(DESTINATION_DEFINITION_NAME)); - - final StandardWorkspace mWorkspace = Mockito.mock(StandardWorkspace.class); - Mockito.when(mWorkspace.getWorkspaceId()).thenReturn(WORKSPACE_ID); - Mockito.when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(mWorkspace); - - jobErrorReporter.reportDestinationCheckJobFailure(DESTINATION_DEFINITION_ID, WORKSPACE_ID, failureReason, jobContext); - - final Map expectedMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, JOB_ID.toString()), - Map.entry(WORKSPACE_ID_KEY, WORKSPACE_ID.toString()), - Map.entry(WORKSPACE_URL_KEY, WORKSPACE_URL), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, "destination"), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, DESTINATION_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, DESTINATION_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, DESTINATION_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, DESTINATION_RELEASE_STAGE.toString()), - Map.entry(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)); - - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, failureReason, DESTINATION_DOCKER_IMAGE, expectedMetadata); - Mockito.verifyNoMoreInteractions(jobErrorReportingClient); - } - - @Test - void testReportDestinationCheckJobFailureNullWorkspaceId() throws JsonValidationException, ConfigNotFoundException, IOException { - final FailureReason failureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - 
.withAdditionalProperty(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)) - .withFailureOrigin(FailureOrigin.DESTINATION) - .withFailureType(FailureType.SYSTEM_ERROR); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(JOB_ID, DESTINATION_DOCKER_IMAGE); - - Mockito.when(configRepository.getStandardDestinationDefinition(DESTINATION_DEFINITION_ID)) - .thenReturn(new StandardDestinationDefinition() - .withDockerRepository(DESTINATION_DOCKER_REPOSITORY) - .withReleaseStage(DESTINATION_RELEASE_STAGE) - .withDestinationDefinitionId(DESTINATION_DEFINITION_ID) - .withName(DESTINATION_DEFINITION_NAME)); - - jobErrorReporter.reportDestinationCheckJobFailure(DESTINATION_DEFINITION_ID, null, failureReason, jobContext); - - final Map expectedMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, JOB_ID.toString()), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, "destination"), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, DESTINATION_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, DESTINATION_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, DESTINATION_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, DESTINATION_RELEASE_STAGE.toString()), - Map.entry(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)); - - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(null, failureReason, DESTINATION_DOCKER_IMAGE, expectedMetadata); - Mockito.verifyNoMoreInteractions(jobErrorReportingClient); - } - - @Test - void testReportDiscoverJobFailure() throws JsonValidationException, ConfigNotFoundException, IOException { - final FailureReason failureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, DISCOVER_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR); - - 
final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(JOB_ID, SOURCE_DOCKER_IMAGE); - - Mockito.when(configRepository.getStandardSourceDefinition(SOURCE_DEFINITION_ID)) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPOSITORY) - .withReleaseStage(SOURCE_RELEASE_STAGE) - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withName(SOURCE_DEFINITION_NAME)); - - final StandardWorkspace mWorkspace = Mockito.mock(StandardWorkspace.class); - Mockito.when(mWorkspace.getWorkspaceId()).thenReturn(WORKSPACE_ID); - Mockito.when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(mWorkspace); - - jobErrorReporter.reportDiscoverJobFailure(SOURCE_DEFINITION_ID, WORKSPACE_ID, failureReason, jobContext); - - final Map expectedMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, JOB_ID.toString()), - Map.entry(WORKSPACE_ID_KEY, WORKSPACE_ID.toString()), - Map.entry(WORKSPACE_URL_KEY, WORKSPACE_URL), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, SOURCE), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, SOURCE_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, SOURCE_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, SOURCE_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, SOURCE_RELEASE_STAGE.toString()), - Map.entry(CONNECTOR_COMMAND_KEY, DISCOVER_COMMAND)); - - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, failureReason, SOURCE_DOCKER_IMAGE, expectedMetadata); - Mockito.verifyNoMoreInteractions(jobErrorReportingClient); - } - - @Test - void testReportDiscoverJobFailureNullWorkspaceId() throws JsonValidationException, ConfigNotFoundException, IOException { - final FailureReason failureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - 
.withAdditionalProperty(CONNECTOR_COMMAND_KEY, DISCOVER_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(JOB_ID, SOURCE_DOCKER_IMAGE); - - Mockito.when(configRepository.getStandardSourceDefinition(SOURCE_DEFINITION_ID)) - .thenReturn(new StandardSourceDefinition() - .withDockerRepository(SOURCE_DOCKER_REPOSITORY) - .withReleaseStage(SOURCE_RELEASE_STAGE) - .withSourceDefinitionId(SOURCE_DEFINITION_ID) - .withName(SOURCE_DEFINITION_NAME)); - - jobErrorReporter.reportDiscoverJobFailure(SOURCE_DEFINITION_ID, null, failureReason, jobContext); - - final Map expectedMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, JOB_ID.toString()), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, SOURCE), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, SOURCE_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, SOURCE_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, SOURCE_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, SOURCE_RELEASE_STAGE.toString()), - Map.entry(CONNECTOR_COMMAND_KEY, DISCOVER_COMMAND)); - - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(null, failureReason, SOURCE_DOCKER_IMAGE, expectedMetadata); - Mockito.verifyNoMoreInteractions(jobErrorReportingClient); - } - - @Test - void testReportSpecJobFailure() { - final FailureReason failureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, SPEC_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(JOB_ID, SOURCE_DOCKER_IMAGE); - - jobErrorReporter.reportSpecJobFailure(failureReason, 
jobContext); - - final Map expectedMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, JOB_ID.toString()), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, SOURCE), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(CONNECTOR_REPOSITORY_KEY, SOURCE_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_COMMAND_KEY, SPEC_COMMAND)); - - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(null, failureReason, SOURCE_DOCKER_IMAGE, expectedMetadata); - Mockito.verifyNoMoreInteractions(jobErrorReportingClient); - } - - @Test - void testReportUnsupportedFailureType() { - final FailureReason readFailureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, READ_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.CONFIG_ERROR); - - final FailureReason discoverFailureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, DISCOVER_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.MANUAL_CANCELLATION); - - final FailureReason checkFailureReason = new FailureReason() - .withMetadata(new Metadata() - .withAdditionalProperty(FROM_TRACE_MESSAGE, true) - .withAdditionalProperty(CONNECTOR_COMMAND_KEY, CHECK_COMMAND)) - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.CONFIG_ERROR); - - final ConnectorJobReportingContext jobContext = new ConnectorJobReportingContext(JOB_ID, SOURCE_DOCKER_IMAGE); - - jobErrorReporter.reportSpecJobFailure(readFailureReason, jobContext); - jobErrorReporter.reportSpecJobFailure(discoverFailureReason, jobContext); - jobErrorReporter.reportSpecJobFailure(checkFailureReason, jobContext); - - Mockito.verifyNoInteractions(jobErrorReportingClient); - } - -} diff --git 
a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClientFactoryTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClientFactoryTest.java deleted file mode 100644 index acfebfa0b3ea..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReportingClientFactoryTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.errorreporter; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.config.Configs; -import io.airbyte.config.Configs.JobErrorReportingStrategy; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -class JobErrorReportingClientFactoryTest { - - @Test - void testCreateErrorReportingClientLogging() { - assertTrue( - JobErrorReportingClientFactory.getClient( - JobErrorReportingStrategy.LOGGING, Mockito.mock(Configs.class)) instanceof LoggingJobErrorReportingClient); - } - - @Test - void testCreateErrorReportingClientSentry() { - final Configs configsMock = Mockito.mock(Configs.class); - Mockito.when(configsMock.getJobErrorReportingSentryDSN()).thenReturn(""); - - assertTrue( - JobErrorReportingClientFactory.getClient( - JobErrorReportingStrategy.SENTRY, configsMock) instanceof SentryJobErrorReportingClient); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelperTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelperTest.java deleted file mode 100644 index d499f9b1303b..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelperTest.java +++ /dev/null @@ -1,680 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights 
reserved. - */ - -package io.airbyte.persistence.job.errorreporter; - -import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper.SentryExceptionPlatform; -import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper.SentryParsedException; -import io.sentry.protocol.SentryException; -import io.sentry.protocol.SentryStackFrame; -import io.sentry.protocol.SentryStackTrace; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class SentryExceptionHelperTest { - - private static final String ERROR_PATH = "/airbyte/connector-errors/error.py"; - private static final String ABS_PATH = "abspath"; - private static final String LINE_NO = "lineno"; - private static final String FUNCTION = "function"; - private static final String CONTEXT_LINE = "context_line"; - private static final String FILENAME = "filename"; - private static final String MODULE = "module"; - - final SentryExceptionHelper exceptionHelper = new SentryExceptionHelper(); - - @Test - void testBuildSentryExceptionsInvalid() { - final String stacktrace = "this is not a stacktrace"; - final Optional parsedException = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(parsedException.isEmpty()); - } - - @Test - void testBuildSentryExceptionsPartiallyInvalid() { - final String stacktrace = "Traceback (most recent call last):\n Oops!"; - final Optional parsedException = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(parsedException.isEmpty()); - } - - @Test - void testBuildSentryExceptionsPythonChained() { - final String stacktrace = - """ - Traceback (most recent call last): - File "/airbyte/connector-errors/error.py", line 31, in read_records - failing_method() - File "/airbyte/connector-errors/error.py", line 36, in failing_method - raise HTTPError(http_error_msg, response=self) - requests.exceptions.HTTPError: 400 Client Error: Bad Request 
for url: https://airbyte.com - - The above exception was the direct cause of the following exception: - - Traceback (most recent call last): - File "/airbyte/connector-errors/error.py", line 39, in - main() - File "/airbyte/connector-errors/error.py", line 13, in main - sync_mode("incremental") - File "/airbyte/connector-errors/error.py", line 17, in sync_mode - incremental() - File "/airbyte/connector-errors/error.py", line 33, in incremental - raise RuntimeError("My other error") from err - RuntimeError: My other error - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.PYTHON, parsedException.platform()); - Assertions.assertEquals(2, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "requests.exceptions.HTTPError", "400 Client Error: Bad Request for url: https://airbyte.com", - List.of( - Map.of( - ABS_PATH, ERROR_PATH, - LINE_NO, 31, - FUNCTION, "read_records", - CONTEXT_LINE, "failing_method()"), - Map.of( - ABS_PATH, ERROR_PATH, - LINE_NO, 36, - FUNCTION, "failing_method", - CONTEXT_LINE, "raise HTTPError(http_error_msg, response=self)"))); - - assertExceptionContent(exceptionList.get(1), "RuntimeError", "My other error", List.of( - Map.of( - ABS_PATH, ERROR_PATH, - LINE_NO, 39, - FUNCTION, "", - CONTEXT_LINE, "main()"), - Map.of( - ABS_PATH, ERROR_PATH, - LINE_NO, 13, - FUNCTION, "main", - CONTEXT_LINE, "sync_mode(\"incremental\")"), - Map.of( - ABS_PATH, ERROR_PATH, - LINE_NO, 17, - FUNCTION, "sync_mode", - CONTEXT_LINE, "incremental()"), - Map.of( - ABS_PATH, ERROR_PATH, - LINE_NO, 33, - FUNCTION, "incremental", - CONTEXT_LINE, "raise RuntimeError(\"My other error\") from err"))); - - } - - @Test - void 
testBuildSentryExceptionsPythonNoValue() { - final String stacktrace = - """ - Traceback (most recent call last): - File "/airbyte/connector-errors/error.py", line 33, in incremental - raise RuntimeError() - RuntimeError - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.PYTHON, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "RuntimeError", null, List.of( - Map.of( - ABS_PATH, ERROR_PATH, - LINE_NO, 33, - FUNCTION, "incremental", - CONTEXT_LINE, "raise RuntimeError()"))); - } - - @Test - void testBuildSentryExceptionsPythonMultilineValue() { - final String stacktrace = - """ - Traceback (most recent call last): - File "/usr/local/lib/python3.9/site-packages/grpc/_channel.py", line 849, in _end_unary_response_blocking - raise _InactiveRpcError(state) - grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with: - status = StatusCode.INTERNAL - details = "Internal error encountered." 
- > - - During handling of the above exception, another exception occurred: - - Traceback (most recent call last): - File "/usr/local/lib/python3.9/site-packages/google/api_core/exceptions.py", line 553, in _parse_grpc_error_details - status = rpc_status.from_call(rpc_exc) - AttributeError: 'NoneType' object has no attribute 'from_call' - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.PYTHON, parsedException.platform()); - Assertions.assertEquals(2, exceptionList.size()); - - final String expectedValue = - """ - <_InactiveRpcError of RPC that terminated with: - status = StatusCode.INTERNAL - details = "Internal error encountered." - >"""; - - assertExceptionContent(exceptionList.get(0), "grpc._channel._InactiveRpcError", expectedValue, List.of( - Map.of( - ABS_PATH, "/usr/local/lib/python3.9/site-packages/grpc/_channel.py", - LINE_NO, 849, - FUNCTION, "_end_unary_response_blocking", - CONTEXT_LINE, "raise _InactiveRpcError(state)"))); - - assertExceptionContent(exceptionList.get(1), "AttributeError", "'NoneType' object has no attribute 'from_call'", List.of( - Map.of( - ABS_PATH, "/usr/local/lib/python3.9/site-packages/google/api_core/exceptions.py", - LINE_NO, 553, - FUNCTION, "_parse_grpc_error_details", - CONTEXT_LINE, "status = rpc_status.from_call(rpc_exc)"))); - } - - @Test - void testBuildSentryExceptionsJava() { - final String stacktrace = - """ - java.lang.ArithmeticException: / by zero - at io.airbyte.integrations.base.AirbyteTraceMessageUtilityTest.testCorrectStacktraceFormat(AirbyteTraceMessageUtilityTest.java:61) - at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at 
org.junit.jupiter.engine.execution.ExecutableInvoker$ReflectiveInterceptorCall.lambda$ofVoidMethod$0(ExecutableInvoker.java:115) - at app//org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:141) - at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73) - at jdk.proxy2/jdk.proxy2.$Proxy5.stop(Unknown Source) - at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74) - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.JAVA, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "java.lang.ArithmeticException", "/ by zero", - List.of( - Map.of( - FILENAME, "GradleWorkerMain.java", - LINE_NO, 74, - MODULE, "worker.org.gradle.process.internal.worker.GradleWorkerMain", - FUNCTION, "main"), - Map.of( - MODULE, "jdk.proxy2.$Proxy5", - FUNCTION, "stop"), - Map.of( - FILENAME, "ThrowableCollector.java", - LINE_NO, 73, - MODULE, "org.junit.platform.engine.support.hierarchical.ThrowableCollector", - FUNCTION, "execute"), - Map.of( - FILENAME, "NodeTestTask.java", - LINE_NO, 141, - MODULE, "org.junit.platform.engine.support.hierarchical.NodeTestTask", - FUNCTION, "lambda$executeRecursively$8"), - Map.of( - FILENAME, "ExecutableInvoker.java", - LINE_NO, 115, - MODULE, "org.junit.jupiter.engine.execution.ExecutableInvoker$ReflectiveInterceptorCall", - FUNCTION, "lambda$ofVoidMethod$0"), - Map.of( - "isNative", true, - MODULE, "jdk.internal.reflect.NativeMethodAccessorImpl", - FUNCTION, "invoke0"), - Map.of( - FILENAME, "AirbyteTraceMessageUtilityTest.java", - LINE_NO, 
61, - MODULE, "io.airbyte.integrations.base.AirbyteTraceMessageUtilityTest", - FUNCTION, "testCorrectStacktraceFormat"))); - } - - @Test - void testBuildSentryExceptionsJavaChained() { - final String stacktrace = - """ - java.util.concurrent.CompletionException: io.airbyte.workers.DefaultReplicationWorker$DestinationException: Destination process exited with non-zero exit code 1 - at java.base/java.util.concurrent.CompletableFuture.encodeThrowable(CompletableFuture.java:315) - at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) - at java.base/java.lang.Thread.run(Thread.java:833) - Suppressed: io.airbyte.workers.exception.WorkerException: Source process exit with code 1. This warning is normal if the job was cancelled. - at io.airbyte.workers.internal.DefaultAirbyteSource.close(DefaultAirbyteSource.java:136) - at io.airbyte.workers.general.DefaultReplicationWorker.run(DefaultReplicationWorker.java:137) - at io.airbyte.workers.general.DefaultReplicationWorker.run(DefaultReplicationWorker.java:65) - at io.airbyte.workers.temporal.TemporalAttemptExecution.lambda$getWorkerThread$2(TemporalAttemptExecution.java:158) - at java.lang.Thread.run(Thread.java:833) - Caused by: io.airbyte.workers.DefaultReplicationWorker$DestinationException: Destination process exited with non-zero exit code 1 - at io.airbyte.workers.DefaultReplicationWorker.lambda$getDestinationOutputRunnable$7(DefaultReplicationWorker.java:397) - at java.base/java.util.concurrent.CompletableFuture$AsyncRun.run(CompletableFuture.java:1804) - ... 
3 more - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.JAVA, parsedException.platform()); - Assertions.assertEquals(2, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "java.util.concurrent.CompletionException", - "io.airbyte.workers.DefaultReplicationWorker$DestinationException: Destination process exited with non-zero exit code 1", - List.of( - Map.of( - FILENAME, "Thread.java", - LINE_NO, 833, - MODULE, "java.lang.Thread", - FUNCTION, "run"), - Map.of( - FILENAME, "ThreadPoolExecutor.java", - LINE_NO, 635, - MODULE, "java.util.concurrent.ThreadPoolExecutor$Worker", - FUNCTION, "run"), - Map.of( - FILENAME, "CompletableFuture.java", - LINE_NO, 315, - MODULE, "java.util.concurrent.CompletableFuture", - FUNCTION, "encodeThrowable"))); - - assertExceptionContent(exceptionList.get(1), "io.airbyte.workers.DefaultReplicationWorker$DestinationException", - "Destination process exited with non-zero exit code 1", List.of( - Map.of( - FILENAME, "CompletableFuture.java", - LINE_NO, 1804, - MODULE, "java.util.concurrent.CompletableFuture$AsyncRun", - FUNCTION, "run"), - Map.of( - FILENAME, "DefaultReplicationWorker.java", - LINE_NO, 397, - MODULE, "io.airbyte.workers.DefaultReplicationWorker", - FUNCTION, "lambda$getDestinationOutputRunnable$7"))); - } - - @Test - void testBuildSentryExceptionsJavaMultilineValue() { - final String stacktrace = - """ - io.temporal.failure.ApplicationFailure: GET https://storage.googleapis.com/ - { - "code" : 401, - "message" : "Invalid Credentials" - } - at com.google.api.client.googleapis.json.GoogleJsonResponseException.from(GoogleJsonResponseException.java:146) - ... 
22 more - Caused by: org.postgresql.util.PSQLException: ERROR: publication "airbyte_publication" does not exist - Where: slot "airbyte_slot", output plugin "pgoutput", in the change callback, associated LSN 0/48029520 - at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2675) - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.JAVA, parsedException.platform()); - Assertions.assertEquals(2, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "io.temporal.failure.ApplicationFailure", - """ - GET https://storage.googleapis.com/ - { - "code" : 401, - "message" : "Invalid Credentials" - }""", List.of( - Map.of( - FILENAME, "GoogleJsonResponseException.java", - LINE_NO, 146, - MODULE, "com.google.api.client.googleapis.json.GoogleJsonResponseException", - FUNCTION, "from"))); - - assertExceptionContent(exceptionList.get(1), "org.postgresql.util.PSQLException", - """ - ERROR: publication "airbyte_publication" does not exist - Where: slot "airbyte_slot", output plugin "pgoutput", in the change callback, associated LSN 0/48029520""", List.of( - Map.of( - FILENAME, "QueryExecutorImpl.java", - LINE_NO, 2675, - MODULE, "org.postgresql.core.v3.QueryExecutorImpl", - FUNCTION, "receiveErrorResponse"))); - } - - @Test - void testBuildSentryExceptionsDbtDatabaseErrorDefault() { - final String stacktrace = - """ - AirbyteDbtError:\s - 1 of 1 ERROR creating table model public.midaug_start_users............................................................. 
[ERROR in 0.24s] - Database Error in model midaug_start_users (models/generated/airbyte_incremental/public/midaug_start_users.sql) - 1292 (22007): Truncated incorrect DOUBLE value: 'ABC' - compiled SQL at ../build/run/airbyte_utils/models/generated/airbyte_incremental/public/midaug_start_users.sql - 1 of 1 ERROR creating table model public.midaug_start_users............................................................. [ERROR in 0.24s] - Database Error in model midaug_start_users (models/generated/airbyte_incremental/public/midaug_start_users.sql) - 1292 (22007): Truncated incorrect DOUBLE value: 'ABC' - compiled SQL at ../build/run/airbyte_utils/models/generated/airbyte_incremental/public/midaug_start_users.sql - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtDatabaseError", - "1292 (22007): Truncated incorrect DOUBLE value: 'ABC'", List.of()); - } - - @Test - void testBuildSentryExceptionsDbtDatabaseErrorSqlComp() { - final String stacktrace = - """ - AirbyteDbtError:\s - 4 of 14 ERROR creating incremental model RAW.GENERAL_LEDGER_TRANS....................................................... [ERROR in 11.47s] - Database Error in model GENERAL_LEDGER_TRANS (models/generated/airbyte_incremental/RAW/GENERAL_LEDGER_TRANS.sql) - 001003 (42000): SQL compilation error: - syntax error line 47 at position 19 unexpected '-'. 
- Setting attempt to FAILED because the job was cancelled - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtDatabaseSQLCompilationError", - "001003 (42000): SQL compilation error: syntax error line 47 at position 19 unexpected '-'.", List.of()); - } - - @Test - void testBuildSentryExceptionsDbtDatabaseErrorInvalidInput() { - final String stacktrace = - """ - AirbyteDbtError:\s - 1 of 1 ERROR creating table model tdym_processing_test.tdym_api_data.................................................... [ERROR in 0.61s] - Database Error in model tdym_api_data (models/generated/airbyte_tables/tdym_processing_test/tdym_api_data.sql) - Invalid input - DETAIL: - - - -- error: Invalid input - code: 8001 - context: SUPER value exceeds export size. 
- query: 3667701 - location: partiql_export.cpp:9 - process: query0_127_3667701 [pid=17836] - - - -- compiled SQL at ../build/run/airbyte_utils/models/generated/airbyte_tables/tdym_processing_test/tdym_api_data.sql - Setting attempt to FAILED because the job was cancelled - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtDatabaseInvalidInputError", - "Invalid input\ncontext: SUPER value exceeds export size.", List.of()); - } - - @Test - void testBuildSentryExceptionsDbtDatabaseErrorSyntax() { - final String stacktrace = - """ - AirbyteDbtError:\s - 1 of 3 ERROR creating incremental model _airbyte_public.products_stg.................................................... [ERROR in 0.89s] - Database Error in model products_stg (models/generated/airbyte_incremental/public/products_stg.sql) - syntax error at or near "text" - LINE 6: add column Base name text - ^ - 1 of 3 ERROR creating incremental model _airbyte_public.products_stg.................................................... 
[ERROR in 0.89s] - Database Error in model products_stg (models/generated/airbyte_incremental/public/products_stg.sql) - syntax error at or near "text" - LINE 6: add column Base name text - ^ - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtDatabaseSyntaxError", - "syntax error at or near \"text\"\nLINE 6: add column Base name text", List.of()); - } - - @Test - void testBuildSentryExceptionsDbtUnhandledError() { - final String stacktrace = - """ - AirbyteDbtError:\s - Unhandled error while executing model.airbyte_utils.1595965687212073_ads_insights_cost_per_2_sec_continuous_video_view_ab2 - ("{'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}: None", {'error': {'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}}) - Unhandled error while executing model.airbyte_utils.1595965687212073_ads_insights_video_avg_time_watched_actions_ab2 - ("{'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}: None", {'error': {'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}}) - 10 of 48 SKIP relation ba__facebook_ads.1595965687212073_ads_insights_cost_per_2_sec_continuous_video_view due to ephemeral model error [ERROR SKIP] - ("{'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}: None", {'error': {'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}}) - 23 of 48 SKIP relation 
ba__facebook_ads.1595965687212073_ads_insights_video_avg_time_watched_actions due to ephemeral model error [ERROR SKIP] - ("{'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}: None", {'error': {'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}}) - Compilation Error in model.airbyte_utils.1595965687212073_ads_insights_cost_per_2_sec_continuous_video_view, caused by compilation error in referenced ephemeral model model.airbyte_utils.1595965687212073_ads_insights_cost_per_2_sec_continuous_video_view_ab2 - Compilation Error in model.airbyte_utils.1595965687212073_ads_insights_video_avg_time_watched_actions, caused by compilation error in referenced ephemeral model model.airbyte_utils.1595965687212073_ads_insights_video_avg_time_watched_actions_ab2 - Unhandled error while executing model.airbyte_utils.1595965687212073_ads_insights_cost_per_2_sec_continuous_video_view_ab2 - ("{'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}: None", {'error': {'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}}) - Compilation Error in model.airbyte_utils.1595965687212073_ads_insights_video_avg_time_watched_actions, caused by compilation error in referenced ephemeral model model.airbyte_utils.1595965687212073_ads_insights_video_avg_time_watched_actions_ab2 - Compilation Error in model.airbyte_utils.1595965687212073_ads_insights_video_p95_watched_actions, caused by compilation error in referenced ephemeral model model.airbyte_utils.1595965687212073_ads_insights_video_p95_watched_actions_ab2 - Compilation Error in model.airbyte_utils.1595965687212073_ads_insights_video_time_watched_actions, caused by compilation error in referenced ephemeral model model.airbyte_utils.1595965687212073_ads_insights_video_time_watched_actions_ab2 - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - 
Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtUnhandledError", - "(\"{'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}: None\", {'error': {'code': 503, 'message': 'The service is currently unavailable.', 'status': 'UNAVAILABLE'}})", - List.of()); - } - - @Test - void testBuildSentryExceptionsDbtCompilationErrorAmbigRelation() { - final String stacktrace = - """ - AirbyteDbtError:\s - 1 of 1 ERROR creating table model dbo.sheet1............................................................................ [ERROR in 0.11s] - Compilation Error in model sheet1 (models/generated/airbyte_tables/dbo/sheet1.sql) - When searching for a relation, dbt found an approximate match. Instead of guessing - which relation to use, dbt will move on. Please delete "Data_integration"."dbo"."sheet1", or rename it to be less ambiguous. 
- Searched for: "data_integration"."dbo"."sheet1" - Found: "Data_integration"."dbo"."sheet1" - - > in macro materialization_table_default (macros/materializations/models/table/table.sql) - > called by model sheet1 (models/generated/airbyte_tables/dbo/sheet1.sql) - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtCompilationAmbiguousRelationError", - "When searching for a relation, dbt found an approximate match. Instead of guessing which relation to use, dbt will move on. Please delete \"Data_integration\".\"dbo\".\"sheet1\", or rename it to be less ambiguous.", - List.of()); - } - - @Test - void testBuildSentryExceptionsDbtCompilationErrorDefault() { - final String stacktrace = - """ - AirbyteDbtError:\s - Encountered an error: - Compilation Error in model banking_test (models/generated/airbyte_tables/public/banking_test.sql) - Model 'model.airbyte_utils.banking_test' (models/generated/airbyte_tables/public/banking_test.sql) depends on a source named 'public._airbyte_raw_banking_test' which was not found - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtCompilationError", - "Model 
'model.airbyte_utils.banking_test' (models/generated/airbyte_tables/public/banking_test.sql) depends on a source named 'public._airbyte_raw_banking_test' which was not found", - List.of()); - } - - @Test - void testBuildSentryExceptionsDbtRuntimeErrorDefault() { - final String stacktrace = - """ - AirbyteDbtError:\s - Encountered an error: - Runtime Error - Code: 102. Unexpected packet from server abcdefg.eu-west-1.aws.clickhouse.cloud:8443 (expected Hello or Exception, got Unknown packet) - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtRuntimeError", - "Code: 102. Unexpected packet from server abcdefg.eu-west-1.aws.clickhouse.cloud:8443 (expected Hello or Exception, got Unknown packet)", - List.of()); - } - - @Test - void testBuildSentryExceptionsDbtRuntimeErrorDatabase() { - final String stacktrace = - """ - AirbyteDbtError:\s - Encountered an error: - Runtime Error - Database error while listing schemas in database ""AIRBYTE_DATABASE"" - Database Error - 250001 (08001): Failed to connect to DB: xyzxyz.us-east-2.aws.snowflakecomputing.com:443. The user you were trying to authenticate as differs from the user tied to the access token. 
- message='io.temporal.serviceclient.CheckedExceptionWrapper: java.util.concurrent.ExecutionException: java.lang.RuntimeException: io.airbyte.workers.exception.WorkerException: Running the launcher replication-orchestrator failed', type='java.lang.RuntimeException', nonRetryable=false - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtRuntimeDatabaseError", - "250001 (08001): Failed to connect to DB: xyzxyz.us-east-2.aws.snowflakecomputing.com:443. The user you were trying to authenticate as differs from the user tied to the access token.", - List.of()); - } - - @Test - void testBuildSentryExceptionsDbtDatabaseErrorOther() { - final String stacktrace = - """ - AirbyteDbtError:\s - Encountered an error: - Database Error - Access Denied: Project upside-cloud-prod: User does not have bigquery.datasets.create permission in project upside-cloud-prod. 
- Setting attempt to FAILED because the job was cancelled - """; - - final Optional optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); - Assertions.assertTrue(optionalSentryExceptions.isPresent()); - - final SentryParsedException parsedException = optionalSentryExceptions.get(); - final List exceptionList = parsedException.exceptions(); - Assertions.assertEquals(SentryExceptionPlatform.OTHER, parsedException.platform()); - Assertions.assertEquals(1, exceptionList.size()); - - assertExceptionContent(exceptionList.get(0), "DbtDatabaseError", - "Access Denied: Project upside-cloud-prod: User does not have bigquery.datasets.create permission in project upside-cloud-prod.", List.of()); - } - - private void assertExceptionContent(final SentryException exception, - final String type, - final String value, - final List> frames) { - Assertions.assertEquals(type, exception.getType()); - Assertions.assertEquals(value, exception.getValue()); - - if (!frames.isEmpty()) { - final SentryStackTrace stackTrace = exception.getStacktrace(); - Assertions.assertNotNull(stackTrace); - final List sentryFrames = stackTrace.getFrames(); - Assertions.assertNotNull(sentryFrames); - Assertions.assertEquals(frames.size(), sentryFrames.size()); - - for (int i = 0; i < frames.size(); i++) { - final Map expectedFrame = frames.get(i); - final SentryStackFrame sentryFrame = sentryFrames.get(i); - - if (expectedFrame.containsKey(MODULE)) { - Assertions.assertEquals(expectedFrame.get(MODULE), sentryFrame.getModule()); - } - - if (expectedFrame.containsKey(FILENAME)) { - Assertions.assertEquals(expectedFrame.get(FILENAME), sentryFrame.getFilename()); - } - - if (expectedFrame.containsKey(ABS_PATH)) { - Assertions.assertEquals(expectedFrame.get(ABS_PATH), sentryFrame.getAbsPath()); - } - - if (expectedFrame.containsKey(FUNCTION)) { - Assertions.assertEquals(expectedFrame.get(FUNCTION), sentryFrame.getFunction()); - } - - if (expectedFrame.containsKey(LINE_NO)) { - 
Assertions.assertEquals(expectedFrame.get(LINE_NO), sentryFrame.getLineno()); - } - - if (expectedFrame.containsKey(CONTEXT_LINE)) { - Assertions.assertEquals(expectedFrame.get(CONTEXT_LINE), sentryFrame.getContextLine()); - } - - if (expectedFrame.containsKey("isNative")) { - Assertions.assertEquals(expectedFrame.get("isNative"), sentryFrame.isNative()); - } - } - } - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryJobErrorReportingClientTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryJobErrorReportingClientTest.java deleted file mode 100644 index 0aeb90f38bc4..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryJobErrorReportingClientTest.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.errorreporter; - -import static io.airbyte.persistence.job.errorreporter.SentryJobErrorReportingClient.STACKTRACE_PARSE_ERROR_TAG_KEY; -import static io.airbyte.persistence.job.errorreporter.SentryJobErrorReportingClient.STACKTRACE_PLATFORM_TAG_KEY; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.config.FailureReason; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper.SentryExceptionPlatform; -import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper.SentryParsedException; 
-import io.sentry.IHub; -import io.sentry.NoOpHub; -import io.sentry.SentryEvent; -import io.sentry.protocol.Message; -import io.sentry.protocol.SentryException; -import io.sentry.protocol.User; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -class SentryJobErrorReportingClientTest { - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final String WORKSPACE_NAME = "My Workspace"; - private static final String DOCKER_IMAGE = "airbyte/source-stripe:1.2.3"; - private static final String ERROR_MESSAGE = "RuntimeError: Something went wrong"; - - private final StandardWorkspace workspace = new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withName(WORKSPACE_NAME); - private SentryJobErrorReportingClient sentryErrorReportingClient; - private IHub mockSentryHub; - private SentryExceptionHelper mockSentryExceptionHelper; - - @BeforeEach - void setup() { - mockSentryHub = mock(IHub.class); - mockSentryExceptionHelper = mock(SentryExceptionHelper.class); - sentryErrorReportingClient = new SentryJobErrorReportingClient(mockSentryHub, mockSentryExceptionHelper); - } - - @Test - void testCreateSentryHubWithBlankDSN() { - final String sentryDSN = ""; - final IHub sentryHub = SentryJobErrorReportingClient.createSentryHubWithDSN(sentryDSN); - assertEquals(NoOpHub.getInstance(), sentryHub); - } - - @Test - void testCreateSentryHubWithNullDSN() { - final IHub sentryHub = SentryJobErrorReportingClient.createSentryHubWithDSN(null); - assertEquals(NoOpHub.getInstance(), sentryHub); - } - - @Test - void testCreateSentryHubWithDSN() { - final String sentryDSN = "https://public@sentry.example.com/1"; - final IHub sentryHub = SentryJobErrorReportingClient.createSentryHubWithDSN(sentryDSN); - assertNotNull(sentryHub); - assertEquals(sentryDSN, 
sentryHub.getOptions().getDsn()); - assertFalse(sentryHub.getOptions().isAttachStacktrace()); - assertFalse(sentryHub.getOptions().isEnableUncaughtExceptionHandler()); - } - - @Test - void testReportJobFailureReason() { - final ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(SentryEvent.class); - - final FailureReason failureReason = new FailureReason() - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR) - .withInternalMessage(ERROR_MESSAGE); - final Map metadata = Map.of("some_metadata", "some_metadata_value"); - - sentryErrorReportingClient.reportJobFailureReason(workspace, failureReason, DOCKER_IMAGE, metadata); - - verify(mockSentryHub).captureEvent(eventCaptor.capture()); - final SentryEvent actualEvent = eventCaptor.getValue(); - assertEquals("other", actualEvent.getPlatform()); - assertEquals("airbyte-source-stripe@1.2.3", actualEvent.getRelease()); - assertEquals(List.of("{{ default }}", "airbyte-source-stripe"), actualEvent.getFingerprints()); - assertEquals("some_metadata_value", actualEvent.getTag("some_metadata")); - assertNull(actualEvent.getTag(STACKTRACE_PARSE_ERROR_TAG_KEY)); - assertNull(actualEvent.getExceptions()); - - final User sentryUser = actualEvent.getUser(); - assertNotNull(sentryUser); - assertEquals(WORKSPACE_ID.toString(), sentryUser.getId()); - assertEquals(WORKSPACE_NAME, sentryUser.getUsername()); - - final Message message = actualEvent.getMessage(); - assertNotNull(message); - assertEquals(ERROR_MESSAGE, message.getFormatted()); - } - - @Test - void testReportJobFailureReasonWithNoWorkspace() { - final ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(SentryEvent.class); - - final FailureReason failureReason = new FailureReason() - .withFailureOrigin(FailureOrigin.SOURCE) - .withFailureType(FailureType.SYSTEM_ERROR) - .withInternalMessage(ERROR_MESSAGE); - - sentryErrorReportingClient.reportJobFailureReason(null, failureReason, DOCKER_IMAGE, Map.of()); - - 
verify(mockSentryHub).captureEvent(eventCaptor.capture()); - final SentryEvent actualEvent = eventCaptor.getValue(); - final User sentryUser = actualEvent.getUser(); - assertNull(sentryUser); - - final Message message = actualEvent.getMessage(); - assertNotNull(message); - assertEquals(ERROR_MESSAGE, message.getFormatted()); - } - - @Test - void testReportJobFailureReasonWithStacktrace() { - final ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(SentryEvent.class); - - final List exceptions = new ArrayList<>(); - final SentryException exception = new SentryException(); - exception.setType("RuntimeError"); - exception.setValue("Something went wrong"); - exceptions.add(exception); - - final SentryParsedException parsedException = new SentryParsedException(SentryExceptionPlatform.PYTHON, exceptions); - when(mockSentryExceptionHelper.buildSentryExceptions("Some valid stacktrace")).thenReturn(Optional.of(parsedException)); - - final FailureReason failureReason = new FailureReason() - .withInternalMessage(ERROR_MESSAGE) - .withStacktrace("Some valid stacktrace"); - - sentryErrorReportingClient.reportJobFailureReason(workspace, failureReason, DOCKER_IMAGE, Map.of()); - - verify(mockSentryHub).captureEvent(eventCaptor.capture()); - final SentryEvent actualEvent = eventCaptor.getValue(); - assertEquals(exceptions, actualEvent.getExceptions()); - assertNull(actualEvent.getTag(STACKTRACE_PARSE_ERROR_TAG_KEY)); - assertEquals("python", actualEvent.getPlatform()); - assertEquals("python", actualEvent.getTag(STACKTRACE_PLATFORM_TAG_KEY)); - } - - @Test - void testReportJobFailureReasonWithInvalidStacktrace() { - final ArgumentCaptor eventCaptor = ArgumentCaptor.forClass(SentryEvent.class); - final String invalidStacktrace = "Invalid stacktrace\nRuntimeError: Something went wrong"; - - when(mockSentryExceptionHelper.buildSentryExceptions(invalidStacktrace)).thenReturn(Optional.empty()); - - final FailureReason failureReason = new FailureReason() - 
.withInternalMessage("Something went wrong") - .withStacktrace(invalidStacktrace); - - sentryErrorReportingClient.reportJobFailureReason(workspace, failureReason, DOCKER_IMAGE, Map.of()); - - verify(mockSentryHub).captureEvent(eventCaptor.capture()); - final SentryEvent actualEvent = eventCaptor.getValue(); - assertEquals("1", actualEvent.getTag(STACKTRACE_PARSE_ERROR_TAG_KEY)); - final List exceptions = actualEvent.getExceptions(); - assertNotNull(exceptions); - assertEquals(1, exceptions.size()); - assertEquals("Invalid stacktrace, RuntimeError: ", exceptions.get(0).getValue()); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java deleted file mode 100644 index ed00191847e9..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.factory; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.version.Version; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.DefaultJobCreator; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -class DefaultSyncJobFactoryTest { - - @Test - void createSyncJobFromConnectionId() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID sourceDefinitionId = UUID.randomUUID(); - final UUID destinationDefinitionId = UUID.randomUUID(); - final UUID connectionId = UUID.randomUUID(); - final UUID sourceId = UUID.randomUUID(); - final UUID destinationId = UUID.randomUUID(); - final UUID operationId = UUID.randomUUID(); - final UUID workspaceWebhookConfigId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - final String workspaceWebhookName = "test-webhook-name"; - final JsonNode persistedWebhookConfigs = Jsons.deserialize( - String.format("{\"webhookConfigs\": [{\"id\": \"%s\", \"name\": \"%s\", 
\"authToken\": {\"_secret\": \"a-secret_v1\"}}]}", - workspaceWebhookConfigId, workspaceWebhookName)); - final DefaultJobCreator jobCreator = mock(DefaultJobCreator.class); - final ConfigRepository configRepository = mock(ConfigRepository.class); - final WorkspaceHelper workspaceHelper = mock(WorkspaceHelper.class); - final long jobId = 11L; - - final StandardSyncOperation operation = new StandardSyncOperation().withOperationId(operationId); - final List operations = List.of(operation); - final StandardSync standardSync = new StandardSync() - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withOperationIds(List.of(operationId)); - - final SourceConnection sourceConnection = new SourceConnection().withSourceDefinitionId(sourceDefinitionId); - final DestinationConnection destinationConnection = - new DestinationConnection().withDestinationDefinitionId(destinationDefinitionId); - - final String srcDockerRepo = "srcrepo"; - final String srcDockerTag = "tag"; - final String srcDockerImage = srcDockerRepo + ":" + srcDockerTag; - final Version srcProtocolVersion = new Version("0.3.1"); - - final String dstDockerRepo = "dstrepo"; - final String dstDockerTag = "tag"; - final String dstDockerImage = dstDockerRepo + ":" + dstDockerTag; - final Version dstProtocolVersion = new Version("0.3.2"); - final StandardSourceDefinition standardSourceDefinition = - new StandardSourceDefinition().withSourceDefinitionId(sourceDefinitionId).withDockerRepository(srcDockerRepo) - .withDockerImageTag(srcDockerTag).withProtocolVersion(srcProtocolVersion.serialize()); - final StandardDestinationDefinition standardDestinationDefinition = - new StandardDestinationDefinition().withDestinationDefinitionId(destinationDefinitionId).withDockerRepository(dstDockerRepo) - .withDockerImageTag(dstDockerTag).withProtocolVersion(dstProtocolVersion.serialize()); - - when(configRepository.getStandardSync(connectionId)).thenReturn(standardSync); - 
when(configRepository.getSourceConnection(sourceId)).thenReturn(sourceConnection); - when(configRepository.getDestinationConnection(destinationId)).thenReturn(destinationConnection); - when(configRepository.getStandardSyncOperation(operationId)).thenReturn(operation); - when( - jobCreator.createSyncJob(sourceConnection, destinationConnection, standardSync, srcDockerImage, srcProtocolVersion, dstDockerImage, - dstProtocolVersion, operations, - persistedWebhookConfigs, standardSourceDefinition, standardDestinationDefinition, workspaceId)) - .thenReturn(Optional.of(jobId)); - when(configRepository.getStandardSourceDefinition(sourceDefinitionId)) - .thenReturn(standardSourceDefinition); - - when(configRepository.getStandardDestinationDefinition(destinationDefinitionId)) - .thenReturn(standardDestinationDefinition); - - when(configRepository.getStandardWorkspaceNoSecrets(any(), eq(true))).thenReturn( - new StandardWorkspace().withWorkspaceId(workspaceId).withWebhookOperationConfigs(persistedWebhookConfigs)); - - final SyncJobFactory factory = new DefaultSyncJobFactory(true, jobCreator, configRepository, mock(OAuthConfigSupplier.class), workspaceHelper); - final long actualJobId = factory.create(connectionId); - assertEquals(jobId, actualJobId); - - verify(jobCreator) - .createSyncJob(sourceConnection, destinationConnection, standardSync, srcDockerImage, srcProtocolVersion, dstDockerImage, dstProtocolVersion, - operations, persistedWebhookConfigs, - standardSourceDefinition, standardDestinationDefinition, workspaceId); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/OAuthConfigSupplierTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/OAuthConfigSupplierTest.java deleted file mode 100644 index f115ccbf3133..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/OAuthConfigSupplierTest.java +++ /dev/null @@ -1,374 +0,0 
@@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.factory; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.oauth.MoreOAuthParameters; -import io.airbyte.protocol.models.AdvancedAuth; -import io.airbyte.protocol.models.AdvancedAuth.AuthFlowType; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.OAuthConfigSpecification; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class OAuthConfigSupplierTest { - - static final String API_CLIENT = "api_client"; - static final String CREDENTIALS = "credentials"; - static final String PROPERTIES = "properties"; - - private static final String AUTH_TYPE = "auth_type"; - private static final String OAUTH = "oauth"; - private static final String API_SECRET = "api_secret"; - - private ConfigRepository configRepository; - private TrackingClient trackingClient; - private OAuthConfigSupplier oAuthConfigSupplier; - private UUID sourceDefinitionId; - private StandardSourceDefinition 
testSourceDefinition; - - @BeforeEach - void setup() throws JsonValidationException, ConfigNotFoundException, IOException { - configRepository = mock(ConfigRepository.class); - trackingClient = mock(TrackingClient.class); - oAuthConfigSupplier = new OAuthConfigSupplier(configRepository, trackingClient); - sourceDefinitionId = UUID.randomUUID(); - testSourceDefinition = new StandardSourceDefinition() - .withSourceDefinitionId(sourceDefinitionId) - .withName("test") - .withDockerRepository("test/test") - .withDockerImageTag("dev") - .withSpec(null); - - setupStandardDefinitionMock(createAdvancedAuth() - .withPredicateKey(List.of(CREDENTIALS, AUTH_TYPE)) - .withPredicateValue(OAUTH)); - } - - @Test - void testNoOAuthInjectionBecauseEmptyParams() throws IOException { - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - assertEquals(config, actualConfig); - assertNoTracking(); - } - - @Test - void testNoAuthMaskingBecauseEmptyParams() throws IOException { - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final JsonNode actualConfig = oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - assertEquals(config, actualConfig); - } - - @Test - void testNoOAuthInjectionBecauseMissingPredicateKey() throws IOException, JsonValidationException, ConfigNotFoundException { - setupStandardDefinitionMock(createAdvancedAuth() - .withPredicateKey(List.of("some_random_fields", AUTH_TYPE)) - .withPredicateValue(OAUTH)); - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - setupOAuthParamMocks(generateOAuthParameters()); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - assertEquals(config, 
actualConfig); - assertNoTracking(); - } - - @Test - void testNoOAuthInjectionBecauseWrongPredicateValue() throws IOException, JsonValidationException, ConfigNotFoundException { - setupStandardDefinitionMock(createAdvancedAuth() - .withPredicateKey(List.of(CREDENTIALS, AUTH_TYPE)) - .withPredicateValue("wrong_auth_type")); - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - setupOAuthParamMocks(generateOAuthParameters()); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - assertEquals(config, actualConfig); - assertNoTracking(); - } - - @Test - void testNoOAuthMaskingBecauseWrongPredicateValue() throws IOException, JsonValidationException, ConfigNotFoundException { - setupStandardDefinitionMock(createAdvancedAuth() - .withPredicateKey(List.of(CREDENTIALS, AUTH_TYPE)) - .withPredicateValue("wrong_auth_type")); - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - setupOAuthParamMocks(generateOAuthParameters()); - final JsonNode actualConfig = oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - assertEquals(config, actualConfig); - } - - @Test - void testOAuthInjection() throws JsonValidationException, IOException { - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode((String) oauthParameters.get(API_CLIENT)); - assertEquals(expectedConfig, actualConfig); - assertTracking(workspaceId); - } - - @Test - void testOAuthMasking() throws JsonValidationException, IOException { - final JsonNode config = generateJsonConfig(); - final UUID 
workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode(MoreOAuthParameters.SECRET_MASK); - assertEquals(expectedConfig, actualConfig); - } - - @Test - void testOAuthInjectionWithoutPredicate() throws JsonValidationException, IOException, ConfigNotFoundException { - setupStandardDefinitionMock(createAdvancedAuth() - .withPredicateKey(null) - .withPredicateValue(null)); - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode((String) oauthParameters.get(API_CLIENT)); - assertEquals(expectedConfig, actualConfig); - assertTracking(workspaceId); - } - - @Test - void testOAuthMaskingWithoutPredicate() throws JsonValidationException, IOException, ConfigNotFoundException { - setupStandardDefinitionMock(createAdvancedAuth() - .withPredicateKey(null) - .withPredicateValue(null)); - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode(MoreOAuthParameters.SECRET_MASK); - assertEquals(expectedConfig, actualConfig); - } - - @Test - void testOAuthInjectionWithoutPredicateValue() throws JsonValidationException, IOException, ConfigNotFoundException { - 
setupStandardDefinitionMock(createAdvancedAuth() - .withPredicateKey(List.of(CREDENTIALS, AUTH_TYPE)) - .withPredicateValue("")); - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode((String) oauthParameters.get(API_CLIENT)); - assertEquals(expectedConfig, actualConfig); - assertTracking(workspaceId); - } - - @Test - void testOAuthMaskingWithoutPredicateValue() throws JsonValidationException, IOException, ConfigNotFoundException { - setupStandardDefinitionMock(createAdvancedAuth() - .withPredicateKey(List.of(CREDENTIALS, AUTH_TYPE)) - .withPredicateValue("")); - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode(MoreOAuthParameters.SECRET_MASK); - assertEquals(expectedConfig, actualConfig); - } - - @Test - void testOAuthFullInjectionBecauseNoOAuthSpec() throws JsonValidationException, IOException, ConfigNotFoundException { - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - when(configRepository.getStandardSourceDefinition(any())) - .thenReturn(testSourceDefinition.withSpec(null)); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final ObjectNode expectedConfig = ((ObjectNode) Jsons.clone(config)); - for 
(final String key : oauthParameters.keySet()) { - expectedConfig.set(key, Jsons.jsonNode(oauthParameters.get(key))); - } - assertEquals(expectedConfig, actualConfig); - assertTracking(workspaceId); - } - - @Test - void testOAuthNoMaskingBecauseNoOAuthSpec() throws JsonValidationException, IOException, ConfigNotFoundException { - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - when(configRepository.getStandardSourceDefinition(any())) - .thenReturn(testSourceDefinition.withSpec(null)); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - assertEquals(config, actualConfig); - } - - @Test - void testOAuthInjectionScopedToWorkspace() throws JsonValidationException, IOException { - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateOAuthParameters(); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of( - new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(UUID.randomUUID()) - .withWorkspaceId(null) - .withConfiguration(Jsons.jsonNode(generateOAuthParameters())), - new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(sourceDefinitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(oauthParameters)))); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode((String) oauthParameters.get(API_CLIENT)); - assertEquals(expectedConfig, actualConfig); - assertTracking(workspaceId); - } - - @Test - void testOAuthFullInjectionBecauseNoOAuthSpecNestedParameters() throws JsonValidationException, IOException, ConfigNotFoundException { - 
// Until https://github.com/airbytehq/airbyte/issues/7624 is solved, we need to handle nested oauth - // parameters - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateNestedOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = Jsons.jsonNode(Map.of( - "fieldName", "fieldValue", - CREDENTIALS, Map.of( - API_SECRET, "123", - AUTH_TYPE, OAUTH, - API_CLIENT, ((Map) oauthParameters.get(CREDENTIALS)).get(API_CLIENT)))); - assertEquals(expectedConfig, actualConfig); - assertTracking(workspaceId); - } - - @Test - void testOAuthInjectionNestedParameters() throws JsonValidationException, IOException { - // Until https://github.com/airbytehq/airbyte/issues/7624 is solved, we need to handle nested oauth - // parameters - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateNestedOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = oAuthConfigSupplier.injectSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode((String) ((Map) oauthParameters.get(CREDENTIALS)).get(API_CLIENT)); - assertEquals(expectedConfig, actualConfig); - assertTracking(workspaceId); - } - - @Test - void testOAuthMaskingNestedParameters() throws JsonValidationException, IOException { - // Until https://github.com/airbytehq/airbyte/issues/7624 is solved, we need to handle nested oauth - // parameters - final JsonNode config = generateJsonConfig(); - final UUID workspaceId = UUID.randomUUID(); - final Map oauthParameters = generateNestedOAuthParameters(); - setupOAuthParamMocks(oauthParameters); - final JsonNode actualConfig = 
oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionId, workspaceId, Jsons.clone(config)); - final JsonNode expectedConfig = getExpectedNode(MoreOAuthParameters.SECRET_MASK); - assertEquals(expectedConfig, actualConfig); - } - - private static AdvancedAuth createAdvancedAuth() { - return new AdvancedAuth() - .withAuthFlowType(AuthFlowType.OAUTH_2_0) - .withOauthConfigSpecification(new OAuthConfigSpecification() - .withCompleteOauthServerOutputSpecification(Jsons.jsonNode(Map.of(PROPERTIES, - Map.of(API_CLIENT, Map.of( - "type", "string", - OAuthConfigSupplier.PATH_IN_CONNECTOR_CONFIG, List.of(CREDENTIALS, API_CLIENT))))))); - } - - private void setupStandardDefinitionMock(final AdvancedAuth advancedAuth) throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.getStandardSourceDefinition(any())).thenReturn(testSourceDefinition - .withSpec(new ConnectorSpecification().withAdvancedAuth(advancedAuth))); - } - - private void setupOAuthParamMocks(final Map oauthParameters) throws JsonValidationException, IOException { - when(configRepository.listSourceOAuthParam()).thenReturn(List.of( - new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(sourceDefinitionId) - .withWorkspaceId(null) - .withConfiguration(Jsons.jsonNode(oauthParameters)), - new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(UUID.randomUUID()) - .withWorkspaceId(null) - .withConfiguration(Jsons.jsonNode(generateOAuthParameters())))); - } - - private static ObjectNode generateJsonConfig() { - return (ObjectNode) Jsons.jsonNode( - Map.of( - "fieldName", "fieldValue", - CREDENTIALS, Map.of( - API_SECRET, "123", - AUTH_TYPE, OAUTH))); - } - - private static Map generateOAuthParameters() { - return Map.of( - API_SECRET, "mysecret", - API_CLIENT, UUID.randomUUID().toString()); - } - - private static Map generateNestedOAuthParameters() { - return Map.of(CREDENTIALS, 
generateOAuthParameters()); - } - - private static JsonNode getExpectedNode(final String apiClient) { - return Jsons.jsonNode( - Map.of( - "fieldName", "fieldValue", - CREDENTIALS, Map.of( - API_SECRET, "123", - AUTH_TYPE, OAUTH, - API_CLIENT, apiClient))); - } - - private void assertNoTracking() { - verify(trackingClient, times(0)).track(any(), anyString(), anyMap()); - } - - private void assertTracking(final UUID workspaceId) { - verify(trackingClient, times(1)).track(workspaceId, "OAuth Injection - Backend", Map.of( - "connector_source", "test", - "connector_source_definition_id", sourceDefinitionId, - "connector_source_docker_repository", "test/test", - "connector_source_version", "dev")); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/models/AttemptTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/models/AttemptTest.java deleted file mode 100644 index 084162d0964c..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/models/AttemptTest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.models; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import org.junit.jupiter.api.Test; - -class AttemptTest { - - @Test - void testIsAttemptInTerminalState() { - assertFalse(Attempt.isAttemptInTerminalState(attemptWithStatus(AttemptStatus.RUNNING))); - assertTrue(Attempt.isAttemptInTerminalState(attemptWithStatus(AttemptStatus.FAILED))); - assertTrue(Attempt.isAttemptInTerminalState(attemptWithStatus(AttemptStatus.SUCCEEDED))); - } - - private static Attempt attemptWithStatus(final AttemptStatus attemptStatus) { - return new Attempt(1, 1L, null, null, null, attemptStatus, null, null, 0L, 0L, null); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/models/JobTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/models/JobTest.java deleted file mode 100644 index 6af45f53cd5d..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/models/JobTest.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.models; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import org.junit.jupiter.api.Test; - -class JobTest { - - @Test - void testIsJobInTerminalState() { - assertFalse(jobWithStatus(JobStatus.PENDING).isJobInTerminalState()); - assertFalse(jobWithStatus(JobStatus.RUNNING).isJobInTerminalState()); - assertFalse(jobWithStatus(JobStatus.INCOMPLETE).isJobInTerminalState()); - assertTrue(jobWithStatus(JobStatus.FAILED).isJobInTerminalState()); - assertTrue(jobWithStatus(JobStatus.SUCCEEDED).isJobInTerminalState()); - assertTrue(jobWithStatus(JobStatus.CANCELLED).isJobInTerminalState()); - - } - - private static Job jobWithStatus(final JobStatus jobStatus) { - return new Job(1L, null, null, null, null, jobStatus, 0L, 0L, 0L); - } - - @Test - void testHasRunningAttempt() { - assertTrue(jobWithAttemptWithStatus(AttemptStatus.RUNNING).hasRunningAttempt()); - assertFalse(jobWithAttemptWithStatus(AttemptStatus.FAILED).hasRunningAttempt()); - assertFalse(jobWithAttemptWithStatus(AttemptStatus.SUCCEEDED).hasRunningAttempt()); - assertFalse(jobWithAttemptWithStatus().hasRunningAttempt()); - assertTrue(jobWithAttemptWithStatus(AttemptStatus.SUCCEEDED, AttemptStatus.RUNNING).hasRunningAttempt()); - } - - private static Job jobWithAttemptWithStatus(final AttemptStatus... 
attemptStatuses) { - final List attempts = IntStream.range(0, attemptStatuses.length) - .mapToObj(idx -> new Attempt(idx + 1, 1L, null, null, null, attemptStatuses[idx], null, null, idx, 0L, null)) - .collect(Collectors.toList()); - return new Job(1L, null, null, null, attempts, null, 0L, 0L, 0L); - } - - @Test - void testGetSuccessfulAttempt() { - assertTrue(jobWithAttemptWithStatus().getSuccessfulAttempt().isEmpty()); - assertTrue(jobWithAttemptWithStatus(AttemptStatus.FAILED).getSuccessfulAttempt().isEmpty()); - assertThrows(IllegalStateException.class, - () -> jobWithAttemptWithStatus(AttemptStatus.SUCCEEDED, AttemptStatus.SUCCEEDED).getSuccessfulAttempt()); - - final Job job = jobWithAttemptWithStatus(AttemptStatus.FAILED, AttemptStatus.SUCCEEDED); - assertTrue(job.getSuccessfulAttempt().isPresent()); - assertEquals(job.getAttempts().get(1), job.getSuccessfulAttempt().get()); - } - - @Test - void testGetLastFailedAttempt() { - assertTrue(jobWithAttemptWithStatus().getLastFailedAttempt().isEmpty()); - assertTrue(jobWithAttemptWithStatus(AttemptStatus.SUCCEEDED).getLastFailedAttempt().isEmpty()); - assertTrue(jobWithAttemptWithStatus(AttemptStatus.FAILED).getLastFailedAttempt().isPresent()); - - final Job job = jobWithAttemptWithStatus(AttemptStatus.FAILED, AttemptStatus.FAILED); - assertTrue(job.getLastFailedAttempt().isPresent()); - assertEquals(2, job.getLastFailedAttempt().get().getAttemptNumber()); - } - - @Test - void testGetLastAttempt() { - final Job job = jobWithAttemptWithStatus(AttemptStatus.FAILED, AttemptStatus.FAILED, AttemptStatus.SUCCEEDED); - assertTrue(job.getLastAttempt().isPresent()); - assertEquals(3, job.getLastAttempt().get().getAttemptNumber()); - } - - @Test - void testGetAttemptByNumber() { - final Job job = jobWithAttemptWithStatus(AttemptStatus.FAILED, AttemptStatus.FAILED, AttemptStatus.SUCCEEDED); - assertTrue(job.getAttemptByNumber(2).isPresent()); - assertEquals(2, job.getAttemptByNumber(2).get().getAttemptNumber()); - } - - @Test 
- void testValidateStatusTransitionFromPending() { - final Job pendingJob = jobWithStatus(JobStatus.PENDING); - assertDoesNotThrow(() -> pendingJob.validateStatusTransition(JobStatus.RUNNING)); - assertDoesNotThrow(() -> pendingJob.validateStatusTransition(JobStatus.FAILED)); - assertDoesNotThrow(() -> pendingJob.validateStatusTransition(JobStatus.CANCELLED)); - assertThrows(IllegalStateException.class, () -> pendingJob.validateStatusTransition(JobStatus.INCOMPLETE)); - assertThrows(IllegalStateException.class, () -> pendingJob.validateStatusTransition(JobStatus.SUCCEEDED)); - } - - @Test - void testValidateStatusTransitionFromRunning() { - final Job runningJob = jobWithStatus(JobStatus.RUNNING); - assertDoesNotThrow(() -> runningJob.validateStatusTransition(JobStatus.INCOMPLETE)); - assertDoesNotThrow(() -> runningJob.validateStatusTransition(JobStatus.SUCCEEDED)); - assertDoesNotThrow(() -> runningJob.validateStatusTransition(JobStatus.FAILED)); - assertDoesNotThrow(() -> runningJob.validateStatusTransition(JobStatus.CANCELLED)); - assertThrows(IllegalStateException.class, () -> runningJob.validateStatusTransition(JobStatus.PENDING)); - } - - @Test - void testValidateStatusTransitionFromIncomplete() { - final Job incompleteJob = jobWithStatus(JobStatus.INCOMPLETE); - assertDoesNotThrow(() -> incompleteJob.validateStatusTransition(JobStatus.PENDING)); - assertDoesNotThrow(() -> incompleteJob.validateStatusTransition(JobStatus.RUNNING)); - assertDoesNotThrow(() -> incompleteJob.validateStatusTransition(JobStatus.FAILED)); - assertDoesNotThrow(() -> incompleteJob.validateStatusTransition(JobStatus.CANCELLED)); - assertThrows(IllegalStateException.class, () -> incompleteJob.validateStatusTransition(JobStatus.SUCCEEDED)); - } - - @Test - void testValidateStatusTransitionFromSucceeded() { - final Job suceededJob = jobWithStatus(JobStatus.SUCCEEDED); - assertThrows(IllegalStateException.class, () -> suceededJob.validateStatusTransition(JobStatus.PENDING)); - 
assertThrows(IllegalStateException.class, () -> suceededJob.validateStatusTransition(JobStatus.RUNNING)); - assertThrows(IllegalStateException.class, () -> suceededJob.validateStatusTransition(JobStatus.INCOMPLETE)); - assertThrows(IllegalStateException.class, () -> suceededJob.validateStatusTransition(JobStatus.FAILED)); - assertThrows(IllegalStateException.class, () -> suceededJob.validateStatusTransition(JobStatus.CANCELLED)); - } - - @Test - void testValidateStatusTransitionFromFailed() { - final Job failedJob = jobWithStatus(JobStatus.FAILED); - assertThrows(IllegalStateException.class, () -> failedJob.validateStatusTransition(JobStatus.SUCCEEDED)); - assertThrows(IllegalStateException.class, () -> failedJob.validateStatusTransition(JobStatus.PENDING)); - assertThrows(IllegalStateException.class, () -> failedJob.validateStatusTransition(JobStatus.RUNNING)); - assertThrows(IllegalStateException.class, () -> failedJob.validateStatusTransition(JobStatus.INCOMPLETE)); - assertThrows(IllegalStateException.class, () -> failedJob.validateStatusTransition(JobStatus.CANCELLED)); - } - - @Test - void testValidateStatusTransitionFromCancelled() { - final Job cancelledJob = jobWithStatus(JobStatus.CANCELLED); - assertThrows(IllegalStateException.class, () -> cancelledJob.validateStatusTransition(JobStatus.SUCCEEDED)); - assertThrows(IllegalStateException.class, () -> cancelledJob.validateStatusTransition(JobStatus.PENDING)); - assertThrows(IllegalStateException.class, () -> cancelledJob.validateStatusTransition(JobStatus.RUNNING)); - assertThrows(IllegalStateException.class, () -> cancelledJob.validateStatusTransition(JobStatus.INCOMPLETE)); - assertThrows(IllegalStateException.class, () -> cancelledJob.validateStatusTransition(JobStatus.CANCELLED)); - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java 
b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java deleted file mode 100644 index 70f1a7cc394c..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java +++ /dev/null @@ -1,764 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.tracker; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.config.AttemptFailureSummary; -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.FailureReason; -import io.airbyte.config.JobConfig; -import io.airbyte.config.JobConfig.ConfigType; -import io.airbyte.config.JobOutput; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.Metadata; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.config.Schedule; -import io.airbyte.config.Schedule.TimeUnit; -import io.airbyte.config.StandardCheckConnectionOutput; -import io.airbyte.config.StandardCheckConnectionOutput.Status; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.config.StandardSyncSummary; -import 
io.airbyte.config.StandardWorkspace; -import io.airbyte.config.SyncStats; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.persistence.job.tracker.JobTracker.JobState; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.protocol.models.DestinationSyncMode; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.SyncMode; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.function.BiConsumer; -import java.util.function.Consumer; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class JobTrackerTest { - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final String WORKSPACE_NAME = "WORKSPACE_TEST"; - private static final UUID JOB_ID = UUID.randomUUID(); - private static final UUID UUID1 = UUID.randomUUID(); - private static final UUID UUID2 = UUID.randomUUID(); - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final String SOURCE_DEF_NAME = "postgres"; - private static final String DESTINATION_DEF_NAME = "bigquery"; - private static 
final String CONNECTOR_REPOSITORY = "test/test"; - private static final String CONNECTOR_VERSION = "test"; - private static final String JOB_TYPE = "job_type"; - private static final String JOB_ID_KEY = "job_id"; - private static final String ATTEMPT_ID = "attempt_id"; - private static final String METADATA = "metadata"; - private static final String SOME = "some"; - private static final String ATTEMPT_STAGE_KEY = "attempt_stage"; - private static final String CONNECTOR_SOURCE_KEY = "connector_source"; - private static final String CONNECTOR_SOURCE_DEFINITION_ID_KEY = "connector_source_definition_id"; - private static final String CONNECTOR_SOURCE_DOCKER_REPOSITORY_KEY = "connector_source_docker_repository"; - private static final String CONNECTOR_SOURCE_VERSION_KEY = "connector_source_version"; - private static final String FREQUENCY_KEY = "frequency"; - - private static final long SYNC_START_TIME = 1000L; - private static final long SYNC_END_TIME = 10000L; - private static final long SYNC_DURATION = 9L; // in sync between end and start time - private static final long SYNC_BYTES_SYNC = 42L; - private static final long SYNC_RECORDS_SYNC = 4L; - private static final long LONG_JOB_ID = 10L; // for sync the job id is a long not a uuid. 
- - private static final ImmutableMap STARTED_STATE_METADATA = ImmutableMap.builder() - .put(ATTEMPT_STAGE_KEY, "STARTED") - .build(); - private static final ImmutableMap SUCCEEDED_STATE_METADATA = ImmutableMap.builder() - .put(ATTEMPT_STAGE_KEY, "ENDED") - .put("attempt_completion_status", JobState.SUCCEEDED) - .build(); - private static final ImmutableMap FAILED_STATE_METADATA = ImmutableMap.builder() - .put(ATTEMPT_STAGE_KEY, "ENDED") - .put("attempt_completion_status", JobState.FAILED) - .build(); - private static final ImmutableMap ATTEMPT_METADATA = ImmutableMap.builder() - .put("sync_start_time", SYNC_START_TIME) - .put("duration", SYNC_DURATION) - .put("volume_rows", SYNC_RECORDS_SYNC) - .put("volume_mb", SYNC_BYTES_SYNC) - .put("count_state_messages_from_source", 3L) - .put("count_state_messages_from_destination", 1L) - .put("max_seconds_before_source_state_message_emitted", 5L) - .put("mean_seconds_before_source_state_message_emitted", 4L) - .put("max_seconds_between_state_message_emit_and_commit", 7L) - .put("mean_seconds_between_state_message_emit_and_commit", 6L) - .put("replication_start_time", 7L) - .put("replication_end_time", 8L) - .put("source_read_start_time", 9L) - .put("source_read_end_time", 10L) - .put("destination_write_start_time", 11L) - .put("destination_write_end_time", 12L) - .put("normalization_start_time", 13L) - .put("normalization_end_time", 14L) - .build(); - private static final ImmutableMap SYNC_CONFIG_METADATA = ImmutableMap.builder() - .put(JobTracker.CONFIG + ".source.key", JobTracker.SET) - .put(JobTracker.CONFIG + ".destination.key", false) - .put(JobTracker.CATALOG + ".sync_mode.full_refresh", JobTracker.SET) - .put(JobTracker.CATALOG + ".destination_sync_mode.append", JobTracker.SET) - .put("namespace_definition", NamespaceDefinitionType.SOURCE) - .put("table_prefix", false) - .put("operation_count", 0) - .build(); - private static final ConfiguredAirbyteCatalog CATALOG = CatalogHelpers - 
.createConfiguredAirbyteCatalog("stream_name", "stream_namespace", - Field.of("int_field", JsonSchemaType.NUMBER)); - - private static final ConnectorSpecification SOURCE_SPEC; - private static final ConnectorSpecification DESTINATION_SPEC; - - static { - try { - SOURCE_SPEC = new ConnectorSpecification().withConnectionSpecification(OBJECT_MAPPER.readTree( - """ - { - "type": "object", - "properties": { - "key": { - "type": "string" - } - } - } - """)); - DESTINATION_SPEC = new ConnectorSpecification().withConnectionSpecification(OBJECT_MAPPER.readTree( - """ - { - "type": "object", - "properties": { - "key": { - "type": "boolean" - } - } - } - """)); - } catch (final JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - private ConfigRepository configRepository; - - private JobPersistence jobPersistence; - private TrackingClient trackingClient; - private WorkspaceHelper workspaceHelper; - private JobTracker jobTracker; - - @BeforeEach - void setup() { - configRepository = mock(ConfigRepository.class); - jobPersistence = mock(JobPersistence.class); - workspaceHelper = mock(WorkspaceHelper.class); - trackingClient = mock(TrackingClient.class); - jobTracker = new JobTracker(configRepository, jobPersistence, workspaceHelper, trackingClient); - } - - @Test - void testTrackCheckConnectionSource() throws ConfigNotFoundException, IOException, JsonValidationException { - final ImmutableMap metadata = ImmutableMap.builder() - .put(JOB_TYPE, ConfigType.CHECK_CONNECTION_SOURCE) - .put(JOB_ID_KEY, JOB_ID.toString()) - .put(ATTEMPT_ID, 0) - .put(CONNECTOR_SOURCE_KEY, SOURCE_DEF_NAME) - .put(CONNECTOR_SOURCE_DEFINITION_ID_KEY, UUID1) - .put(CONNECTOR_SOURCE_DOCKER_REPOSITORY_KEY, CONNECTOR_REPOSITORY) - .put(CONNECTOR_SOURCE_VERSION_KEY, CONNECTOR_VERSION) - .build(); - - when(configRepository.getStandardSourceDefinition(UUID1)) - .thenReturn(new StandardSourceDefinition() - .withSourceDefinitionId(UUID1) - .withName(SOURCE_DEF_NAME) - 
.withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION)); - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)) - .thenReturn(new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withName(WORKSPACE_NAME)); - assertCheckConnCorrectMessageForEachState( - (jobState, output) -> jobTracker.trackCheckConnectionSource(JOB_ID, UUID1, WORKSPACE_ID, jobState, output), - metadata, - true); - assertCheckConnCorrectMessageForEachState( - (jobState, output) -> jobTracker.trackCheckConnectionSource(JOB_ID, UUID1, null, jobState, output), - metadata, - false); - } - - @Test - void testTrackCheckConnectionDestination() throws ConfigNotFoundException, IOException, JsonValidationException { - final ImmutableMap metadata = ImmutableMap.builder() - .put(JOB_TYPE, ConfigType.CHECK_CONNECTION_DESTINATION) - .put(JOB_ID_KEY, JOB_ID.toString()) - .put(ATTEMPT_ID, 0) - .put("connector_destination", DESTINATION_DEF_NAME) - .put("connector_destination_definition_id", UUID2) - .put("connector_destination_docker_repository", CONNECTOR_REPOSITORY) - .put("connector_destination_version", CONNECTOR_VERSION) - .build(); - - when(configRepository.getStandardDestinationDefinition(UUID2)) - .thenReturn(new StandardDestinationDefinition() - .withDestinationDefinitionId(UUID2) - .withName(DESTINATION_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION)); - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)) - .thenReturn(new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withName(WORKSPACE_NAME)); - assertCheckConnCorrectMessageForEachState( - (jobState, output) -> jobTracker.trackCheckConnectionDestination(JOB_ID, UUID2, WORKSPACE_ID, jobState, output), - metadata, - true); - assertCheckConnCorrectMessageForEachState( - (jobState, output) -> jobTracker.trackCheckConnectionDestination(JOB_ID, UUID2, null, jobState, output), - metadata, - false); - } - - @Test - void testTrackDiscover() 
throws ConfigNotFoundException, IOException, JsonValidationException { - final ImmutableMap metadata = ImmutableMap.builder() - .put(JOB_TYPE, ConfigType.DISCOVER_SCHEMA) - .put(JOB_ID_KEY, JOB_ID.toString()) - .put(ATTEMPT_ID, 0) - .put(CONNECTOR_SOURCE_KEY, SOURCE_DEF_NAME) - .put(CONNECTOR_SOURCE_DEFINITION_ID_KEY, UUID1) - .put(CONNECTOR_SOURCE_DOCKER_REPOSITORY_KEY, CONNECTOR_REPOSITORY) - .put(CONNECTOR_SOURCE_VERSION_KEY, CONNECTOR_VERSION) - .build(); - - when(configRepository.getStandardSourceDefinition(UUID1)) - .thenReturn(new StandardSourceDefinition() - .withSourceDefinitionId(UUID1) - .withName(SOURCE_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION)); - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)) - .thenReturn(new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withName(WORKSPACE_NAME)); - assertCorrectMessageForEachState((jobState) -> jobTracker.trackDiscover(JOB_ID, UUID1, WORKSPACE_ID, jobState), metadata); - assertCorrectMessageForEachState((jobState) -> jobTracker.trackDiscover(JOB_ID, UUID1, null, jobState), metadata); - } - - @Test - void testTrackSync() throws ConfigNotFoundException, IOException, JsonValidationException { - testAsynchronous(ConfigType.SYNC, SYNC_CONFIG_METADATA); - } - - @Test - void testTrackSyncForInternalFailure() throws JsonValidationException, ConfigNotFoundException, IOException { - final Long jobId = 12345L; - final Integer attemptNumber = 2; - final JobState jobState = JobState.SUCCEEDED; - final Exception exception = new IOException("test"); - - when(workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(jobId)).thenReturn(WORKSPACE_ID); - when(configRepository.getStandardSync(CONNECTION_ID)) - .thenReturn(new StandardSync().withConnectionId(CONNECTION_ID).withManual(true).withCatalog(CATALOG)); - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)) - .thenReturn(new 
StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withName(WORKSPACE_NAME)); - when(configRepository.getStandardSync(CONNECTION_ID)) - .thenReturn(new StandardSync().withConnectionId(CONNECTION_ID).withManual(false).withCatalog(CATALOG) - .withSchedule(new Schedule().withUnits(1L).withTimeUnit(TimeUnit.MINUTES))); - when(configRepository.getSourceDefinitionFromConnection(CONNECTION_ID)) - .thenReturn(new StandardSourceDefinition() - .withSourceDefinitionId(UUID1) - .withName(SOURCE_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION) - .withSpec(SOURCE_SPEC)); - when(configRepository.getDestinationDefinitionFromConnection(CONNECTION_ID)) - .thenReturn(new StandardDestinationDefinition() - .withDestinationDefinitionId(UUID2) - .withName(DESTINATION_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION) - .withSpec(DESTINATION_SPEC)); - when(configRepository.getStandardSourceDefinition(UUID1)) - .thenReturn(new StandardSourceDefinition() - .withSourceDefinitionId(UUID1) - .withName(SOURCE_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION) - .withSpec(SOURCE_SPEC)); - when(configRepository.getStandardDestinationDefinition(UUID2)) - .thenReturn(new StandardDestinationDefinition() - .withDestinationDefinitionId(UUID2) - .withName(DESTINATION_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION) - .withSpec(DESTINATION_SPEC)); - - jobTracker.trackSyncForInternalFailure(jobId, CONNECTION_ID, attemptNumber, jobState, exception); - final Map metadata = new LinkedHashMap(); - metadata.put("namespace_definition", NamespaceDefinitionType.SOURCE); - metadata.put("number_of_streams", 1); - metadata.put("internal_error_type", exception.getClass().getName()); - metadata.put(CONNECTOR_SOURCE_KEY, SOURCE_DEF_NAME); - metadata.put("internal_error_cause", exception.getMessage()); - metadata.put(FREQUENCY_KEY, "1 
min"); - metadata.put(CONNECTOR_SOURCE_DEFINITION_ID_KEY, UUID1); - metadata.put("workspace_id", WORKSPACE_ID); - metadata.put(CONNECTOR_SOURCE_DOCKER_REPOSITORY_KEY, CONNECTOR_REPOSITORY); - metadata.put(ATTEMPT_STAGE_KEY, "ENDED"); - metadata.put("attempt_completion_status", jobState); - metadata.put("connection_id", CONNECTION_ID); - metadata.put(JOB_ID_KEY, String.valueOf(jobId)); - metadata.put(CONNECTOR_SOURCE_VERSION_KEY, CONNECTOR_VERSION); - metadata.put("connector_destination_version", CONNECTOR_VERSION); - metadata.put("attempt_id", attemptNumber); - metadata.put("connector_destination", DESTINATION_DEF_NAME); - metadata.put("operation_count", 0); - metadata.put("connector_destination_docker_repository", CONNECTOR_REPOSITORY); - metadata.put("table_prefix", false); - metadata.put("workspace_name", WORKSPACE_NAME); - metadata.put("connector_destination_definition_id", UUID2); - - verify(trackingClient).track(WORKSPACE_ID, JobTracker.MESSAGE_NAME, metadata); - } - - @Test - void testTrackReset() throws ConfigNotFoundException, IOException, JsonValidationException { - testAsynchronous(ConfigType.RESET_CONNECTION); - } - - void testAsynchronous(final ConfigType configType) throws ConfigNotFoundException, IOException, JsonValidationException { - testAsynchronous(configType, Collections.emptyMap()); - } - - // todo update with connection-specific test - void testAsynchronous(final ConfigType configType, final Map additionalExpectedMetadata) - throws ConfigNotFoundException, IOException, JsonValidationException { - // for sync the job id is a long not a uuid. - final long jobId = 10L; - when(workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(jobId)).thenReturn(WORKSPACE_ID); - - final ImmutableMap metadata = getJobMetadata(configType, jobId); - final Job job = getJobMock(configType, jobId); - // test when frequency is manual. 
- - when(configRepository.getStandardSync(CONNECTION_ID)) - .thenReturn(new StandardSync().withConnectionId(CONNECTION_ID).withManual(true).withCatalog(CATALOG)); - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)) - .thenReturn(new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withName(WORKSPACE_NAME)); - final Map manualMetadata = MoreMaps.merge( - metadata, - Map.of(FREQUENCY_KEY, "manual"), - additionalExpectedMetadata); - assertCorrectMessageForEachState((jobState) -> jobTracker.trackSync(job, jobState), manualMetadata); - - // test when frequency is scheduled. - when(configRepository.getStandardSync(CONNECTION_ID)) - .thenReturn(new StandardSync().withConnectionId(CONNECTION_ID).withManual(false).withCatalog(CATALOG) - .withSchedule(new Schedule().withUnits(1L).withTimeUnit(TimeUnit.MINUTES))); - final Map scheduledMetadata = MoreMaps.merge( - metadata, - Map.of(FREQUENCY_KEY, "1 min"), - additionalExpectedMetadata); - assertCorrectMessageForEachState((jobState) -> jobTracker.trackSync(job, jobState), scheduledMetadata); - } - - @Test - void testTrackSyncAttempt() throws ConfigNotFoundException, IOException, JsonValidationException { - testAsynchronousAttempt(ConfigType.SYNC, SYNC_CONFIG_METADATA); - } - - @Test - void testTrackResetAttempt() throws ConfigNotFoundException, IOException, JsonValidationException { - testAsynchronousAttempt(ConfigType.RESET_CONNECTION); - } - - @Test - void testTrackSyncAttemptWithFailures() throws ConfigNotFoundException, IOException, JsonValidationException { - testAsynchronousAttemptWithFailures(ConfigType.SYNC, SYNC_CONFIG_METADATA); - } - - @Test - void testConfigToMetadata() throws IOException { - final String configJson = MoreResources.readResource("example_config.json"); - final JsonNode config = Jsons.deserialize(configJson); - - final String schemaJson = MoreResources.readResource("example_config_schema.json"); - final JsonNode schema = Jsons.deserialize(schemaJson); - - final Map expected = 
new ImmutableMap.Builder() - .put(JobTracker.CONFIG + ".username", JobTracker.SET) - .put(JobTracker.CONFIG + ".has_ssl", false) - .put(JobTracker.CONFIG + ".password", JobTracker.SET) - .put(JobTracker.CONFIG + ".one_of.type_key", "foo") - .put(JobTracker.CONFIG + ".one_of.some_key", JobTracker.SET) - .put(JobTracker.CONFIG + ".const_object.sub_key", "bar") - .put(JobTracker.CONFIG + ".const_object.sub_array", "[1,2,3]") - .put(JobTracker.CONFIG + ".const_object.sub_object.sub_sub_key", "baz") - .put(JobTracker.CONFIG + ".enum_string", "foo") - .put(JobTracker.CONFIG + ".additionalPropertiesUnset.foo", JobTracker.SET) - .put(JobTracker.CONFIG + ".additionalPropertiesBoolean.foo", JobTracker.SET) - .put(JobTracker.CONFIG + ".additionalPropertiesSchema.foo", JobTracker.SET) - .put(JobTracker.CONFIG + ".additionalPropertiesConst.foo", 42) - .put(JobTracker.CONFIG + ".additionalPropertiesEnumString", "foo") - .build(); - - final Map actual = JobTracker.configToMetadata(JobTracker.CONFIG, config, schema); - - assertEquals(expected, actual); - } - - void testAsynchronousAttempt(final ConfigType configType) throws ConfigNotFoundException, IOException, JsonValidationException { - testAsynchronousAttempt(configType, getJobWithAttemptsMock(configType, LONG_JOB_ID), Collections.emptyMap()); - } - - void testAsynchronousAttempt(final ConfigType configType, final Map additionalExpectedMetadata) - throws ConfigNotFoundException, IOException, JsonValidationException { - testAsynchronousAttempt(configType, getJobWithAttemptsMock(configType, LONG_JOB_ID), additionalExpectedMetadata); - } - - void testAsynchronousAttemptWithFailures(final ConfigType configType, final Map additionalExpectedMetadata) - throws ConfigNotFoundException, IOException, JsonValidationException { - final LinkedHashMap linkedHashMap = new LinkedHashMap<>(); - linkedHashMap.put("failureOrigin", "source"); - linkedHashMap.put("failureType", "config_error"); - linkedHashMap.put("internalMessage", "Internal 
config error error msg"); - linkedHashMap.put("externalMessage", "Config error related msg"); - linkedHashMap.put(METADATA, ImmutableMap.of(SOME, METADATA)); - linkedHashMap.put("retryable", true); - linkedHashMap.put("timestamp", 1010); - final JsonNode configFailureJson = Jsons.jsonNode(linkedHashMap); - - final LinkedHashMap linkedHashMap1 = new LinkedHashMap<>(); - linkedHashMap1.put("failureOrigin", "replication"); - linkedHashMap1.put("failureType", "system_error"); - linkedHashMap1.put("internalMessage", "Internal system error error msg"); - linkedHashMap1.put("externalMessage", "System error related msg"); - linkedHashMap1.put(METADATA, ImmutableMap.of(SOME, METADATA)); - linkedHashMap1.put("retryable", true); - linkedHashMap1.put("timestamp", 1100); - final JsonNode systemFailureJson = Jsons.jsonNode(linkedHashMap1); - - final LinkedHashMap linkedHashMap2 = new LinkedHashMap<>(); - linkedHashMap2.put("failureOrigin", null); - linkedHashMap2.put("failureType", null); - linkedHashMap2.put("internalMessage", "Internal unknown error error msg"); - linkedHashMap2.put("externalMessage", "Unknown error related msg"); - linkedHashMap2.put(METADATA, ImmutableMap.of(SOME, METADATA)); - linkedHashMap2.put("retryable", true); - linkedHashMap2.put("timestamp", 1110); - final JsonNode unknownFailureJson = Jsons.jsonNode(linkedHashMap2); - - final Map failureMetadata = ImmutableMap.of( - "failure_reasons", Jsons.arrayNode().addAll(Arrays.asList(configFailureJson, systemFailureJson, unknownFailureJson)).toString(), - "main_failure_reason", configFailureJson.toString()); - testAsynchronousAttempt(configType, getJobWithFailuresMock(configType, LONG_JOB_ID), - MoreMaps.merge(additionalExpectedMetadata, failureMetadata)); - } - - void testAsynchronousAttempt(final ConfigType configType, final Job job, final Map additionalExpectedMetadata) - throws ConfigNotFoundException, IOException, JsonValidationException { - - final Map metadata = getJobMetadata(configType, LONG_JOB_ID); 
- // test when frequency is manual. - when(configRepository.getStandardSync(CONNECTION_ID)) - .thenReturn(new StandardSync().withConnectionId(CONNECTION_ID).withManual(true).withCatalog(CATALOG)); - when(workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(LONG_JOB_ID)).thenReturn(WORKSPACE_ID); - when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)) - .thenReturn(new StandardWorkspace().withWorkspaceId(WORKSPACE_ID).withName(WORKSPACE_NAME)); - final Map manualMetadata = MoreMaps.merge( - ATTEMPT_METADATA, - metadata, - Map.of(FREQUENCY_KEY, "manual"), - additionalExpectedMetadata); - - jobTracker.trackSync(job, JobState.SUCCEEDED); - assertCorrectMessageForSucceededState(manualMetadata); - - jobTracker.trackSync(job, JobState.FAILED); - assertCorrectMessageForFailedState(manualMetadata); - } - - private Job getJobMock(final ConfigType configType, final long jobId) throws ConfigNotFoundException, IOException, JsonValidationException { - when(configRepository.getSourceDefinitionFromConnection(CONNECTION_ID)) - .thenReturn(new StandardSourceDefinition() - .withSourceDefinitionId(UUID1) - .withName(SOURCE_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION) - .withSpec(SOURCE_SPEC)); - when(configRepository.getDestinationDefinitionFromConnection(CONNECTION_ID)) - .thenReturn(new StandardDestinationDefinition() - .withDestinationDefinitionId(UUID2) - .withName(DESTINATION_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION) - .withSpec(DESTINATION_SPEC)); - - when(configRepository.getStandardSourceDefinition(UUID1)) - .thenReturn(new StandardSourceDefinition() - .withSourceDefinitionId(UUID1) - .withName(SOURCE_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION) - .withSpec(SOURCE_SPEC)); - when(configRepository.getStandardDestinationDefinition(UUID2)) - .thenReturn(new StandardDestinationDefinition() - 
.withDestinationDefinitionId(UUID2) - .withName(DESTINATION_DEF_NAME) - .withDockerRepository(CONNECTOR_REPOSITORY) - .withDockerImageTag(CONNECTOR_VERSION) - .withSpec(DESTINATION_SPEC)); - - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.APPEND))); - - final JobSyncConfig jobSyncConfig = new JobSyncConfig() - .withConfiguredAirbyteCatalog(catalog); - - final AttemptSyncConfig attemptSyncConfig = new AttemptSyncConfig() - .withSourceConfiguration(Jsons.jsonNode(ImmutableMap.of("key", "some_value"))) - .withDestinationConfiguration(Jsons.jsonNode(ImmutableMap.of("key", false))); - - final JobConfig jobConfig = mock(JobConfig.class); - when(jobConfig.getConfigType()).thenReturn(configType); - - if (configType == ConfigType.SYNC) { - when(jobConfig.getSync()).thenReturn(jobSyncConfig); - } - - final Attempt attempt = mock(Attempt.class); - when(attempt.getSyncConfig()).thenReturn(Optional.of(attemptSyncConfig)); - - final Job job = mock(Job.class); - when(job.getId()).thenReturn(jobId); - when(job.getConfig()).thenReturn(jobConfig); - when(job.getConfigType()).thenReturn(configType); - when(job.getScope()).thenReturn(CONNECTION_ID.toString()); - when(job.getLastAttempt()).thenReturn(Optional.of(attempt)); - when(job.getAttemptsCount()).thenReturn(700); - return job; - } - - private Attempt getAttemptMock() { - final Attempt attempt = mock(Attempt.class); - final JobOutput jobOutput = mock(JobOutput.class); - final StandardSyncOutput syncOutput = mock(StandardSyncOutput.class); - final StandardSyncSummary syncSummary = mock(StandardSyncSummary.class); - final NormalizationSummary normalizationSummary = mock(NormalizationSummary.class); - final SyncStats syncStats = mock(SyncStats.class); - - when(syncSummary.getStartTime()).thenReturn(SYNC_START_TIME); - 
when(syncSummary.getEndTime()).thenReturn(SYNC_END_TIME); - when(syncSummary.getBytesSynced()).thenReturn(SYNC_BYTES_SYNC); - when(syncSummary.getRecordsSynced()).thenReturn(SYNC_RECORDS_SYNC); - when(syncOutput.getStandardSyncSummary()).thenReturn(syncSummary); - when(syncOutput.getNormalizationSummary()).thenReturn(normalizationSummary); - when(syncSummary.getTotalStats()).thenReturn(syncStats); - when(jobOutput.getSync()).thenReturn(syncOutput); - when(attempt.getOutput()).thenReturn(java.util.Optional.of(jobOutput)); - when(syncStats.getSourceStateMessagesEmitted()).thenReturn(3L); - when(syncStats.getDestinationStateMessagesEmitted()).thenReturn(1L); - when(syncStats.getMaxSecondsBeforeSourceStateMessageEmitted()).thenReturn(5L); - when(syncStats.getMeanSecondsBeforeSourceStateMessageEmitted()).thenReturn(4L); - when(syncStats.getMaxSecondsBetweenStateMessageEmittedandCommitted()).thenReturn(7L); - when(syncStats.getMeanSecondsBetweenStateMessageEmittedandCommitted()).thenReturn(6L); - when(syncStats.getReplicationStartTime()).thenReturn(7L); - when(syncStats.getReplicationEndTime()).thenReturn(8L); - when(syncStats.getSourceReadStartTime()).thenReturn(9L); - when(syncStats.getSourceReadEndTime()).thenReturn(10L); - when(syncStats.getDestinationWriteStartTime()).thenReturn(11L); - when(syncStats.getDestinationWriteEndTime()).thenReturn(12L); - when(normalizationSummary.getStartTime()).thenReturn(13L); - when(normalizationSummary.getEndTime()).thenReturn(14L); - - return attempt; - } - - private Job getJobWithAttemptsMock(final ConfigType configType, final long jobId) - throws ConfigNotFoundException, IOException, JsonValidationException { - return getJobWithAttemptsMock(configType, jobId, List.of(getAttemptMock())); - } - - private Job getJobWithAttemptsMock(final ConfigType configType, final long jobId, final List attempts) - throws ConfigNotFoundException, IOException, JsonValidationException { - final Job job = getJobMock(configType, jobId); - 
when(job.getAttempts()).thenReturn(attempts); - when(jobPersistence.getJob(jobId)).thenReturn(job); - return job; - } - - private List getAttemptsWithFailuresMock() { - final Attempt attemptWithSingleFailure = getAttemptMock(); - final AttemptFailureSummary singleFailureSummary = mock(AttemptFailureSummary.class); - final FailureReason configFailureReason = new FailureReason() - .withFailureOrigin(FailureReason.FailureOrigin.SOURCE) - .withFailureType(FailureReason.FailureType.CONFIG_ERROR) - .withRetryable(true) - .withMetadata(new Metadata().withAdditionalProperty(SOME, METADATA)) - .withExternalMessage("Config error related msg") - .withInternalMessage("Internal config error error msg") - .withStacktrace("Don't include stacktrace in call to track") - .withTimestamp(SYNC_START_TIME + 10); - when(singleFailureSummary.getFailures()).thenReturn(List.of(configFailureReason)); - when(attemptWithSingleFailure.getFailureSummary()).thenReturn(Optional.of(singleFailureSummary)); - - final Attempt attemptWithMultipleFailures = getAttemptMock(); - final AttemptFailureSummary multipleFailuresSummary = mock(AttemptFailureSummary.class); - final FailureReason systemFailureReason = new FailureReason() - .withFailureOrigin(FailureReason.FailureOrigin.REPLICATION) - .withFailureType(FailureReason.FailureType.SYSTEM_ERROR) - .withRetryable(true) - .withMetadata(new Metadata().withAdditionalProperty(SOME, METADATA)) - .withExternalMessage("System error related msg") - .withInternalMessage("Internal system error error msg") - .withStacktrace("Don't include stacktrace in call to track") - .withTimestamp(SYNC_START_TIME + 100); - final FailureReason unknownFailureReason = new FailureReason() - .withRetryable(true) - .withMetadata(new Metadata().withAdditionalProperty(SOME, METADATA)) - .withExternalMessage("Unknown error related msg") - .withInternalMessage("Internal unknown error error msg") - .withStacktrace("Don't include stacktrace in call to track") - 
.withTimestamp(SYNC_START_TIME + 110); - when(multipleFailuresSummary.getFailures()).thenReturn(List.of(systemFailureReason, unknownFailureReason)); - when(attemptWithMultipleFailures.getFailureSummary()).thenReturn(Optional.of(multipleFailuresSummary)); - - final Attempt attemptWithNoFailures = getAttemptMock(); - when(attemptWithNoFailures.getFailureSummary()).thenReturn(Optional.empty()); - - // in non-test cases we shouldn't actually get failures out of order chronologically - // this is to verify that we are explicitly sorting the results with tracking failure metadata - return List.of(attemptWithMultipleFailures, attemptWithSingleFailure, attemptWithNoFailures); - } - - private Job getJobWithFailuresMock(final ConfigType configType, final long jobId) - throws ConfigNotFoundException, IOException, JsonValidationException { - return getJobWithAttemptsMock(configType, jobId, getAttemptsWithFailuresMock()); - } - - private ImmutableMap getJobMetadata(final ConfigType configType, final long jobId) { - return ImmutableMap.builder() - .put(JOB_TYPE, configType) - .put(JOB_ID_KEY, String.valueOf(jobId)) - .put(ATTEMPT_ID, 700) - .put("connection_id", CONNECTION_ID) - .put(CONNECTOR_SOURCE_KEY, SOURCE_DEF_NAME) - .put(CONNECTOR_SOURCE_DEFINITION_ID_KEY, UUID1) - .put(CONNECTOR_SOURCE_DOCKER_REPOSITORY_KEY, CONNECTOR_REPOSITORY) - .put(CONNECTOR_SOURCE_VERSION_KEY, CONNECTOR_VERSION) - .put("connector_destination", DESTINATION_DEF_NAME) - .put("connector_destination_definition_id", UUID2) - .put("connector_destination_docker_repository", CONNECTOR_REPOSITORY) - .put("connector_destination_version", CONNECTOR_VERSION) - .put("namespace_definition", NamespaceDefinitionType.SOURCE) - .put("table_prefix", false) - .put("operation_count", 0) - .put("number_of_streams", 1) - .build(); - } - - private void assertCheckConnCorrectMessageForEachState(final BiConsumer jobStateConsumer, - final Map metadata, - final boolean workspaceSet) { - reset(trackingClient); - - // Output 
does not exist when job has started. - jobStateConsumer.accept(JobState.STARTED, null); - - final var successOutput = new StandardCheckConnectionOutput(); - successOutput.setStatus(Status.SUCCEEDED); - jobStateConsumer.accept(JobState.SUCCEEDED, successOutput); - final ImmutableMap checkConnSuccessMetadata = ImmutableMap.of("check_connection_outcome", "succeeded"); - - final var failureOutput = new StandardCheckConnectionOutput(); - failureOutput.setStatus(Status.FAILED); - jobStateConsumer.accept(JobState.SUCCEEDED, failureOutput); - final ImmutableMap checkConnFailureMetadata = ImmutableMap.of("check_connection_outcome", "failed"); - - // Failure implies the job threw an exception which almost always meant no output. - jobStateConsumer.accept(JobState.FAILED, null); - if (workspaceSet) { - assertCorrectMessageForStartedState(metadata); - assertCorrectMessageForSucceededState(MoreMaps.merge(metadata, checkConnSuccessMetadata)); - assertCorrectMessageForSucceededState(MoreMaps.merge(metadata, checkConnFailureMetadata)); - assertCorrectMessageForFailedState(metadata); - } else { - verifyNoInteractions(trackingClient); - } - } - - /** - * Tests that the tracker emits the correct message for when the job starts, succeeds, and fails. - * - * @param jobStateConsumer - consumer that takes in a job state and then calls the relevant method - * on the job tracker with it. if testing discover, it calls trackDiscover, etc. - * @param expectedMetadata - expected metadata (except job state). 
- */ - private void assertCorrectMessageForEachState(final Consumer jobStateConsumer, - final Map expectedMetadata) { - jobStateConsumer.accept(JobState.STARTED); - assertCorrectMessageForStartedState(expectedMetadata); - jobStateConsumer.accept(JobState.SUCCEEDED); - assertCorrectMessageForSucceededState(expectedMetadata); - jobStateConsumer.accept(JobState.FAILED); - assertCorrectMessageForFailedState(expectedMetadata); - } - - private void assertCorrectMessageForStartedState(final Map metadata) { - verify(trackingClient).track(WORKSPACE_ID, JobTracker.MESSAGE_NAME, MoreMaps.merge(metadata, STARTED_STATE_METADATA, mockWorkspaceInfo())); - } - - private void assertCorrectMessageForSucceededState(final Map metadata) { - verify(trackingClient).track(WORKSPACE_ID, JobTracker.MESSAGE_NAME, MoreMaps.merge(metadata, SUCCEEDED_STATE_METADATA, mockWorkspaceInfo())); - } - - private void assertCorrectMessageForFailedState(final Map metadata) { - verify(trackingClient).track(WORKSPACE_ID, JobTracker.MESSAGE_NAME, MoreMaps.merge(metadata, FAILED_STATE_METADATA, mockWorkspaceInfo())); - } - - private Map mockWorkspaceInfo() { - final Map map = new HashMap<>(); - map.put("workspace_id", WORKSPACE_ID); - map.put("workspace_name", WORKSPACE_NAME); - return map; - } - -} diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/TrackingMetadataTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/TrackingMetadataTest.java deleted file mode 100644 index d67ab8323173..000000000000 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/TrackingMetadataTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.persistence.job.tracker; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.config.AttemptSyncConfig; -import io.airbyte.config.JobOutput; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.config.StandardSyncSummary; -import io.airbyte.config.SyncStats; -import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptStatus; -import io.airbyte.persistence.job.models.Job; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -class TrackingMetadataTest { - - @Test - void testNulls() { - final UUID connectionId = UUID.randomUUID(); - final StandardSync standardSync = mock(StandardSync.class); - - // set all the required values for a valid connection - when(standardSync.getConnectionId()).thenReturn(connectionId); - when(standardSync.getName()).thenReturn("connection-name"); - when(standardSync.getManual()).thenReturn(true); - when(standardSync.getSourceId()).thenReturn(UUID.randomUUID()); - when(standardSync.getDestinationId()).thenReturn(UUID.randomUUID()); - when(standardSync.getCatalog()).thenReturn(mock(ConfiguredAirbyteCatalog.class)); - when(standardSync.getResourceRequirements()).thenReturn(new ResourceRequirements()); - - // make sure to use a null for resources - when(standardSync.getCatalog()).thenReturn(mock(ConfiguredAirbyteCatalog.class)); - - // try to generate metadata - final Map expected = Map.of( - "connection_id", connectionId, - "frequency", "manual", - "operation_count", 0, - "table_prefix", false); - final Map actual = TrackingMetadata.generateSyncMetadata(standardSync); - assertEquals(expected, actual); - } - - @Test - void 
testgenerateJobAttemptMetadataWithNulls() { - final SyncStats syncStats = new SyncStats().withRecordsCommitted(10L).withRecordsEmitted(10L).withBytesEmitted(100L) - .withMeanSecondsBetweenStateMessageEmittedandCommitted(5L).withMaxSecondsBeforeSourceStateMessageEmitted(8L) - .withMeanSecondsBeforeSourceStateMessageEmitted(2L).withMaxSecondsBetweenStateMessageEmittedandCommitted(null); - final StandardSyncSummary standardSyncSummary = new StandardSyncSummary().withTotalStats(syncStats); - final StandardSyncOutput standardSyncOutput = new StandardSyncOutput().withStandardSyncSummary(standardSyncSummary); - final AttemptSyncConfig attemptSyncConfig = mock(AttemptSyncConfig.class); - final JobOutput jobOutput = new JobOutput().withSync(standardSyncOutput); - final Attempt attempt = new Attempt(0, 10L, Path.of("test"), attemptSyncConfig, jobOutput, AttemptStatus.SUCCEEDED, null, null, 100L, 100L, 99L); - final Job job = mock(Job.class); - when(job.getAttempts()).thenReturn(List.of(attempt)); - - final Map actual = TrackingMetadata.generateJobAttemptMetadata(job); - final Map expected = Map.of( - "mean_seconds_before_source_state_message_emitted", 2L, - "mean_seconds_between_state_message_emit_and_commit", 5L, - "max_seconds_before_source_state_message_emitted", 8L); - assertEquals(expected, actual); - } - -} diff --git a/airbyte-proxy/401.html b/airbyte-proxy/401.html deleted file mode 100644 index 69e19668f51b..000000000000 --- a/airbyte-proxy/401.html +++ /dev/null @@ -1,19 +0,0 @@ - - - - Airbyte - Access Denied - - -

🐙 Nope.

-

HTTP Error Code: 401

- -

- This deployment of Airbyte is protected by HTTP Basic Authentication. - Please refer to the Airbyte docs to learn more about: -

-

- - diff --git a/airbyte-proxy/Dockerfile b/airbyte-proxy/Dockerfile deleted file mode 100644 index 3a8608f72c91..000000000000 --- a/airbyte-proxy/Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -# Inspired by https://medium.com/pernod-ricard-tech/adding-basic-authentication-with-nginx-as-a-reverse-proxy-a229f9d12b73 - -FROM nginx:latest - -ARG VERSION=0.40.32 - -ENV APPLICATION airbyte-proxy -ENV VERSION ${VERSION} - -RUN apt-get update -y && apt-get install -y apache2-utils && rm -rf /var/lib/apt/lists/* - -# This variable can be used to update the destination containers that Nginx proxies to. -ENV PROXY_PASS_WEB "http://airbyte-webapp:80" -ENV PROXY_PASS_API "http://airbyte-server:8001" -ENV CONNECTOR_BUILDER_SERVER_API "http://airbyte-connector-builder-server:80" - -# Nginx config file -WORKDIR / -RUN mkdir -p /etc/nginx/templates -COPY nginx-auth.conf.template /etc/nginx/templates/nginx-auth.conf.template -COPY nginx-no-auth.conf.template /etc/nginx/templates/nginx-no-auth.conf.template -COPY 401.html /etc/nginx/401.html - -# Startup script -COPY run.sh ./ -RUN chmod 0755 ./run.sh -CMD [ "./run.sh" ] -ENTRYPOINT ["./run.sh"] diff --git a/airbyte-proxy/LICENSE b/airbyte-proxy/LICENSE deleted file mode 100644 index 7d1808d83342..000000000000 --- a/airbyte-proxy/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Airbyte, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/airbyte-proxy/README.md b/airbyte-proxy/README.md deleted file mode 100644 index 1bb4f873fb37..000000000000 --- a/airbyte-proxy/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Airbyte Proxy - -This service uses Nginx to front the Aribyte `webapp` and `server` services to add Authentication via HTTP basic auth. - -Authentication is controlled by 2 environment variables, `BASIC_AUTH_USERNAME` and `BASIC_AUTH_PASSWORD` which can be modified in the `.env` file for your Airbyte deployment. You can disable authentication by setting both `BASIC_AUTH_USERNAME` and `BASIC_AUTH_PASSWORD` to empty strings. Changes in your environment variables will be applied when the service (re)boots. - -This service is intended to work in conjunction with the `airbyte_internal` network defined in the default docker compose file. By default, this application forwards requesting coming in on 8000 and 8001 to the PROXY_PASS_WEB and PROXY_PASS_API accordingly - which are also configured by environment variables within this container (see Dockerfile). The deafults are configured to work with the default `docker-compose.yaml` file for Airbyte OSS deployments. 
- -``` -ENV PROXY_PASS_WEB "http://airbyte-webapp:80" -ENV PROXY_PASS_API "http://airbyte-server:8001" -``` - -🐙 diff --git a/airbyte-proxy/build.gradle b/airbyte-proxy/build.gradle deleted file mode 100644 index 297f75c8f4c4..000000000000 --- a/airbyte-proxy/build.gradle +++ /dev/null @@ -1,21 +0,0 @@ -task prepareBuild(type: Copy) { - from layout.projectDirectory.file("nginx-auth.conf.template") - from layout.projectDirectory.file("nginx-no-auth.conf.template") - from layout.projectDirectory.file("run.sh") - from layout.projectDirectory.file("401.html") - - into layout.buildDirectory.dir("docker") -} - -tasks.named("buildDockerImage") { - dependsOn prepareBuild - dependsOn copyDocker -} - -task bashTest(type: Exec) { - dependsOn buildDockerImage - commandLine "./test.sh" -} - -// we can't override the 'test' command, so we can make our bash test a dependency -test.dependsOn(project.tasks.bashTest) diff --git a/airbyte-proxy/gradle.properties b/airbyte-proxy/gradle.properties deleted file mode 100644 index f28927f52ca6..000000000000 --- a/airbyte-proxy/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=proxy diff --git a/airbyte-proxy/nginx-auth.conf.template b/airbyte-proxy/nginx-auth.conf.template deleted file mode 100644 index fe817b622536..000000000000 --- a/airbyte-proxy/nginx-auth.conf.template +++ /dev/null @@ -1,82 +0,0 @@ -events {} - -http { - server { - listen 8000 default_server; - - location / { - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - - client_max_body_size 200M; - - auth_basic "Welcome to Airbyte"; - auth_basic_user_file /etc/nginx/.htpasswd; - - proxy_pass "${PROXY_PASS_WEB}"; - - proxy_connect_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - proxy_send_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - proxy_read_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - send_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - - error_page 401 /etc/nginx/401.html; - location ~ (401.html)$ { - 
alias /etc/nginx/$1; - auth_basic off; - } - } - } - - server { - listen 8001; - - location / { - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - - client_max_body_size 200M; - - auth_basic "Welcome to Airbyte"; - auth_basic_user_file /etc/nginx/.htpasswd; - - proxy_pass "${PROXY_PASS_API}"; - - proxy_connect_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - proxy_send_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - proxy_read_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - send_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - - error_page 401 /etc/nginx/401.html; - location ~ (401.html)$ { - alias /etc/nginx/$1; - auth_basic off; - } - } - } - - server { - listen 8003; - - location / { - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - - client_max_body_size 200M; - - auth_basic "Welcome to Airbyte"; - auth_basic_user_file /etc/nginx/.htpasswd; - - proxy_pass "${CONNECTOR_BUILDER_SERVER_API}"; - - error_page 401 /etc/nginx/401.html; - location ~ (401.html)$ { - alias /etc/nginx/$1; - auth_basic off; - } - } - } -} diff --git a/airbyte-proxy/nginx-no-auth.conf.template b/airbyte-proxy/nginx-no-auth.conf.template deleted file mode 100644 index 35b25c934616..000000000000 --- a/airbyte-proxy/nginx-no-auth.conf.template +++ /dev/null @@ -1,55 +0,0 @@ -events {} - -http { - server { - listen 8000 default_server; - - location / { - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - - client_max_body_size 200M; - - proxy_pass "${PROXY_PASS_WEB}"; - - proxy_connect_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - proxy_send_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - proxy_read_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - send_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - } - } - - server { - listen 8001; - - location / { - proxy_set_header Host $host; - proxy_set_header X-Real-IP 
$remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - - client_max_body_size 200M; - - proxy_pass "${PROXY_PASS_API}"; - - proxy_connect_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - proxy_send_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - proxy_read_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - send_timeout ${BASIC_AUTH_PROXY_TIMEOUT}; - } - } - - server { - listen 8003; - - location / { - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - - client_max_body_size 200M; - - proxy_pass "${CONNECTOR_BUILDER_SERVER_API}"; - } - } -} diff --git a/airbyte-proxy/run.sh b/airbyte-proxy/run.sh deleted file mode 100644 index 264026393984..000000000000 --- a/airbyte-proxy/run.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash - -rm /etc/nginx/nginx.conf - -if [[ -z "${BASIC_AUTH_USERNAME}" ]]; then - echo "BASIC_AUTH_USERNAME is not set, skipping nginx auth" - - TEMPLATE_PATH="/etc/nginx/templates/nginx-no-auth.conf.template" -else - echo "BASIC_AUTH_USERNAME is set, requiring auth for user '$BASIC_AUTH_USERNAME'" - - # htpasswd for basic authentication - rm -rf /etc/nginx/.htpasswd - htpasswd -c -b /etc/nginx/.htpasswd $BASIC_AUTH_USERNAME $BASIC_AUTH_PASSWORD - - TEMPLATE_PATH="/etc/nginx/templates/nginx-auth.conf.template" -fi - -envsubst '${PROXY_PASS_WEB} ${PROXY_PASS_API} ${CONNECTOR_BUILDER_SERVER_API} ${PROXY_PASS_RESOLVER} ${BASIC_AUTH_PROXY_TIMEOUT}' < $TEMPLATE_PATH > /etc/nginx/nginx.conf - -echo "starting nginx..." 
-nginx -v -nginx -g "daemon off;" diff --git a/airbyte-proxy/test.sh b/airbyte-proxy/test.sh deleted file mode 100755 index c3e66d327361..000000000000 --- a/airbyte-proxy/test.sh +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/bash - -NAME="airbyte-proxy-test-container" -PORT=18000 -BASIC_AUTH_USERNAME=airbyte -BASIC_AUTH_PASSWORD=password -BASIC_AUTH_UPDATED_PASSWORD=pa55w0rd -BASIC_AUTH_PROXY_TIMEOUT=120 -TEST_HOST=localhost -VERSION="${VERSION:-dev}" # defaults to "dev", otherwise it is set by environment's $VERSION - -echo "testing with proxy container airbyte/proxy:$VERSION" - -function start_container () { - CMD="docker run -d -p $PORT:8000 --env BASIC_AUTH_USERNAME=$1 --env BASIC_AUTH_PASSWORD=$2 --env BASIC_AUTH_PROXY_TIMEOUT=$3 --env PROXY_PASS_WEB=http://localhost --env PROXY_PASS_API=http://localhost --env CONNECTOR_BUILDER_SERVER_API=http://localhost --name $NAME airbyte/proxy:$VERSION" - echo $CMD - eval $CMD - wait_for_docker; -} - -function start_container_with_proxy () { - CMD="docker run -d -p $PORT:8000 --env PROXY_PASS_WEB=$1 --env PROXY_PASS_API=$1 --name $NAME - airbyte/proxy:$VERSION" - echo $CMD - eval $CMD - wait_for_docker; -} - -function stop_container () { - echo "Stopping $NAME" - docker kill $NAME - docker rm $NAME -} - -function wait_for_docker() { - until [ "`docker inspect -f {{.State.Running}} $NAME`"=="true" ]; do - sleep 1; - done; - sleep 1; -} - -echo "Testing airbyte proxy..." 
- -stop_container; # just in case there was a failure of a previous test run - -echo "Starting $NAME" -start_container $BASIC_AUTH_USERNAME $BASIC_AUTH_PASSWORD $BASIC_AUTH_PROXY_TIMEOUT - -echo "Testing access without auth" -RESPONSE=`curl "http://$TEST_HOST:$PORT" -i --silent` -if [[ $RESPONSE == *"401 Unauthorized"* ]]; then - echo "✔️ access without auth blocked" -else - echo "Auth not working" - echo $RESPONSE - exit 1 -fi - -echo "Testing access with auth" -RESPONSE=`curl "http://$BASIC_AUTH_USERNAME:$BASIC_AUTH_PASSWORD@$TEST_HOST:$PORT" -i --silent` -if [[ $RESPONSE != *"401 Unauthorized"* ]]; then - echo "✔️ access with auth worked" -else - echo "Auth not working" - echo $RESPONSE - exit 1 -fi - -stop_container; - -echo "Starting $NAME with updated password" -start_container $BASIC_AUTH_USERNAME $BASIC_AUTH_UPDATED_PASSWORD $BASIC_AUTH_PROXY_TIMEOUT - -echo "Testing access with orignial paassword" -RESPONSE=`curl "http://$BASIC_AUTH_USERNAME:$BASIC_AUTH_PASSWORD@$TEST_HOST:$PORT" -i --silent` -if [[ $RESPONSE == *"401 Unauthorized"* ]]; then - echo "✔️ access with original auth blocked" -else - echo "Auth not working" - echo $RESPONSE - exit 1 -fi - -echo "Testing access updated auth" -RESPONSE=`curl "http://$BASIC_AUTH_USERNAME:$BASIC_AUTH_UPDATED_PASSWORD@$TEST_HOST:$PORT" -i --silent` -if [[ $RESPONSE != *"401 Unauthorized"* ]]; then - echo "✔️ access with updated auth worked" -else - echo "Auth not working" - echo $RESPONSE - exit 1 -fi - -stop_container; - -echo "Starting $NAME with no password" -start_container "" "" - -echo "Testing access without auth" -RESPONSE=`curl "http://$TEST_HOST:$PORT" -i --silent` -if [[ $RESPONSE != *"401 Unauthorized"* ]]; then - echo "✔️ access without auth allowed when configured" -else - echo "Auth not working" - echo $RESPONSE - exit 1 -fi - -stop_container; - - -# TODO: We can't test external URLs without a resolver, but adding a resolver that isn't dynamic+local doesn't work with docker. 
- -# echo "Testing that PROXY_PASS can be used to change the backend" -# start_container_with_proxy "http://www.google.com" - -# RESPONSE=`curl "http://$TEST_HOST:$PORT" -i --silent` -# if [[ $RESPONSE == *"google.com"* ]]; then -# echo "✔️ proxy backends can be changed" -# else -# echo "Proxy update not working" -# echo $RESPONSE -# exit 1 -# fi - -# stop_container; - -echo "Tests Passed ✅" -exit 0 diff --git a/airbyte-server/.gitignore b/airbyte-server/.gitignore deleted file mode 100644 index 645c92495363..000000000000 --- a/airbyte-server/.gitignore +++ /dev/null @@ -1 +0,0 @@ -src/main/resources/openapi/config.yaml diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile deleted file mode 100644 index 7ccb09c58a9c..000000000000 --- a/airbyte-server/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:1.0 -FROM ${JDK_IMAGE} AS server - -EXPOSE 8000 - -ARG VERSION=0.40.32 - -ENV APPLICATION airbyte-server -ENV VERSION ${VERSION} - -WORKDIR /app - -# This is automatically unzipped by Docker -ADD bin/${APPLICATION}-${VERSION}.tar /app - -# wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-${VERSION}/bin/${APPLICATION}"] diff --git a/airbyte-server/build.gradle b/airbyte-server/build.gradle deleted file mode 100644 index d88d9e30f5ff..000000000000 --- a/airbyte-server/build.gradle +++ /dev/null @@ -1,130 +0,0 @@ -plugins { - id 'application' -} - -configurations.all { - exclude group: 'io.micronaut.jaxrs' -} - -dependencies { - annotationProcessor platform(libs.micronaut.bom) - annotationProcessor libs.bundles.micronaut.annotation.processor - annotationProcessor libs.micronaut.jaxrs.processor - - implementation platform(libs.micronaut.bom) - implementation libs.bundles.micronaut - implementation libs.micronaut.jaxrs.server - - // Ensure that the versions defined in deps.toml are used - // instead of versions from transitive dependencies - 
implementation(libs.flyway.core) { - force = true - } - implementation(libs.s3) { - // Force to avoid updated version brought in transitively from Micronaut 3.8+ - // that is incompatible with our current Helm setup - force = true - } - implementation(libs.aws.java.sdk.s3) { - // Force to avoid updated version brought in transitively from Micronaut 3.8+ - // that is incompatible with our current Helm setup - force = true - } - - implementation project(':airbyte-analytics') - implementation project(':airbyte-api') - implementation project(':airbyte-commons-temporal') - implementation project(':airbyte-commons-server') - implementation project(':airbyte-commons-worker') - implementation project(':airbyte-config:init') - implementation project(':airbyte-config:config-models') - implementation project(':airbyte-config:config-persistence') - implementation project(':airbyte-config:specs') - implementation project(':airbyte-metrics:metrics-lib') - implementation project(':airbyte-db:db-lib') - implementation project(":airbyte-json-validation") - implementation project(':airbyte-notification') - implementation project(':airbyte-oauth') - implementation libs.airbyte.protocol - implementation project(':airbyte-persistence:job-persistence') - - implementation 'com.github.slugify:slugify:2.4' - implementation 'commons-cli:commons-cli:1.4' - implementation libs.temporal.sdk - implementation 'org.apache.cxf:cxf-core:3.4.2' - implementation 'org.eclipse.jetty:jetty-server:9.4.31.v20200723' - implementation 'org.eclipse.jetty:jetty-servlet:9.4.31.v20200723' - implementation 'org.glassfish.jaxb:jaxb-runtime:3.0.2' - implementation 'org.glassfish.jersey.containers:jersey-container-servlet' - implementation 'org.glassfish.jersey.inject:jersey-hk2' - implementation 'org.glassfish.jersey.media:jersey-media-json-jackson' - implementation 'org.glassfish.jersey.ext:jersey-bean-validation' - implementation 'org.quartz-scheduler:quartz:2.3.2' - implementation 'io.sentry:sentry:6.3.1' - 
implementation 'io.swagger:swagger-annotations:1.6.2' - - testImplementation project(':airbyte-test-utils') - testImplementation libs.postgresql - testImplementation libs.platform.testcontainers.postgresql - testImplementation 'com.squareup.okhttp3:mockwebserver:4.9.1' - testImplementation 'org.mockito:mockito-inline:4.7.0' -} - -// we want to be able to access the generated db files from config/init when we build the server docker image. -task copySeed(type: Copy, dependsOn: [project(':airbyte-config:init').processResources]) { - from "${project(':airbyte-config:init').buildDir}/resources/main/config" - into "${buildDir}/config_init/resources/main/config" -} - -// need to make sure that the files are in the resource directory before copying. -// tests require the seed to exist. -test.dependsOn(project.tasks.copySeed) -assemble.dependsOn(project.tasks.copySeed) - -mainClassName = 'io.airbyte.server.Application' - -application { - applicationName = project.name - mainClass = mainClassName - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -Properties env = new Properties() -rootProject.file('.env.dev').withInputStream { env.load(it) } - -run { - // default for running on local machine. - environment "DATABASE_USER", env.DATABASE_USER - environment "DATABASE_PASSWORD", env.DATABASE_PASSWORD - - environment "CONFIG_DATABASE_USER", env.CONFIG_DATABASE_USER - environment "CONFIG_DATABASE_PASSWORD", env.CONFIG_DATABASE_PASSWORD - - // we map the docker pg db to port 5433 so it does not conflict with other pg instances. 
- environment "DATABASE_URL", "jdbc:postgresql://localhost:5433/${env.DATABASE_DB}" - environment "CONFIG_DATABASE_URL", "jdbc:postgresql://localhost:5433/${env.CONFIG_DATABASE_DB}" - - environment "RUN_DATABASE_MIGRATION_ON_STARTUP", "true" - - environment "WORKSPACE_ROOT", env.WORKSPACE_ROOT - environment "CONFIG_ROOT", "/tmp/airbyte_config" - environment "TRACKING_STRATEGY", env.TRACKING_STRATEGY - environment "AIRBYTE_VERSION", env.VERSION - environment "AIRBYTE_ROLE", System.getenv('AIRBYTE_ROLE') - environment "TEMPORAL_HOST", "localhost:7233" - - environment 'MICRONAUT_ENVIRONMENTS', 'control-plane' -} - -// produce reproducible archives -// (see https://docs.gradle.org/current/userguide/working_with_files.html#sec:reproducible_archives) -tasks.withType(AbstractArchiveTask) { - preserveFileTimestamps = false - reproducibleFileOrder = true -} - -tasks.named("buildDockerImage") { - dependsOn copyGeneratedTar -} - -Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project) diff --git a/airbyte-server/gradle.properties b/airbyte-server/gradle.properties deleted file mode 100644 index 227a1c72cdce..000000000000 --- a/airbyte-server/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=server diff --git a/airbyte-server/readme.md b/airbyte-server/readme.md deleted file mode 100644 index a6817ac3af7b..000000000000 --- a/airbyte-server/readme.md +++ /dev/null @@ -1,5 +0,0 @@ -# airbyte-server - -This module contains the actual app that runs the Airbyte Configuration API. The main method can be found in `ServerApp.java`. - -The external API interface that it implements is declared in `airbyte-api`. The class that actually implements that interface is called `ConfigurationApi`. You will notice that class is very large, because generates a method for every endpoint. To keep it manageable, that class just delegates all requests to more tightly-scoped, resource-based handlers. 
For example, the `workspace/get` endpoint is present in `ConfigurationApi`, but all it does it delegate the call to the `WorkspaceHandler` which contains all Workspace-specific logic. Unit tests for the server happen at the Handler-level, not for the `ConfigurationApi`. diff --git a/airbyte-server/src/main/java/io/airbyte/server/Application.java b/airbyte-server/src/main/java/io/airbyte/server/Application.java deleted file mode 100644 index c1d154b542bc..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/Application.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server; - -import io.micronaut.runtime.Micronaut; - -public class Application { - - public static void main(final String[] args) { - Micronaut.run(Application.class, args); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/CorsFilter.java b/airbyte-server/src/main/java/io/airbyte/server/CorsFilter.java deleted file mode 100644 index a99d75766e14..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/CorsFilter.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server; - -import com.google.common.collect.ImmutableMap; -import com.google.common.net.HttpHeaders; -import java.util.Map; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.container.ContainerResponseContext; -import javax.ws.rs.container.ContainerResponseFilter; - -// https://medium.com/@Leejjon_net/how-to-allow-cross-origin-requests-in-a-jax-rs-microservice-d2a6aa2df484 -public class CorsFilter implements ContainerResponseFilter { - - public static final ImmutableMap MAP = ImmutableMap.of( - HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*", - HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "Origin, Content-Type, Accept, Content-Encoding, X-Airbyte-Analytic-Source", - HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET, POST, PUT, DELETE, OPTIONS, HEAD"); - - @Override - public void filter(final ContainerRequestContext requestContext, final ContainerResponseContext responseContext) { - for (final Map.Entry entry : MAP.entrySet()) { - responseContext.getHeaders().add(entry.getKey(), entry.getValue()); - } - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/DatabaseEventListener.java b/airbyte-server/src/main/java/io/airbyte/server/DatabaseEventListener.java deleted file mode 100644 index 887ab0c58bc1..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/DatabaseEventListener.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server; - -import io.airbyte.db.check.DatabaseCheckException; -import io.airbyte.db.check.DatabaseMigrationCheck; -import io.micronaut.context.event.ApplicationEventListener; -import io.micronaut.discovery.event.ServiceReadyEvent; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.lang.invoke.MethodHandles; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Singleton -public class DatabaseEventListener implements ApplicationEventListener { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private final DatabaseMigrationCheck configsMigrationCheck; - - private final DatabaseMigrationCheck jobsMigrationCheck; - - public DatabaseEventListener( - @Named("configsDatabaseMigrationCheck") final DatabaseMigrationCheck configsMigrationCheck, - @Named("jobsDatabaseMigrationCheck") final DatabaseMigrationCheck jobsMigrationCheck) { - this.configsMigrationCheck = configsMigrationCheck; - this.jobsMigrationCheck = jobsMigrationCheck; - } - - @Override - public void onApplicationEvent(final ServiceReadyEvent event) { - log.info("Checking configs database flyway migration version..."); - try { - configsMigrationCheck.check(); - } catch (final DatabaseCheckException e) { - throw new RuntimeException(e); - } - - log.info("Checking jobs database flyway migration version..."); - try { - jobsMigrationCheck.check(); - } catch (final DatabaseCheckException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java deleted file mode 100644 index 6cb04bf40dc9..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java +++ /dev/null @@ -1,384 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server; - -import io.airbyte.analytics.Deployment; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.analytics.TrackingClientSingleton; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.server.RequestLogger; -import io.airbyte.commons.server.errors.*; -import io.airbyte.commons.server.handlers.*; -import io.airbyte.commons.server.scheduler.DefaultSynchronousSchedulerClient; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.commons.server.scheduler.TemporalEventRunner; -import io.airbyte.commons.server.services.AirbyteGithubStore; -import io.airbyte.commons.temporal.ConnectionManagerUtils; -import io.airbyte.commons.temporal.NotificationUtils; -import io.airbyte.commons.temporal.StreamResetRecordsHelper; -import io.airbyte.commons.temporal.TemporalClient; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.commons.temporal.TemporalWorkflowUtils; -import io.airbyte.commons.temporal.scheduling.RouterService; -import io.airbyte.commons.temporal.scheduling.TaskQueueMapper; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.StatePersistence; -import io.airbyte.config.persistence.StreamResetPersistence; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHydrator; -import io.airbyte.db.Database; -import io.airbyte.db.check.DatabaseCheckException; -import io.airbyte.db.factory.DatabaseCheckFactory; -import 
io.airbyte.persistence.job.DefaultJobPersistence; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WebUrlHelper; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.persistence.job.errorreporter.JobErrorReporter; -import io.airbyte.persistence.job.errorreporter.JobErrorReportingClient; -import io.airbyte.persistence.job.errorreporter.JobErrorReportingClientFactory; -import io.airbyte.persistence.job.factory.OAuthConfigSupplier; -import io.airbyte.persistence.job.tracker.JobTracker; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.workers.helper.ConnectionHelper; -import io.temporal.client.WorkflowClient; -import io.temporal.serviceclient.WorkflowServiceStubs; -import java.net.http.HttpClient; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; -import org.flywaydb.core.Flyway; -import org.glassfish.jersey.jackson.internal.jackson.jaxrs.json.JacksonJaxbJsonProvider; -import org.glassfish.jersey.server.ResourceConfig; -import org.glassfish.jersey.servlet.ServletContainer; -import org.jooq.DSLContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -@SuppressWarnings("PMD.AvoidCatchingThrowable") -public class ServerApp implements ServerRunnable { - - private static final Logger LOGGER = LoggerFactory.getLogger(ServerApp.class); - private static final int PORT = 8001; - - private final AirbyteVersion airbyteVersion; - private final Set> customComponentClasses; - private final Set customComponents; - - public ServerApp(final AirbyteVersion airbyteVersion, - final Set> customComponentClasses, - final Set customComponents) { - this.airbyteVersion = airbyteVersion; - this.customComponentClasses = customComponentClasses; - this.customComponents = customComponents; - } 
- - @Override - @SuppressWarnings("PMD.InvalidLogMessageFormat") - public void start() throws Exception { - final Server server = new Server(PORT); - - final ServletContextHandler handler = new ServletContextHandler(); - - final Map mdc = MDC.getCopyOfContextMap(); - - final ResourceConfig rc = - new ResourceConfig() - .register(new RequestLogger(mdc)) - .register(InvalidInputExceptionMapper.class) - .register(InvalidJsonExceptionMapper.class) - .register(InvalidJsonInputExceptionMapper.class) - .register(KnownExceptionMapper.class) - .register(UncaughtExceptionMapper.class) - .register(NotFoundExceptionMapper.class) - // needed so that the custom json exception mappers don't get overridden - // https://stackoverflow.com/questions/35669774/jersey-custom-exception-mapper-for-invalid-json-string - .register(JacksonJaxbJsonProvider.class); - - // inject custom server functionality - customComponentClasses.forEach(rc::register); - customComponents.forEach(rc::register); - - final ServletHolder configServlet = new ServletHolder(new ServletContainer(rc)); - - handler.addServlet(configServlet, "/api/*"); - - server.setHandler(handler); - - server.start(); - final String banner = MoreResources.readResource("banner/banner.txt"); - LOGGER.info(banner + String.format("Version: %s\n", airbyteVersion.serialize())); - server.join(); - - Runtime.getRuntime().addShutdownHook(new Thread(() -> { - try { - server.stop(); - } catch (final Exception ex) { - // silently fail at this stage because server is terminating. 
- LOGGER.warn("exception: " + ex); - } - })); - } - - public static void assertDatabasesReady(final Configs configs, - final DSLContext configsDslContext, - final Flyway configsFlyway, - final DSLContext jobsDslContext, - final Flyway jobsFlyway) - throws DatabaseCheckException { - LOGGER.info("Checking configs database flyway migration version.."); - DatabaseCheckFactory - .createConfigsDatabaseMigrationCheck(configsDslContext, configsFlyway, configs.getConfigsDatabaseMinimumFlywayMigrationVersion(), - configs.getConfigsDatabaseInitializationTimeoutMs()) - .check(); - - LOGGER.info("Checking jobs database flyway migration version.."); - DatabaseCheckFactory.createJobsDatabaseMigrationCheck(jobsDslContext, jobsFlyway, configs.getJobsDatabaseMinimumFlywayMigrationVersion(), - configs.getJobsDatabaseInitializationTimeoutMs()).check(); - } - - public static ServerRunnable getServer(final ServerFactory apiFactory, - final Configs configs, - final DSLContext configsDslContext, - final Flyway configsFlyway, - final DSLContext jobsDslContext, - final Flyway jobsFlyway, - final TaskQueueMapper taskQueueMapper) - throws Exception { - LogClientSingleton.getInstance().setWorkspaceMdc( - configs.getWorkerEnvironment(), - configs.getLogConfigs(), - LogClientSingleton.getInstance().getServerLogsRoot(configs.getWorkspaceRoot())); - - LOGGER.info("Checking databases.."); - assertDatabasesReady(configs, configsDslContext, configsFlyway, jobsDslContext, jobsFlyway); - - LOGGER.info("Creating config repository..."); - final Database configsDatabase = new Database(configsDslContext); - final SecretsHydrator secretsHydrator = SecretPersistence.getSecretsHydrator(configsDslContext, configs); - final Optional secretPersistence = SecretPersistence.getLongLived(configsDslContext, configs); - final Optional ephemeralSecretPersistence = SecretPersistence.getEphemeral(configsDslContext, configs); - final ConfigRepository configRepository = new ConfigRepository(configsDatabase); - final 
SecretsRepositoryReader secretsRepositoryReader = new SecretsRepositoryReader(configRepository, secretsHydrator); - final SecretsRepositoryWriter secretsRepositoryWriter = new SecretsRepositoryWriter(configRepository, secretPersistence, - ephemeralSecretPersistence); - - LOGGER.info("Creating jobs persistence..."); - final Database jobsDatabase = new Database(jobsDslContext); - final JobPersistence jobPersistence = new DefaultJobPersistence(jobsDatabase); - - TrackingClientSingleton.initialize( - configs.getTrackingStrategy(), - new Deployment(configs.getDeploymentMode(), jobPersistence.getDeployment().orElseThrow(), configs.getWorkerEnvironment()), - configs.getAirbyteRole(), - configs.getAirbyteVersion(), - configRepository); - - final TrackingClient trackingClient = TrackingClientSingleton.get(); - final JobTracker jobTracker = new JobTracker(configRepository, jobPersistence, trackingClient); - - final FeatureFlags envVariableFeatureFlags = new EnvVariableFeatureFlags(); - - final WebUrlHelper webUrlHelper = new WebUrlHelper(configs.getWebappUrl()); - final JobErrorReportingClient jobErrorReportingClient = JobErrorReportingClientFactory.getClient(configs.getJobErrorReportingStrategy(), configs); - final JobErrorReporter jobErrorReporter = - new JobErrorReporter( - configRepository, - configs.getDeploymentMode(), - configs.getAirbyteVersionOrWarning(), - webUrlHelper, - jobErrorReportingClient); - - final TemporalUtils temporalUtils = new TemporalUtils( - configs.getTemporalCloudClientCert(), - configs.getTemporalCloudClientKey(), - configs.temporalCloudEnabled(), - configs.getTemporalCloudHost(), - configs.getTemporalCloudNamespace(), - configs.getTemporalHost(), - configs.getTemporalRetentionInDays()); - - final StreamResetPersistence streamResetPersistence = new StreamResetPersistence(configsDatabase); - final WorkflowServiceStubs temporalService = temporalUtils.createTemporalService(); - final ConnectionManagerUtils connectionManagerUtils = new 
ConnectionManagerUtils(); - final NotificationUtils notificationUtils = new NotificationUtils(); - final StreamResetRecordsHelper streamResetRecordsHelper = new StreamResetRecordsHelper(jobPersistence, streamResetPersistence); - - final WorkflowClient workflowClient = TemporalWorkflowUtils.createWorkflowClient(temporalService, temporalUtils.getNamespace()); - final TemporalClient temporalClient = new TemporalClient( - configs.getWorkspaceRoot(), - workflowClient, - temporalService, - streamResetPersistence, - connectionManagerUtils, - notificationUtils, - streamResetRecordsHelper); - - final OAuthConfigSupplier oAuthConfigSupplier = new OAuthConfigSupplier(configRepository, trackingClient); - RouterService routerService = new RouterService(configRepository, taskQueueMapper); - final DefaultSynchronousSchedulerClient syncSchedulerClient = - new DefaultSynchronousSchedulerClient(temporalClient, jobTracker, jobErrorReporter, oAuthConfigSupplier, routerService); - final HttpClient httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - final EventRunner eventRunner = new TemporalEventRunner(temporalClient); - - final WorkspaceHelper workspaceHelper = new WorkspaceHelper(configRepository, jobPersistence); - - final JsonSchemaValidator schemaValidator = new JsonSchemaValidator(); - - final AttemptHandler attemptHandler = new AttemptHandler(jobPersistence); - - final ConnectionHelper connectionHelper = new ConnectionHelper(configRepository, workspaceHelper); - - final ConnectionsHandler connectionsHandler = new ConnectionsHandler( - configRepository, - workspaceHelper, - trackingClient, - eventRunner, - connectionHelper); - - final DestinationHandler destinationHandler = new DestinationHandler( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - schemaValidator, - connectionsHandler, - oAuthConfigSupplier); - - final OperationsHandler operationsHandler = new OperationsHandler(configRepository); - - final 
SchedulerHandler schedulerHandler = new SchedulerHandler( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - syncSchedulerClient, - jobPersistence, - configs.getWorkerEnvironment(), - configs.getLogConfigs(), - eventRunner, - connectionsHandler, - envVariableFeatureFlags, - webUrlHelper); - - final AirbyteProtocolVersionRange airbyteProtocolVersionRange = new AirbyteProtocolVersionRange(configs.getAirbyteProtocolVersionMin(), - configs.getAirbyteProtocolVersionMax()); - - final AirbyteGithubStore airbyteGithubStore = AirbyteGithubStore.production(); - - final DestinationDefinitionsHandler destinationDefinitionsHandler = new DestinationDefinitionsHandler(configRepository, - () -> UUID.randomUUID(), - syncSchedulerClient, - airbyteGithubStore, - destinationHandler, - airbyteProtocolVersionRange); - - final HealthCheckHandler healthCheckHandler = new HealthCheckHandler(configRepository); - - final OAuthHandler oAuthHandler = new OAuthHandler(configRepository, httpClient, trackingClient, secretsRepositoryReader); - - final SourceHandler sourceHandler = new SourceHandler( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - schemaValidator, - connectionsHandler, - oAuthConfigSupplier); - - final SourceDefinitionsHandler sourceDefinitionsHandler = - new SourceDefinitionsHandler(configRepository, () -> UUID.randomUUID(), syncSchedulerClient, airbyteGithubStore, sourceHandler, - airbyteProtocolVersionRange); - - final JobHistoryHandler jobHistoryHandler = new JobHistoryHandler( - jobPersistence, - configs.getWorkerEnvironment(), - configs.getLogConfigs(), - connectionsHandler, - sourceHandler, - sourceDefinitionsHandler, - destinationHandler, - destinationDefinitionsHandler, - configs.getAirbyteVersion(), - temporalClient); - - final LogsHandler logsHandler = new LogsHandler(configs); - - final WorkspacesHandler workspacesHandler = new WorkspacesHandler( - configRepository, - secretsRepositoryWriter, - connectionsHandler, - 
destinationHandler, - sourceHandler); - - final StatePersistence statePersistence = new StatePersistence(configsDatabase); - - final StateHandler stateHandler = new StateHandler(statePersistence); - - final WebBackendConnectionsHandler webBackendConnectionsHandler = new WebBackendConnectionsHandler( - connectionsHandler, - stateHandler, - sourceHandler, - destinationHandler, - jobHistoryHandler, - schedulerHandler, - operationsHandler, - eventRunner, - configRepository); - - final WebBackendGeographiesHandler webBackendGeographiesHandler = new WebBackendGeographiesHandler(); - - final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler = - new WebBackendCheckUpdatesHandler(configRepository, AirbyteGithubStore.production()); - - LOGGER.info("Starting server..."); - - return apiFactory.create( - syncSchedulerClient, - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - jobPersistence, - configsDatabase, - jobsDatabase, - trackingClient, - configs.getWorkerEnvironment(), - configs.getLogConfigs(), - configs.getAirbyteVersion(), - configs.getWorkspaceRoot(), - httpClient, - eventRunner, - configsFlyway, - jobsFlyway, - attemptHandler, - connectionsHandler, - destinationDefinitionsHandler, - destinationHandler, - healthCheckHandler, - jobHistoryHandler, - logsHandler, - oAuthHandler, - new OpenApiConfigHandler(), - operationsHandler, - schedulerHandler, - sourceHandler, - sourceDefinitionsHandler, - stateHandler, - workspacesHandler, - webBackendConnectionsHandler, - webBackendGeographiesHandler, - webBackendCheckUpdatesHandler); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java deleted file mode 100644 index 19b9230916a7..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server; - -import io.airbyte.analytics.TrackingClient; -import io.airbyte.commons.server.handlers.*; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.db.Database; -import io.airbyte.persistence.job.JobPersistence; -import java.net.http.HttpClient; -import java.nio.file.Path; -import java.util.HashSet; -import org.flywaydb.core.Flyway; - -public interface ServerFactory { - - ServerRunnable create(final SynchronousSchedulerClient synchronousSchedulerClient, - final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final JobPersistence jobPersistence, - final Database configsDatabase, - final Database jobsDatabase, - final TrackingClient trackingClient, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - final AirbyteVersion airbyteVersion, - final Path workspaceRoot, - final HttpClient httpClient, - final EventRunner eventRunner, - final Flyway configsFlyway, - final Flyway jobsFlyway, - final AttemptHandler attemptHandler, - final ConnectionsHandler connectionsHandler, - final DestinationDefinitionsHandler destinationDefinitionsHandler, - final DestinationHandler destinationHandler, - final HealthCheckHandler healthCheckHandler, - final JobHistoryHandler jobHistoryHandler, - final LogsHandler logsHandler, - final OAuthHandler ooAuthHandler, - final OpenApiConfigHandler openApiConfigHandler, - final OperationsHandler operationsHandler, - final SchedulerHandler schedulerHandler, - final 
SourceHandler sourceHandler, - final SourceDefinitionsHandler sourceDefinitionsHandler, - final StateHandler stateHandler, - final WorkspacesHandler workspacesHandler, - final WebBackendConnectionsHandler webBackendConnectionsHandler, - final WebBackendGeographiesHandler webBackendGeographiesHandler, - final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler); - - class Api implements ServerFactory { - - @Override - public ServerRunnable create(final SynchronousSchedulerClient synchronousSchedulerClient, - final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final JobPersistence jobPersistence, - final Database configsDatabase, - final Database jobsDatabase, - final TrackingClient trackingClient, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - final AirbyteVersion airbyteVersion, - final Path workspaceRoot, - final HttpClient httpClient, - final EventRunner eventRunner, - final Flyway configsFlyway, - final Flyway jobsFlyway, - final AttemptHandler attemptHandler, - final ConnectionsHandler connectionsHandler, - final DestinationDefinitionsHandler destinationDefinitionsHandler, - final DestinationHandler destinationHandler, - final HealthCheckHandler healthCheckHandler, - final JobHistoryHandler jobHistoryHandler, - final LogsHandler logsHandler, - final OAuthHandler ooAuthHandler, - final OpenApiConfigHandler openApiConfigHandler, - final OperationsHandler operationsHandler, - final SchedulerHandler schedulerHandler, - final SourceHandler sourceHandler, - final SourceDefinitionsHandler sourceDefinitionsHandler, - final StateHandler stateHandler, - final WorkspacesHandler workspacesHandler, - final WebBackendConnectionsHandler webBackendConnectionsHandler, - final WebBackendGeographiesHandler webBackendGeographiesHandler, - final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler) { - - // construct server - return 
new ServerApp(airbyteVersion, new HashSet<>(), new HashSet<>()); - } - - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerRunnable.java b/airbyte-server/src/main/java/io/airbyte/server/ServerRunnable.java deleted file mode 100644 index 4668ff84251a..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerRunnable.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server; - -public interface ServerRunnable { - - void start() throws Exception; - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ApiHelper.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ApiHelper.java deleted file mode 100644 index de4cdbd3c30f..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ApiHelper.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import io.airbyte.commons.server.errors.BadObjectSchemaKnownException; -import io.airbyte.commons.server.errors.IdNotFoundKnownException; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import org.slf4j.LoggerFactory; - -public class ApiHelper { - - static T execute(final HandlerCall call) { - try { - return call.call(); - } catch (final ConfigNotFoundException e) { - throw new IdNotFoundKnownException(String.format("Could not find configuration for %s: %s.", e.getType(), e.getConfigId()), - e.getConfigId(), e); - } catch (final JsonValidationException e) { - throw new BadObjectSchemaKnownException( - String.format("The provided configuration does not fulfill the specification. 
Errors: %s", e.getMessage()), e); - } catch (final IOException e) { - throw new RuntimeException(e); - } catch (final Exception e) { - LoggerFactory.getLogger(ApiHelper.class).error("Unexpected Exception", e); - throw e; - } - } - - interface HandlerCall { - - T call() throws ConfigNotFoundException, IOException, JsonValidationException; - - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java deleted file mode 100644 index 9c6949b9b36e..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.ADMIN; - -import io.airbyte.api.generated.AttemptApi; -import io.airbyte.api.model.generated.InternalOperationResult; -import io.airbyte.api.model.generated.SaveAttemptSyncConfigRequestBody; -import io.airbyte.api.model.generated.SaveStatsRequestBody; -import io.airbyte.api.model.generated.SetWorkflowInAttemptRequestBody; -import io.airbyte.commons.server.handlers.AttemptHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Body; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/attempt/") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class AttemptApiController implements AttemptApi { - - private final AttemptHandler attemptHandler; - - public AttemptApiController(final AttemptHandler attemptHandler) { - this.attemptHandler = attemptHandler; - } - - @Override - @Post(uri = "/save_stats", - processes = 
MediaType.APPLICATION_JSON) - public InternalOperationResult saveStats(final SaveStatsRequestBody requestBody) { - return ApiHelper.execute(() -> attemptHandler.saveStats(requestBody)); - } - - @Override - @Post(uri = "/set_workflow_in_attempt", - processes = MediaType.APPLICATION_JSON) - @Secured({ADMIN}) - public InternalOperationResult setWorkflowInAttempt(@Body final SetWorkflowInAttemptRequestBody requestBody) { - return ApiHelper.execute(() -> attemptHandler.setWorkflowInAttempt(requestBody)); - } - - @Override - @Post(uri = "/save_sync_config", - processes = MediaType.APPLICATION_JSON) - @Secured({ADMIN}) - public InternalOperationResult saveSyncConfig(@Body final SaveAttemptSyncConfigRequestBody requestBody) { - return ApiHelper.execute(() -> attemptHandler.saveSyncConfig(requestBody)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java deleted file mode 100644 index 5a86ff1dfa54..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.ConnectionApi; -import io.airbyte.api.model.generated.ConnectionCreate; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionReadList; -import io.airbyte.api.model.generated.ConnectionSearch; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.ConnectionsHandler; -import io.airbyte.commons.server.handlers.OperationsHandler; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.micronaut.context.annotation.Context; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.annotation.Body; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.http.annotation.Status; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/connections") -@Context() -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class ConnectionApiController implements ConnectionApi { - - private final ConnectionsHandler connectionsHandler; - private final OperationsHandler operationsHandler; - private final SchedulerHandler schedulerHandler; - - public ConnectionApiController(final ConnectionsHandler connectionsHandler, - final OperationsHandler operationsHandler, - final SchedulerHandler schedulerHandler) { - this.connectionsHandler = connectionsHandler; - this.operationsHandler = operationsHandler; - 
this.schedulerHandler = schedulerHandler; - } - - @Override - @Post(uri = "/create") - @Secured({EDITOR}) - @SecuredWorkspace - public ConnectionRead createConnection(@Body final ConnectionCreate connectionCreate) { - return ApiHelper.execute(() -> connectionsHandler.createConnection(connectionCreate)); - } - - @Override - @Post(uri = "/update") - @Secured({EDITOR}) - @SecuredWorkspace - public ConnectionRead updateConnection(@Body final ConnectionUpdate connectionUpdate) { - return ApiHelper.execute(() -> connectionsHandler.updateConnection(connectionUpdate)); - } - - @Override - @Post(uri = "/list") - @Secured({READER}) - @SecuredWorkspace - public ConnectionReadList listConnectionsForWorkspace(@Body final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody)); - } - - @Override - @Post(uri = "/list_all") - @Secured({READER}) - @SecuredWorkspace - public ConnectionReadList listAllConnectionsForWorkspace(@Body final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> connectionsHandler.listAllConnectionsForWorkspace(workspaceIdRequestBody)); - } - - @Override - @Post(uri = "/search") - public ConnectionReadList searchConnections(@Body final ConnectionSearch connectionSearch) { - return ApiHelper.execute(() -> connectionsHandler.searchConnections(connectionSearch)); - } - - @Override - @Post(uri = "/get") - @Secured({READER}) - @SecuredWorkspace - public ConnectionRead getConnection(@Body final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> connectionsHandler.getConnection(connectionIdRequestBody.getConnectionId())); - } - - @Override - @Post(uri = "/delete") - @Status(HttpStatus.NO_CONTENT) - @Secured({EDITOR}) - @SecuredWorkspace - public void deleteConnection(@Body final ConnectionIdRequestBody connectionIdRequestBody) { - ApiHelper.execute(() -> { - 
operationsHandler.deleteOperationsForConnection(connectionIdRequestBody); - connectionsHandler.deleteConnection(connectionIdRequestBody.getConnectionId()); - return null; - }); - } - - @Override - @Post(uri = "/sync") - @Secured({EDITOR}) - @SecuredWorkspace - public JobInfoRead syncConnection(@Body final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> schedulerHandler.syncConnection(connectionIdRequestBody)); - } - - @Override - @Post(uri = "/reset") - @Secured({EDITOR}) - @SecuredWorkspace - public JobInfoRead resetConnection(@Body final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> schedulerHandler.resetConnection(connectionIdRequestBody)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationApiController.java deleted file mode 100644 index d594124f998c..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationApiController.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.DestinationApi; -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.DestinationCloneRequestBody; -import io.airbyte.api.model.generated.DestinationCreate; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationReadList; -import io.airbyte.api.model.generated.DestinationSearch; -import io.airbyte.api.model.generated.DestinationUpdate; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.DestinationHandler; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.annotation.Body; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.http.annotation.Status; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/destinations") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class DestinationApiController implements DestinationApi { - - private final DestinationHandler destinationHandler; - private final SchedulerHandler schedulerHandler; - - public DestinationApiController(final DestinationHandler destinationHandler, final SchedulerHandler schedulerHandler) { - this.destinationHandler = destinationHandler; - this.schedulerHandler = schedulerHandler; - } - - @Post(uri = "/check_connection") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public CheckConnectionRead 
checkConnectionToDestination(@Body final DestinationIdRequestBody destinationIdRequestBody) { - return ApiHelper.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationId(destinationIdRequestBody)); - } - - @Post(uri = "/check_connection_for_update") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public CheckConnectionRead checkConnectionToDestinationForUpdate(@Body final DestinationUpdate destinationUpdate) { - return ApiHelper.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationIdForUpdate(destinationUpdate)); - } - - @Post(uri = "/clone") - @Override - public DestinationRead cloneDestination(@Body final DestinationCloneRequestBody destinationCloneRequestBody) { - return ApiHelper.execute(() -> destinationHandler.cloneDestination(destinationCloneRequestBody)); - } - - @Post(uri = "/create") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public DestinationRead createDestination(@Body final DestinationCreate destinationCreate) { - return ApiHelper.execute(() -> destinationHandler.createDestination(destinationCreate)); - } - - @Post(uri = "/delete") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - @Status(HttpStatus.NO_CONTENT) - public void deleteDestination(@Body final DestinationIdRequestBody destinationIdRequestBody) { - ApiHelper.execute(() -> { - destinationHandler.deleteDestination(destinationIdRequestBody); - return null; - }); - } - - @Post(uri = "/get") - @Secured({READER}) - @SecuredWorkspace - @Override - public DestinationRead getDestination(@Body final DestinationIdRequestBody destinationIdRequestBody) { - return ApiHelper.execute(() -> destinationHandler.getDestination(destinationIdRequestBody)); - } - - @Post(uri = "/list") - @Secured({READER}) - @SecuredWorkspace - @Override - public DestinationReadList listDestinationsForWorkspace(@Body final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> 
destinationHandler.listDestinationsForWorkspace(workspaceIdRequestBody)); - } - - @Post(uri = "/search") - @Override - public DestinationReadList searchDestinations(@Body final DestinationSearch destinationSearch) { - return ApiHelper.execute(() -> destinationHandler.searchDestinations(destinationSearch)); - } - - @Post(uri = "/update") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public DestinationRead updateDestination(@Body final DestinationUpdate destinationUpdate) { - return ApiHelper.execute(() -> destinationHandler.updateDestination(destinationUpdate)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionApiController.java deleted file mode 100644 index 2f86a587082f..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionApiController.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.ADMIN; -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.DestinationDefinitionApi; -import io.airbyte.api.model.generated.CustomDestinationDefinitionCreate; -import io.airbyte.api.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionRead; -import io.airbyte.api.model.generated.DestinationDefinitionReadList; -import io.airbyte.api.model.generated.DestinationDefinitionUpdate; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionRead; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionReadList; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.DestinationDefinitionsHandler; -import io.micronaut.context.annotation.Context; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.http.annotation.Status; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/destination_definitions") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Context -@Secured(SecurityRule.IS_AUTHENTICATED) -public class DestinationDefinitionApiController implements DestinationDefinitionApi { - - private final DestinationDefinitionsHandler destinationDefinitionsHandler; - - public DestinationDefinitionApiController(final DestinationDefinitionsHandler destinationDefinitionsHandler) { - 
this.destinationDefinitionsHandler = destinationDefinitionsHandler; - } - - @Post(uri = "/create_custom") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public DestinationDefinitionRead createCustomDestinationDefinition(final CustomDestinationDefinitionCreate customDestinationDefinitionCreate) { - return ApiHelper.execute(() -> destinationDefinitionsHandler.createCustomDestinationDefinition(customDestinationDefinitionCreate)); - } - - @Post(uri = "/delete") - @Secured({ADMIN}) - @Override - @Status(HttpStatus.NO_CONTENT) - public void deleteDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) { - ApiHelper.execute(() -> { - destinationDefinitionsHandler.deleteDestinationDefinition(destinationDefinitionIdRequestBody); - return null; - }); - } - - @Post(uri = "/get") - @Secured({AUTHENTICATED_USER}) - @Override - public DestinationDefinitionRead getDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) { - return ApiHelper.execute(() -> destinationDefinitionsHandler.getDestinationDefinition(destinationDefinitionIdRequestBody)); - } - - @Post(uri = "/get_for_workspace") - @Secured({READER}) - @SecuredWorkspace - @Override - public DestinationDefinitionRead getDestinationDefinitionForWorkspace(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - return ApiHelper.execute(() -> destinationDefinitionsHandler.getDestinationDefinitionForWorkspace(destinationDefinitionIdWithWorkspaceId)); - } - - @Post(uri = "/grant_definition") - @Secured({ADMIN}) - @Override - public PrivateDestinationDefinitionRead grantDestinationDefinitionToWorkspace(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - return ApiHelper - .execute(() -> destinationDefinitionsHandler.grantDestinationDefinitionToWorkspace(destinationDefinitionIdWithWorkspaceId)); - } - - @Post(uri = "/list") - @Secured({AUTHENTICATED_USER}) - @Override - 
public DestinationDefinitionReadList listDestinationDefinitions() { - return ApiHelper.execute(destinationDefinitionsHandler::listDestinationDefinitions); - } - - @Post(uri = "/list_for_workspace") - @Secured({READER}) - @SecuredWorkspace - @Override - public DestinationDefinitionReadList listDestinationDefinitionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> destinationDefinitionsHandler.listDestinationDefinitionsForWorkspace(workspaceIdRequestBody)); - } - - @Post(uri = "/list_latest") - @Secured({AUTHENTICATED_USER}) - @Override - public DestinationDefinitionReadList listLatestDestinationDefinitions() { - return ApiHelper.execute(destinationDefinitionsHandler::listLatestDestinationDefinitions); - } - - @Post(uri = "/list_private") - @Secured({ADMIN}) - @Override - public PrivateDestinationDefinitionReadList listPrivateDestinationDefinitions(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> destinationDefinitionsHandler.listPrivateDestinationDefinitions(workspaceIdRequestBody)); - } - - @Post(uri = "/revoke_definition") - @Secured({ADMIN}) - @Override - public void revokeDestinationDefinitionFromWorkspace(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - ApiHelper.execute(() -> { - destinationDefinitionsHandler.revokeDestinationDefinitionFromWorkspace(destinationDefinitionIdWithWorkspaceId); - return null; - }); - } - - @Post(uri = "/update") - @Secured({AUTHENTICATED_USER}) - @Override - public DestinationDefinitionRead updateDestinationDefinition(final DestinationDefinitionUpdate destinationDefinitionUpdate) { - return ApiHelper.execute(() -> destinationDefinitionsHandler.updateDestinationDefinition(destinationDefinitionUpdate)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiController.java 
b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiController.java deleted file mode 100644 index 92dc7cae20e8..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiController.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; - -import io.airbyte.api.generated.DestinationDefinitionSpecificationApi; -import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/destination_definition_specifications") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class DestinationDefinitionSpecificationApiController implements DestinationDefinitionSpecificationApi { - - private final SchedulerHandler schedulerHandler; - - public DestinationDefinitionSpecificationApiController(final SchedulerHandler schedulerHandler) { - this.schedulerHandler = schedulerHandler; - } - - @Post("/get") - @Secured({AUTHENTICATED_USER}) - @Override - public DestinationDefinitionSpecificationRead getDestinationDefinitionSpecification(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - return ApiHelper.execute(() -> schedulerHandler.getDestinationSpecification(destinationDefinitionIdWithWorkspaceId)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationOauthApiController.java 
b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationOauthApiController.java deleted file mode 100644 index 7b747dafa47f..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationOauthApiController.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.ADMIN; -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; - -import io.airbyte.api.generated.DestinationOauthApi; -import io.airbyte.api.model.generated.CompleteDestinationOAuthRequest; -import io.airbyte.api.model.generated.DestinationOauthConsentRequest; -import io.airbyte.api.model.generated.OAuthConsentRead; -import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.OAuthHandler; -import io.micronaut.context.annotation.Context; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; -import java.util.Map; - -@Controller("/api/v1/destination_oauths") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Context -@Secured(SecurityRule.IS_AUTHENTICATED) -public class DestinationOauthApiController implements DestinationOauthApi { - - private final OAuthHandler oAuthHandler; - - public DestinationOauthApiController(final OAuthHandler oAuthHandler) { - this.oAuthHandler = oAuthHandler; - } - - @Post("/complete_oauth") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public Map completeDestinationOAuth(final CompleteDestinationOAuthRequest completeDestinationOAuthRequest) { - return ApiHelper.execute(() -> oAuthHandler.completeDestinationOAuth(completeDestinationOAuthRequest)); - } - - 
@Post("/get_consent_url") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public OAuthConsentRead getDestinationOAuthConsent(final DestinationOauthConsentRequest destinationOauthConsentRequest) { - return ApiHelper.execute(() -> oAuthHandler.getDestinationOAuthConsent(destinationOauthConsentRequest)); - } - - @Post("/oauth_params/create") - @Secured({ADMIN}) - @Override - public void setInstancewideDestinationOauthParams(final SetInstancewideDestinationOauthParamsRequestBody setInstancewideDestinationOauthParamsRequestBody) { - ApiHelper.execute(() -> { - oAuthHandler.setDestinationInstancewideOauthParams(setInstancewideDestinationOauthParamsRequestBody); - return null; - }); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/HealthApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/HealthApiController.java deleted file mode 100644 index 0e8622c52ddd..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/HealthApiController.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.generated.HealthApi; -import io.airbyte.api.model.generated.HealthCheckRead; -import io.airbyte.commons.server.handlers.HealthCheckHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Get; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/health") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_ANONYMOUS) -public class HealthApiController implements HealthApi { - - private final HealthCheckHandler healthCheckHandler; - - public HealthApiController(final HealthCheckHandler healthCheckHandler) { - this.healthCheckHandler = healthCheckHandler; - } - - @Override - @Get(produces = MediaType.APPLICATION_JSON) - public HealthCheckRead getHealthCheck() { - return healthCheckHandler.health(); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java deleted file mode 100644 index 4fc06076ab28..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.ADMIN; -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.JobsApi; -import io.airbyte.api.model.generated.AttemptNormalizationStatusReadList; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.JobDebugInfoRead; -import io.airbyte.api.model.generated.JobIdRequestBody; -import io.airbyte.api.model.generated.JobInfoLightRead; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.JobListRequestBody; -import io.airbyte.api.model.generated.JobOptionalRead; -import io.airbyte.api.model.generated.JobReadList; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.JobHistoryHandler; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.micronaut.context.annotation.Context; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/jobs") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Context -@Secured(SecurityRule.IS_AUTHENTICATED) -public class JobsApiController implements JobsApi { - - private final JobHistoryHandler jobHistoryHandler; - private final SchedulerHandler schedulerHandler; - - public JobsApiController(final JobHistoryHandler jobHistoryHandler, final SchedulerHandler schedulerHandler) { - this.jobHistoryHandler = jobHistoryHandler; - this.schedulerHandler = schedulerHandler; - } - - @Post("/cancel") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public JobInfoRead cancelJob(final JobIdRequestBody jobIdRequestBody) { - return ApiHelper.execute(() -> 
schedulerHandler.cancelJob(jobIdRequestBody)); - } - - @Post("/get_normalization_status") - @Secured({ADMIN}) - @Override - public AttemptNormalizationStatusReadList getAttemptNormalizationStatusesForJob(final JobIdRequestBody jobIdRequestBody) { - return ApiHelper.execute(() -> jobHistoryHandler.getAttemptNormalizationStatuses(jobIdRequestBody)); - } - - @Post("/get_debug_info") - @Secured({READER}) - @SecuredWorkspace - @Override - public JobDebugInfoRead getJobDebugInfo(final JobIdRequestBody jobIdRequestBody) { - return ApiHelper.execute(() -> jobHistoryHandler.getJobDebugInfo(jobIdRequestBody)); - } - - @Post("/get") - @Secured({READER}) - @SecuredWorkspace - @Override - public JobInfoRead getJobInfo(final JobIdRequestBody jobIdRequestBody) { - return ApiHelper.execute(() -> jobHistoryHandler.getJobInfo(jobIdRequestBody)); - } - - @Post("/get_light") - @Secured({READER}) - @SecuredWorkspace - @Override - public JobInfoLightRead getJobInfoLight(final JobIdRequestBody jobIdRequestBody) { - return ApiHelper.execute(() -> jobHistoryHandler.getJobInfoLight(jobIdRequestBody)); - } - - @Post("/get_last_replication_job") - @Secured({READER}) - @Override - public JobOptionalRead getLastReplicationJob(final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> jobHistoryHandler.getLastReplicationJob(connectionIdRequestBody)); - } - - @Post("/list") - @Secured({READER}) - @SecuredWorkspace - @Override - public JobReadList listJobsFor(final JobListRequestBody jobListRequestBody) { - return ApiHelper.execute(() -> jobHistoryHandler.listJobsFor(jobListRequestBody)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/LogsApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/LogsApiController.java deleted file mode 100644 index 97e1050fc518..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/LogsApiController.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., 
all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.ADMIN; - -import io.airbyte.api.generated.LogsApi; -import io.airbyte.api.model.generated.LogsRequestBody; -import io.airbyte.commons.server.handlers.LogsHandler; -import io.micronaut.context.annotation.Context; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; -import java.io.File; - -@Controller("/api/v1/logs") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Context -@Secured(SecurityRule.IS_AUTHENTICATED) -public class LogsApiController implements LogsApi { - - private final LogsHandler logsHandler; - - public LogsApiController(final LogsHandler logsHandler) { - this.logsHandler = logsHandler; - } - - @Post("/get") - @Secured({ADMIN}) - @Override - public File getLogs(final LogsRequestBody logsRequestBody) { - return ApiHelper.execute(() -> logsHandler.getLogs(logsRequestBody)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/NotFoundController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/NotFoundController.java deleted file mode 100644 index d278cde04c5d..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/NotFoundController.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Error; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -/** - * Custom controller that handles global 404 responses for unknown/unmapped paths. - */ -@Controller("/api/notfound") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_ANONYMOUS) -public class NotFoundController { - - @Error(status = HttpStatus.NOT_FOUND, - global = true) - public HttpResponse notFound(final HttpRequest request) { - // Would like to send the id along but we don't have access to the http request anymore to fetch it - // from. TODO: Come back to this with issue #4189 - return HttpResponse.status(HttpStatus.NOT_FOUND) - .body("Object not found.") - .contentType(MediaType.APPLICATION_JSON); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/NotificationsApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/NotificationsApiController.java deleted file mode 100644 index cf8c0a57180d..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/NotificationsApiController.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; - -import io.airbyte.api.generated.NotificationsApi; -import io.airbyte.api.model.generated.Notification; -import io.airbyte.api.model.generated.NotificationRead; -import io.airbyte.api.model.generated.NotificationRead.StatusEnum; -import io.airbyte.commons.server.converters.NotificationConverter; -import io.airbyte.commons.server.errors.IdNotFoundKnownException; -import io.airbyte.notification.NotificationClient; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Body; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; -import java.io.IOException; - -@Controller("/api/v1/notifications/try") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class NotificationsApiController implements NotificationsApi { - - public NotificationsApiController() {} - - @Post - @Secured({AUTHENTICATED_USER}) - @Override - public NotificationRead tryNotificationConfig(@Body final Notification notification) { - return ApiHelper.execute(() -> tryNotification(notification)); - } - - private NotificationRead tryNotification(final Notification notification) { - try { - final NotificationClient notificationClient = NotificationClient.createNotificationClient(NotificationConverter.toConfig(notification)); - final String messageFormat = "Hello World! 
This is a test from Airbyte to try %s notification settings for sync %s"; - final boolean failureNotified = notificationClient.notifyFailure(String.format(messageFormat, notification.getNotificationType(), "failures")); - final boolean successNotified = notificationClient.notifySuccess(String.format(messageFormat, notification.getNotificationType(), "successes")); - if (failureNotified || successNotified) { - return new NotificationRead().status(StatusEnum.SUCCEEDED); - } - } catch (final IllegalArgumentException e) { - throw new IdNotFoundKnownException(e.getMessage(), notification.getNotificationType().name(), e); - } catch (final IOException | InterruptedException e) { - return new NotificationRead().status(StatusEnum.FAILED).message(e.getMessage()); - } - return new NotificationRead().status(StatusEnum.FAILED); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/OpenapiApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/OpenapiApiController.java deleted file mode 100644 index ee5b4b6d0be8..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/OpenapiApiController.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; - -import io.airbyte.api.generated.OpenapiApi; -import io.airbyte.commons.server.handlers.OpenApiConfigHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Get; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; -import java.io.File; - -@Controller("/api/v1/openapi") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class OpenapiApiController implements OpenapiApi { - - private final OpenApiConfigHandler openApiConfigHandler; - - public OpenapiApiController(final OpenApiConfigHandler openApiConfigHandler) { - this.openApiConfigHandler = openApiConfigHandler; - } - - @Get(produces = "text/plain") - @Secured({AUTHENTICATED_USER}) - @Override - public File getOpenApiSpec() { - return ApiHelper.execute(openApiConfigHandler::getFile); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/OperationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/OperationApiController.java deleted file mode 100644 index 8065ea9280b6..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/OperationApiController.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.OperationApi; -import io.airbyte.api.model.generated.CheckOperationRead; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.OperationCreate; -import io.airbyte.api.model.generated.OperationIdRequestBody; -import io.airbyte.api.model.generated.OperationRead; -import io.airbyte.api.model.generated.OperationReadList; -import io.airbyte.api.model.generated.OperationUpdate; -import io.airbyte.api.model.generated.OperatorConfiguration; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.OperationsHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.annotation.Body; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.http.annotation.Status; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/operations") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class OperationApiController implements OperationApi { - - private final OperationsHandler operationsHandler; - - public OperationApiController(final OperationsHandler operationsHandler) { - this.operationsHandler = operationsHandler; - } - - @Post("/check") - @Secured({AUTHENTICATED_USER}) - @Override - public CheckOperationRead checkOperation(@Body final OperatorConfiguration operatorConfiguration) { - return ApiHelper.execute(() -> operationsHandler.checkOperation(operatorConfiguration)); - } - - @Post("/create") - @Override - @Secured({EDITOR}) - @SecuredWorkspace - public OperationRead 
createOperation(@Body final OperationCreate operationCreate) { - return ApiHelper.execute(() -> operationsHandler.createOperation(operationCreate)); - } - - @Post("/delete") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - @Status(HttpStatus.NO_CONTENT) - public void deleteOperation(@Body final OperationIdRequestBody operationIdRequestBody) { - ApiHelper.execute(() -> { - operationsHandler.deleteOperation(operationIdRequestBody); - return null; - }); - } - - @Post("/get") - @Secured({READER}) - @SecuredWorkspace - @Override - public OperationRead getOperation(@Body final OperationIdRequestBody operationIdRequestBody) { - return ApiHelper.execute(() -> operationsHandler.getOperation(operationIdRequestBody)); - } - - @Post("/list") - @Secured({READER}) - @SecuredWorkspace - @Override - public OperationReadList listOperationsForConnection(@Body final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> operationsHandler.listOperationsForConnection(connectionIdRequestBody)); - } - - @Post("/update") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public OperationRead updateOperation(@Body final OperationUpdate operationUpdate) { - return ApiHelper.execute(() -> operationsHandler.updateOperation(operationUpdate)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SchedulerApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SchedulerApiController.java deleted file mode 100644 index d7a5edf6f652..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SchedulerApiController.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; - -import io.airbyte.api.generated.SchedulerApi; -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.DestinationCoreConfig; -import io.airbyte.api.model.generated.SourceCoreConfig; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/scheduler") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class SchedulerApiController implements SchedulerApi { - - private final SchedulerHandler schedulerHandler; - - public SchedulerApiController(final SchedulerHandler schedulerHandler) { - this.schedulerHandler = schedulerHandler; - } - - @Post("/destinations/check_connection") - @Secured({AUTHENTICATED_USER}) - @Override - public CheckConnectionRead executeDestinationCheckConnection(final DestinationCoreConfig destinationCoreConfig) { - return ApiHelper.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationCreate(destinationCoreConfig)); - } - - @Post("/sources/check_connection") - @Secured({AUTHENTICATED_USER}) - @Override - public CheckConnectionRead executeSourceCheckConnection(final SourceCoreConfig sourceCoreConfig) { - return ApiHelper.execute(() -> schedulerHandler.checkSourceConnectionFromSourceCreate(sourceCoreConfig)); - } - - @Post("/sources/discover_schema") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public SourceDiscoverSchemaRead 
executeSourceDiscoverSchema(final SourceCoreConfig sourceCoreConfig) { - return ApiHelper.execute(() -> schedulerHandler.discoverSchemaForSourceFromSourceCreate(sourceCoreConfig)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceApiController.java deleted file mode 100644 index 4ad1f408ff05..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceApiController.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.SourceApi; -import io.airbyte.api.model.generated.ActorCatalogWithUpdatedAt; -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.DiscoverCatalogResult; -import io.airbyte.api.model.generated.SourceCloneRequestBody; -import io.airbyte.api.model.generated.SourceCreate; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody; -import io.airbyte.api.model.generated.SourceDiscoverSchemaWriteRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceReadList; -import io.airbyte.api.model.generated.SourceSearch; -import io.airbyte.api.model.generated.SourceUpdate; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.airbyte.commons.server.handlers.SourceHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.annotation.Controller; -import 
io.micronaut.http.annotation.Post; -import io.micronaut.http.annotation.Status; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/sources") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class SourceApiController implements SourceApi { - - private final SchedulerHandler schedulerHandler; - private final SourceHandler sourceHandler; - - public SourceApiController(final SchedulerHandler schedulerHandler, final SourceHandler sourceHandler) { - this.schedulerHandler = schedulerHandler; - this.sourceHandler = sourceHandler; - } - - @Post("/check_connection") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public CheckConnectionRead checkConnectionToSource(final SourceIdRequestBody sourceIdRequestBody) { - return ApiHelper.execute(() -> schedulerHandler.checkSourceConnectionFromSourceId(sourceIdRequestBody)); - } - - @Post("/check_connection_for_update") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public CheckConnectionRead checkConnectionToSourceForUpdate(final SourceUpdate sourceUpdate) { - return ApiHelper.execute(() -> schedulerHandler.checkSourceConnectionFromSourceIdForUpdate(sourceUpdate)); - } - - @Post("/clone") - @Override - public SourceRead cloneSource(final SourceCloneRequestBody sourceCloneRequestBody) { - return ApiHelper.execute(() -> sourceHandler.cloneSource(sourceCloneRequestBody)); - } - - @Post("/create") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public SourceRead createSource(final SourceCreate sourceCreate) { - return ApiHelper.execute(() -> sourceHandler.createSource(sourceCreate)); - } - - @Post("/delete") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - @Status(HttpStatus.NO_CONTENT) - public void deleteSource(final SourceIdRequestBody sourceIdRequestBody) { - ApiHelper.execute(() -> { - sourceHandler.deleteSource(sourceIdRequestBody); - return null; - }); - } - - 
@Post("/discover_schema") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public SourceDiscoverSchemaRead discoverSchemaForSource(final SourceDiscoverSchemaRequestBody sourceDiscoverSchemaRequestBody) { - return ApiHelper.execute(() -> schedulerHandler.discoverSchemaForSourceFromSourceId(sourceDiscoverSchemaRequestBody)); - } - - @Post("/get") - @Secured({READER}) - @SecuredWorkspace - @Override - public SourceRead getSource(final SourceIdRequestBody sourceIdRequestBody) { - return ApiHelper.execute(() -> sourceHandler.getSource(sourceIdRequestBody)); - } - - @Post("/most_recent_source_actor_catalog") - @Secured({READER}) - @SecuredWorkspace - @Override - public ActorCatalogWithUpdatedAt getMostRecentSourceActorCatalog(final SourceIdRequestBody sourceIdRequestBody) { - return ApiHelper.execute(() -> sourceHandler.getMostRecentSourceActorCatalogWithUpdatedAt(sourceIdRequestBody)); - } - - @Post("/list") - @Secured({READER}) - @SecuredWorkspace - @Override - public SourceReadList listSourcesForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> sourceHandler.listSourcesForWorkspace(workspaceIdRequestBody)); - } - - @Post("/search") - @Override - public SourceReadList searchSources(final SourceSearch sourceSearch) { - return ApiHelper.execute(() -> sourceHandler.searchSources(sourceSearch)); - } - - @Post("/update") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public SourceRead updateSource(final SourceUpdate sourceUpdate) { - return ApiHelper.execute(() -> sourceHandler.updateSource(sourceUpdate)); - } - - @Post("/write_discover_catalog_result") - @Override - public DiscoverCatalogResult writeDiscoverCatalogResult(final SourceDiscoverSchemaWriteRequestBody request) { - return ApiHelper.execute(() -> sourceHandler.writeDiscoverCatalogResult(request)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java 
b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java deleted file mode 100644 index f7d3a233b03d..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.ADMIN; -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.SourceDefinitionApi; -import io.airbyte.api.model.generated.CustomSourceDefinitionCreate; -import io.airbyte.api.model.generated.PrivateSourceDefinitionRead; -import io.airbyte.api.model.generated.PrivateSourceDefinitionReadList; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionRead; -import io.airbyte.api.model.generated.SourceDefinitionReadList; -import io.airbyte.api.model.generated.SourceDefinitionUpdate; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.SourceDefinitionsHandler; -import io.micronaut.context.annotation.Context; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.http.annotation.Status; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/source_definitions") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Context -@Secured(SecurityRule.IS_AUTHENTICATED) -public class 
SourceDefinitionApiController implements SourceDefinitionApi { - - private final SourceDefinitionsHandler sourceDefinitionsHandler; - - public SourceDefinitionApiController(final SourceDefinitionsHandler sourceDefinitionsHandler) { - this.sourceDefinitionsHandler = sourceDefinitionsHandler; - } - - @Post("/create_custom") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public SourceDefinitionRead createCustomSourceDefinition(final CustomSourceDefinitionCreate customSourceDefinitionCreate) { - return ApiHelper.execute(() -> sourceDefinitionsHandler.createCustomSourceDefinition(customSourceDefinitionCreate)); - } - - @Post("/delete") - @Secured({ADMIN}) - @Override - @Status(HttpStatus.NO_CONTENT) - public void deleteSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) { - ApiHelper.execute(() -> { - sourceDefinitionsHandler.deleteSourceDefinition(sourceDefinitionIdRequestBody); - return null; - }); - } - - @Post("/get") - @Secured({AUTHENTICATED_USER}) - @Override - public SourceDefinitionRead getSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) { - return ApiHelper.execute(() -> sourceDefinitionsHandler.getSourceDefinition(sourceDefinitionIdRequestBody)); - } - - @Post("/get_for_workspace") - @Secured({READER}) - @SecuredWorkspace - @Override - public SourceDefinitionRead getSourceDefinitionForWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - return ApiHelper.execute(() -> sourceDefinitionsHandler.getSourceDefinitionForWorkspace(sourceDefinitionIdWithWorkspaceId)); - } - - @Post("/grant_definition") - @Secured({ADMIN}) - @Override - public PrivateSourceDefinitionRead grantSourceDefinitionToWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - return ApiHelper.execute(() -> sourceDefinitionsHandler.grantSourceDefinitionToWorkspace(sourceDefinitionIdWithWorkspaceId)); - } - - @Post("/list_latest") - 
@Secured({AUTHENTICATED_USER}) - @Override - public SourceDefinitionReadList listLatestSourceDefinitions() { - return ApiHelper.execute(sourceDefinitionsHandler::listLatestSourceDefinitions); - } - - @Post("/list_private") - @Secured({ADMIN}) - @Override - public PrivateSourceDefinitionReadList listPrivateSourceDefinitions(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> sourceDefinitionsHandler.listPrivateSourceDefinitions(workspaceIdRequestBody)); - } - - @Post("/list") - @Secured({AUTHENTICATED_USER}) - @Override - public SourceDefinitionReadList listSourceDefinitions() { - return ApiHelper.execute(sourceDefinitionsHandler::listSourceDefinitions); - } - - @Post("/list_for_workspace") - @Secured({READER}) - @SecuredWorkspace - @Override - public SourceDefinitionReadList listSourceDefinitionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> sourceDefinitionsHandler.listSourceDefinitionsForWorkspace(workspaceIdRequestBody)); - } - - @Post("/revoke_definition") - @Secured({ADMIN}) - @Override - @Status(HttpStatus.NO_CONTENT) - public void revokeSourceDefinitionFromWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - ApiHelper.execute(() -> { - sourceDefinitionsHandler.revokeSourceDefinitionFromWorkspace(sourceDefinitionIdWithWorkspaceId); - return null; - }); - } - - @Post("/update") - @Secured({AUTHENTICATED_USER}) - @Override - public SourceDefinitionRead updateSourceDefinition(final SourceDefinitionUpdate sourceDefinitionUpdate) { - return ApiHelper.execute(() -> sourceDefinitionsHandler.updateSourceDefinition(sourceDefinitionUpdate)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiController.java deleted file mode 100644 index 0bf03a8fd403..000000000000 --- 
a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiController.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; - -import io.airbyte.api.generated.SourceDefinitionSpecificationApi; -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionSpecificationRead; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/source_definition_specifications") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class SourceDefinitionSpecificationApiController implements SourceDefinitionSpecificationApi { - - private final SchedulerHandler schedulerHandler; - - public SourceDefinitionSpecificationApiController(final SchedulerHandler schedulerHandler) { - this.schedulerHandler = schedulerHandler; - } - - @Post("/get") - @Secured({AUTHENTICATED_USER}) - @Override - public SourceDefinitionSpecificationRead getSourceDefinitionSpecification(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - return ApiHelper.execute(() -> schedulerHandler.getSourceDefinitionSpecification(sourceDefinitionIdWithWorkspaceId)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceOauthApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceOauthApiController.java deleted file mode 100644 index 14f0069e260a..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceOauthApiController.java +++ /dev/null @@ -1,63 +0,0 
@@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.ADMIN; -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; - -import io.airbyte.api.generated.SourceOauthApi; -import io.airbyte.api.model.generated.CompleteSourceOauthRequest; -import io.airbyte.api.model.generated.OAuthConsentRead; -import io.airbyte.api.model.generated.SetInstancewideSourceOauthParamsRequestBody; -import io.airbyte.api.model.generated.SourceOauthConsentRequest; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.OAuthHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Body; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; -import java.util.Map; - -@Controller("/api/v1/source_oauths") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class SourceOauthApiController implements SourceOauthApi { - - private final OAuthHandler oAuthHandler; - - public SourceOauthApiController(final OAuthHandler oAuthHandler) { - this.oAuthHandler = oAuthHandler; - } - - @Post("/complete_oauth") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public Map completeSourceOAuth(@Body final CompleteSourceOauthRequest completeSourceOauthRequest) { - return ApiHelper.execute(() -> oAuthHandler.completeSourceOAuth(completeSourceOauthRequest)); - } - - @Post("/get_consent_url") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public OAuthConsentRead getSourceOAuthConsent(@Body final SourceOauthConsentRequest sourceOauthConsentRequest) { - return ApiHelper.execute(() -> oAuthHandler.getSourceOAuthConsent(sourceOauthConsentRequest)); - } - - @Post("/oauth_params/create") - @Secured({ADMIN}) - @Override - 
public void setInstancewideSourceOauthParams(@Body final SetInstancewideSourceOauthParamsRequestBody setInstancewideSourceOauthParamsRequestBody) { - ApiHelper.execute(() -> { - oAuthHandler.setSourceInstancewideOauthParams(setInstancewideSourceOauthParamsRequestBody); - return null; - }); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/StateApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/StateApiController.java deleted file mode 100644 index 7446ca7f15af..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/StateApiController.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.ADMIN; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.StateApi; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateCreateOrUpdate; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.StateHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/state") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class StateApiController implements StateApi { - - private final StateHandler stateHandler; - - public StateApiController(final StateHandler stateHandler) { - this.stateHandler = stateHandler; - } - - @Post("/create_or_update") - @Secured({ADMIN}) - @Override - public ConnectionState createOrUpdateState(final ConnectionStateCreateOrUpdate connectionStateCreateOrUpdate) { - return 
ApiHelper.execute(() -> stateHandler.createOrUpdateState(connectionStateCreateOrUpdate)); - } - - @Post("/get") - @Secured({READER}) - @SecuredWorkspace - @Override - public ConnectionState getState(final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> stateHandler.getState(connectionIdRequestBody)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java deleted file mode 100644 index ca42ae51e2cb..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; -import static io.airbyte.commons.auth.AuthRoleConstants.EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.WebBackendApi; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.WebBackendCheckUpdatesRead; -import io.airbyte.api.model.generated.WebBackendConnectionCreate; -import io.airbyte.api.model.generated.WebBackendConnectionListRequestBody; -import io.airbyte.api.model.generated.WebBackendConnectionRead; -import io.airbyte.api.model.generated.WebBackendConnectionReadList; -import io.airbyte.api.model.generated.WebBackendConnectionRequestBody; -import io.airbyte.api.model.generated.WebBackendConnectionUpdate; -import io.airbyte.api.model.generated.WebBackendGeographiesListResult; -import io.airbyte.api.model.generated.WebBackendWorkspaceState; -import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; -import io.airbyte.commons.auth.SecuredWorkspace; -import 
io.airbyte.commons.server.handlers.WebBackendCheckUpdatesHandler; -import io.airbyte.commons.server.handlers.WebBackendConnectionsHandler; -import io.airbyte.commons.server.handlers.WebBackendGeographiesHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/web_backend") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -public class WebBackendApiController implements WebBackendApi { - - private final WebBackendConnectionsHandler webBackendConnectionsHandler; - private final WebBackendGeographiesHandler webBackendGeographiesHandler; - private final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler; - - public WebBackendApiController(final WebBackendConnectionsHandler webBackendConnectionsHandler, - final WebBackendGeographiesHandler webBackendGeographiesHandler, - final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler) { - this.webBackendConnectionsHandler = webBackendConnectionsHandler; - this.webBackendGeographiesHandler = webBackendGeographiesHandler; - this.webBackendCheckUpdatesHandler = webBackendCheckUpdatesHandler; - } - - @Post("/state/get_type") - @Secured({READER}) - @SecuredWorkspace - @Override - public ConnectionStateType getStateType(final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> webBackendConnectionsHandler.getStateType(connectionIdRequestBody)); - } - - @Post("/check_updates") - @Secured({READER}) - @Override - public WebBackendCheckUpdatesRead webBackendCheckUpdates() { - return ApiHelper.execute(webBackendCheckUpdatesHandler::checkUpdates); - } - - @Post("/connections/create") - @Secured({EDITOR}) - @SecuredWorkspace - @Override - public WebBackendConnectionRead webBackendCreateConnection(final 
WebBackendConnectionCreate webBackendConnectionCreate) { - return ApiHelper.execute(() -> webBackendConnectionsHandler.webBackendCreateConnection(webBackendConnectionCreate)); - } - - @Post("/connections/get") - @Secured({READER}) - @SecuredWorkspace - @Override - public WebBackendConnectionRead webBackendGetConnection(final WebBackendConnectionRequestBody webBackendConnectionRequestBody) { - return ApiHelper.execute(() -> webBackendConnectionsHandler.webBackendGetConnection(webBackendConnectionRequestBody)); - } - - @Post("/workspace/state") - @Secured({READER}) - @SecuredWorkspace - @Override - public WebBackendWorkspaceStateResult webBackendGetWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) { - return ApiHelper.execute(() -> webBackendConnectionsHandler.getWorkspaceState(webBackendWorkspaceState)); - } - - @Post("/connections/list") - @Secured({READER}) - @SecuredWorkspace - @Override - public WebBackendConnectionReadList webBackendListConnectionsForWorkspace(final WebBackendConnectionListRequestBody webBackendConnectionListRequestBody) { - return ApiHelper.execute(() -> webBackendConnectionsHandler.webBackendListConnectionsForWorkspace(webBackendConnectionListRequestBody)); - } - - @Post("/geographies/list") - @Secured({AUTHENTICATED_USER}) - @Override - public WebBackendGeographiesListResult webBackendListGeographies() { - return ApiHelper.execute(webBackendGeographiesHandler::listGeographiesOSS); - } - - @Post("/connections/update") - @Secured({EDITOR}) - @Override - public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConnectionUpdate webBackendConnectionUpdate) { - return ApiHelper.execute(() -> webBackendConnectionsHandler.webBackendUpdateConnection(webBackendConnectionUpdate)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java deleted file mode 100644 index 
1611a7807871..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static io.airbyte.commons.auth.AuthRoleConstants.AUTHENTICATED_USER; -import static io.airbyte.commons.auth.AuthRoleConstants.OWNER; -import static io.airbyte.commons.auth.AuthRoleConstants.READER; - -import io.airbyte.api.generated.WorkspaceApi; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.SlugRequestBody; -import io.airbyte.api.model.generated.WorkspaceCreate; -import io.airbyte.api.model.generated.WorkspaceGiveFeedback; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.api.model.generated.WorkspaceRead; -import io.airbyte.api.model.generated.WorkspaceReadList; -import io.airbyte.api.model.generated.WorkspaceUpdate; -import io.airbyte.api.model.generated.WorkspaceUpdateName; -import io.airbyte.commons.auth.SecuredWorkspace; -import io.airbyte.commons.server.handlers.WorkspacesHandler; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.annotation.Body; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Post; -import io.micronaut.http.annotation.Status; -import io.micronaut.security.annotation.Secured; -import io.micronaut.security.rules.SecurityRule; - -@Controller("/api/v1/workspaces") -@Requires(property = "airbyte.deployment-mode", - value = "OSS") -@Secured(SecurityRule.IS_AUTHENTICATED) -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class WorkspaceApiController implements WorkspaceApi { - - private final WorkspacesHandler workspacesHandler; - - public WorkspaceApiController(final WorkspacesHandler workspacesHandler) { - this.workspacesHandler = workspacesHandler; - } - - @Post("/create") - 
@Secured({AUTHENTICATED_USER}) - @Override - public WorkspaceRead createWorkspace(@Body final WorkspaceCreate workspaceCreate) { - return ApiHelper.execute(() -> workspacesHandler.createWorkspace(workspaceCreate)); - } - - @Post("/delete") - @Secured({OWNER}) - @SecuredWorkspace - @Override - @Status(HttpStatus.NO_CONTENT) - public void deleteWorkspace(@Body final WorkspaceIdRequestBody workspaceIdRequestBody) { - ApiHelper.execute(() -> { - workspacesHandler.deleteWorkspace(workspaceIdRequestBody); - return null; - }); - } - - @Post("/get") - @Secured({OWNER}) - @SecuredWorkspace - @Override - public WorkspaceRead getWorkspace(@Body final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ApiHelper.execute(() -> workspacesHandler.getWorkspace(workspaceIdRequestBody)); - } - - @Post("/get_by_slug") - @Secured({OWNER}) - @SecuredWorkspace - @Override - public WorkspaceRead getWorkspaceBySlug(@Body final SlugRequestBody slugRequestBody) { - return ApiHelper.execute(() -> workspacesHandler.getWorkspaceBySlug(slugRequestBody)); - } - - @Post("/list") - @Secured({AUTHENTICATED_USER}) - @Override - public WorkspaceReadList listWorkspaces() { - return ApiHelper.execute(workspacesHandler::listWorkspaces); - } - - @Post("/update") - @Secured({OWNER}) - @SecuredWorkspace - @Override - public WorkspaceRead updateWorkspace(@Body final WorkspaceUpdate workspaceUpdate) { - return ApiHelper.execute(() -> workspacesHandler.updateWorkspace(workspaceUpdate)); - } - - @Post("/tag_feedback_status_as_done") - @Secured({OWNER}) - @SecuredWorkspace - @Override - public void updateWorkspaceFeedback(@Body final WorkspaceGiveFeedback workspaceGiveFeedback) { - ApiHelper.execute(() -> { - workspacesHandler.setFeedbackDone(workspaceGiveFeedback); - return null; - }); - } - - @Post("/update_name") - @Secured({OWNER}) - @SecuredWorkspace - @Override - public WorkspaceRead updateWorkspaceName(@Body final WorkspaceUpdateName workspaceUpdateName) { - return ApiHelper.execute(() -> 
workspacesHandler.updateWorkspaceName(workspaceUpdateName)); - } - - @Post("/get_by_connection_id") - @Secured({READER}) - @Override - public WorkspaceRead getWorkspaceByConnectionId(@Body final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java deleted file mode 100644 index e70d08d72da5..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.config; - -import io.airbyte.analytics.TrackingClient; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.server.scheduler.EventRunner; -import io.airbyte.commons.server.scheduler.TemporalEventRunner; -import io.airbyte.commons.server.services.AirbyteGithubStore; -import io.airbyte.commons.temporal.TemporalClient; -import io.airbyte.commons.version.AirbyteProtocolVersionRange; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.commons.version.Version; -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.Configs.TrackingStrategy; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WebUrlHelper; -import io.airbyte.persistence.job.tracker.JobTracker; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Value; -import io.micronaut.core.util.StringUtils; -import jakarta.inject.Named; -import 
jakarta.inject.Singleton; -import java.nio.file.Path; -import java.util.Locale; -import java.util.UUID; -import java.util.function.Function; -import java.util.function.Supplier; - -@Factory -public class ApplicationBeanFactory { - - @Singleton - public Supplier randomUUIDSupplier() { - return () -> UUID.randomUUID(); - } - - @Singleton - public EventRunner eventRunner(final TemporalClient temporalClient) { - return new TemporalEventRunner(temporalClient); - } - - @Singleton - public TrackingStrategy trackingStrategy(@Value("${airbyte.tracking-strategy}") final String trackingStrategy) { - return convertToEnum(trackingStrategy, TrackingStrategy::valueOf, TrackingStrategy.LOGGING); - } - - @Singleton - public AirbyteVersion airbyteVersion(@Value("${airbyte.version}") final String airbyteVersion) { - return new AirbyteVersion(airbyteVersion); - } - - @Singleton - public DeploymentMode deploymentMode(@Value("${airbyte.deployment-mode}") final String deploymentMode) { - return convertToEnum(deploymentMode, DeploymentMode::valueOf, DeploymentMode.OSS); - } - - @Singleton - public JobTracker jobTracker( - final ConfigRepository configRepository, - final JobPersistence jobPersistence, - final TrackingClient trackingClient) { - return new JobTracker(configRepository, jobPersistence, trackingClient); - } - - @Singleton - public WebUrlHelper webUrlHelper(@Value("${airbyte.web-app.url}") final String webAppUrl) { - return new WebUrlHelper(webAppUrl); - } - - @Singleton - public FeatureFlags featureFlags() { - return new EnvVariableFeatureFlags(); - } - - @Singleton - @Named("workspaceRoot") - public Path workspaceRoot(@Value("${airbyte.workspace.root}") final String workspaceRoot) { - return Path.of(workspaceRoot); - } - - @Singleton - public JsonSecretsProcessor jsonSecretsProcessor(final FeatureFlags featureFlags) { - return JsonSecretsProcessor.builder() - .copySecrets(false) - .build(); - } - - @Singleton - public JsonSchemaValidator jsonSchemaValidator() { - return new 
JsonSchemaValidator(); - } - - @Singleton - public AirbyteGithubStore airbyteGithubStore() { - return AirbyteGithubStore.production(); - } - - @Singleton - public AirbyteProtocolVersionRange airbyteProtocolVersionRange( - @Value("${airbyte.protocol.min-version}") final String minVersion, - @Value("${airbyte.protocol.max-version}") final String maxVersion) { - return new AirbyteProtocolVersionRange(new Version(minVersion), new Version(maxVersion)); - } - - private T convertToEnum(final String value, final Function creatorFunction, final T defaultValue) { - return StringUtils.isNotEmpty(value) ? creatorFunction.apply(value.toUpperCase(Locale.ROOT)) : defaultValue; - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/CloudStorageBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/CloudStorageBeanFactory.java deleted file mode 100644 index e914b815a089..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/config/CloudStorageBeanFactory.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.config; - -import io.airbyte.config.storage.CloudStorageConfigs; -import io.airbyte.config.storage.CloudStorageConfigs.GcsConfig; -import io.airbyte.config.storage.CloudStorageConfigs.MinioConfig; -import io.airbyte.config.storage.CloudStorageConfigs.S3Config; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.annotation.Value; -import jakarta.inject.Named; -import jakarta.inject.Singleton; - -/** - * Micronaut bean factory for cloud storage-related singletons. 
- */ -@Factory -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class CloudStorageBeanFactory { - - @Singleton - @Requires(property = "airbyte.cloud.storage.logs.type", - pattern = "(?i)^gcs$") - @Named("logStorageConfigs") - public CloudStorageConfigs gcsLogStorageConfigs( - @Value("${airbyte.cloud.storage.logs.gcs.bucket}") final String gcsLogBucket, - @Value("${airbyte.cloud.storage.logs.gcs.application-credentials}") final String googleApplicationCredentials) { - return CloudStorageConfigs.gcs(new GcsConfig(gcsLogBucket, googleApplicationCredentials)); - } - - @Singleton - @Requires(property = "airbyte.cloud.storage.logs.type", - pattern = "(?i)^minio$") - @Named("logStorageConfigs") - public CloudStorageConfigs minioLogStorageConfigs( - @Value("${airbyte.cloud.storage.logs.minio.access-key}") final String awsAccessKeyId, - @Value("${airbyte.cloud.storage.logs.minio.secret-access-key}") final String awsSecretAccessKey, - @Value("${airbyte.cloud.storage.logs.minio.bucket}") final String s3LogBucket, - @Value("${airbyte.cloud.storage.logs.minio.endpoint}") final String s3MinioEndpoint) { - return CloudStorageConfigs.minio(new MinioConfig(s3LogBucket, awsAccessKeyId, awsSecretAccessKey, s3MinioEndpoint)); - } - - @Singleton - @Requires(property = "airbyte.cloud.storage.logs.type", - pattern = "(?i)^s3$") - @Named("logStorageConfigs") - public CloudStorageConfigs s3LogStorageConfigs( - @Value("${airbyte.cloud.storage.logs.s3.access-key}") final String awsAccessKeyId, - @Value("${airbyte.cloud.storage.logs.s3.secret-access-key}") final String awsSecretAccessKey, - @Value("${airbyte.cloud.storage.logs.s3.bucket}") final String s3LogBucket, - @Value("${airbyte.cloud.storage.logs.s3.region}") final String s3LogBucketRegion) { - return CloudStorageConfigs.s3(new S3Config(s3LogBucket, awsAccessKeyId, awsSecretAccessKey, s3LogBucketRegion)); - } - - @Singleton - @Requires(property = "airbyte.cloud.storage.state.type", - pattern = "(?i)^gcs$") - 
@Named("stateStorageConfigs") - public CloudStorageConfigs gcsStateStorageConfiguration( - @Value("${airbyte.cloud.storage.state.gcs.bucket}") final String gcsBucketName, - @Value("${airbyte.cloud.storage.state.gcs.application-credentials}") final String gcsApplicationCredentials) { - return CloudStorageConfigs.gcs(new GcsConfig(gcsBucketName, gcsApplicationCredentials)); - } - - @Singleton - @Requires(property = "airbyte.cloud.storage.state.type", - pattern = "(?i)^minio$") - @Named("stateStorageConfigs") - public CloudStorageConfigs minioStateStorageConfiguration( - @Value("${airbyte.cloud.storage.state.minio.bucket}") final String bucketName, - @Value("${airbyte.cloud.storage.state.minio.access-key}") final String awsAccessKey, - @Value("${airbyte.cloud.storage.state.minio.secret-access-key}") final String secretAccessKey, - @Value("${airbyte.cloud.storage.state.minio.endpoint}") final String endpoint) { - return CloudStorageConfigs.minio(new MinioConfig(bucketName, awsAccessKey, secretAccessKey, endpoint)); - } - - @Singleton - @Requires(property = "airbyte.cloud.storage.state.type", - pattern = "(?i)^s3$") - @Named("stateStorageConfigs") - public CloudStorageConfigs s3StateStorageConfiguration( - @Value("${airbyte.cloud.storage.state.s3.bucket}") final String bucketName, - @Value("${airbyte.cloud.storage.state.s3.access-key}") final String awsAccessKey, - @Value("${airbyte.cloud.storage.state.s3.secret-access-key}") final String secretAcessKey, - @Value("${airbyte.cloud.storage.state.s3.region}") final String s3Region) { - return CloudStorageConfigs.s3(new S3Config(bucketName, awsAccessKey, secretAcessKey, s3Region)); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java deleted file mode 100644 index 326ef184562d..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java +++ /dev/null @@ -1,125 
+0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.config; - -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.StatePersistence; -import io.airbyte.config.persistence.StreamResetPersistence; -import io.airbyte.db.Database; -import io.airbyte.db.check.DatabaseMigrationCheck; -import io.airbyte.db.check.impl.JobsDatabaseAvailabilityCheck; -import io.airbyte.db.factory.DatabaseCheckFactory; -import io.airbyte.db.instance.DatabaseConstants; -import io.airbyte.persistence.job.DefaultJobPersistence; -import io.airbyte.persistence.job.JobPersistence; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Value; -import io.micronaut.flyway.FlywayConfigurationProperties; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import javax.sql.DataSource; -import lombok.extern.slf4j.Slf4j; -import org.flywaydb.core.Flyway; -import org.jooq.DSLContext; - -/** - * Micronaut bean factory for database-related singletons. 
- */ -@Factory -@Slf4j -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class DatabaseBeanFactory { - - private static final String BASELINE_DESCRIPTION = "Baseline from file-based migration v1"; - private static final Boolean BASELINE_ON_MIGRATION = true; - private static final String INSTALLED_BY = "ServerApp"; - - @Singleton - @Named("configDatabase") - public Database configDatabase(@Named("config") final DSLContext dslContext) throws IOException { - return new Database(dslContext); - } - - @Singleton - @Named("configFlyway") - public Flyway configFlyway(@Named("config") final FlywayConfigurationProperties configFlywayConfigurationProperties, - @Named("config") final DataSource configDataSource, - @Value("${airbyte.flyway.configs.minimum-migration-version}") final String baselineVersion) { - return configFlywayConfigurationProperties.getFluentConfiguration() - .dataSource(configDataSource) - .baselineVersion(baselineVersion) - .baselineDescription(BASELINE_DESCRIPTION) - .baselineOnMigrate(BASELINE_ON_MIGRATION) - .installedBy(INSTALLED_BY) - .table(String.format("airbyte_%s_migrations", "configs")) - .load(); - } - - @Singleton - @Named("jobsFlyway") - public Flyway jobsFlyway(@Named("jobs") final FlywayConfigurationProperties jobsFlywayConfigurationProperties, - @Named("config") final DataSource jobsDataSource, - @Value("${airbyte.flyway.jobs.minimum-migration-version}") final String baselineVersion) { - return jobsFlywayConfigurationProperties.getFluentConfiguration() - .dataSource(jobsDataSource) - .baselineVersion(baselineVersion) - .baselineDescription(BASELINE_DESCRIPTION) - .baselineOnMigrate(BASELINE_ON_MIGRATION) - .installedBy(INSTALLED_BY) - .table(String.format("airbyte_%s_migrations", "jobs")) - .load(); - } - - @Singleton - public ConfigRepository configRepository(@Named("configDatabase") final Database configDatabase) { - return new ConfigRepository(configDatabase); - } - - @Singleton - public JobPersistence 
jobPersistence(@Named("configDatabase") final Database jobDatabase) { - return new DefaultJobPersistence(jobDatabase); - } - - @Singleton - public StatePersistence statePersistence(@Named("configDatabase") final Database configDatabase) { - return new StatePersistence(configDatabase); - } - - @Singleton - @Named("configsDatabaseMigrationCheck") - public DatabaseMigrationCheck configsDatabaseMigrationCheck(@Named("config") final DSLContext dslContext, - @Named("configFlyway") final Flyway configsFlyway, - @Value("${airbyte.flyway.configs.minimum-migration-version}") final String configsDatabaseMinimumFlywayMigrationVersion, - @Value("${airbyte.flyway.configs.initialization-timeout-ms}") final Long configsDatabaseInitializationTimeoutMs) { - log.info("Configs database configuration: {} {}", configsDatabaseMinimumFlywayMigrationVersion, configsDatabaseInitializationTimeoutMs); - return DatabaseCheckFactory - .createConfigsDatabaseMigrationCheck(dslContext, configsFlyway, configsDatabaseMinimumFlywayMigrationVersion, - configsDatabaseInitializationTimeoutMs); - } - - @Singleton - @Named("jobsDatabaseMigrationCheck") - public DatabaseMigrationCheck jobsDatabaseMigrationCheck(@Named("config") final DSLContext dslContext, - @Named("jobsFlyway") final Flyway jobsFlyway, - @Value("${airbyte.flyway.jobs.minimum-migration-version}") final String jobsDatabaseMinimumFlywayMigrationVersion, - @Value("${airbyte.flyway.jobs.initialization-timeout-ms}") final Long jobsDatabaseInitializationTimeoutMs) { - return DatabaseCheckFactory - .createJobsDatabaseMigrationCheck(dslContext, jobsFlyway, jobsDatabaseMinimumFlywayMigrationVersion, - jobsDatabaseInitializationTimeoutMs); - } - - @Singleton - @Named("jobsDatabaseAvailabilityCheck") - public JobsDatabaseAvailabilityCheck jobsDatabaseAvailabilityCheck(@Named("config") final DSLContext dslContext) { - return new JobsDatabaseAvailabilityCheck(dslContext, DatabaseConstants.DEFAULT_ASSERT_DATABASE_TIMEOUT_MS); - } - - @Singleton - public 
StreamResetPersistence streamResetPersistence(@Named("configDatabase") final Database configDatabase) { - return new StreamResetPersistence(configDatabase); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/HelperBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/HelperBeanFactory.java deleted file mode 100644 index d5493f2a6939..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/config/HelperBeanFactory.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.config; - -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.micronaut.context.annotation.Factory; -import jakarta.inject.Singleton; - -@Factory -public class HelperBeanFactory { - - @Singleton - public WorkspaceHelper workspaceHelper(final ConfigRepository configRepository, final JobPersistence jobPersistence) { - return new WorkspaceHelper(configRepository, jobPersistence); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/JobErrorReportingBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/JobErrorReportingBeanFactory.java deleted file mode 100644 index a02f36a90339..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/config/JobErrorReportingBeanFactory.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.config; - -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.WebUrlHelper; -import io.airbyte.persistence.job.errorreporter.JobErrorReporter; -import io.airbyte.persistence.job.errorreporter.JobErrorReportingClient; -import io.airbyte.persistence.job.errorreporter.LoggingJobErrorReportingClient; -import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper; -import io.airbyte.persistence.job.errorreporter.SentryJobErrorReportingClient; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.annotation.Value; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.util.Optional; - -/** - * Micronaut bean factory for job error reporting-related singletons. - */ -@Factory -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class JobErrorReportingBeanFactory { - - @Singleton - @Requires(property = "airbyte.worker.job.error-reporting.strategy", - pattern = "(?i)^sentry$") - @Named("jobErrorReportingClient") - public JobErrorReportingClient sentryJobErrorReportingClient( - @Value("${airbyte.worker.job.error-reporting.sentry.dsn}") final String sentryDsn) { - return new SentryJobErrorReportingClient(sentryDsn, new SentryExceptionHelper()); - } - - @Singleton - @Requires(property = "airbyte.worker.job.error-reporting.strategy", - pattern = "(?i)^logging$") - @Named("jobErrorReportingClient") - public JobErrorReportingClient loggingJobErrorReportingClient() { - return new LoggingJobErrorReportingClient(); - } - - @Singleton - public JobErrorReporter jobErrorReporter( - @Value("${airbyte.version}") final String airbyteVersion, - final ConfigRepository configRepository, - final DeploymentMode deploymentMode, - @Named("jobErrorReportingClient") final Optional jobErrorReportingClient, - final WebUrlHelper webUrlHelper) { - return new JobErrorReporter( 
- configRepository, - deploymentMode, - airbyteVersion, - webUrlHelper, - jobErrorReportingClient.orElseGet(() -> new LoggingJobErrorReportingClient())); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/SecretPersistenceBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/SecretPersistenceBeanFactory.java deleted file mode 100644 index ce96b37dbce0..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/config/SecretPersistenceBeanFactory.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.config; - -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.split_secrets.AWSSecretManagerPersistence; -import io.airbyte.config.persistence.split_secrets.GoogleSecretManagerPersistence; -import io.airbyte.config.persistence.split_secrets.LocalTestingSecretPersistence; -import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHydrator; -import io.airbyte.config.persistence.split_secrets.VaultSecretPersistence; -import io.airbyte.db.Database; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.annotation.Value; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.util.Optional; - -/** - * Micronaut bean factory for secret persistence-related singletons. 
- */ -@Factory -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -public class SecretPersistenceBeanFactory { - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^(?!testing_config_db_table).*") - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^(?!google_secret_manager).*") - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^(?!vault).*") - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^(?!aws_secret_manager).*") - @Named("secretPersistence") - public SecretPersistence defaultSecretPersistence(@Named("configDatabase") final Database configDatabase) { - return localTestingSecretPersistence(configDatabase); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^testing_config_db_table$") - @Named("secretPersistence") - public SecretPersistence localTestingSecretPersistence(@Named("configDatabase") final Database configDatabase) { - return new LocalTestingSecretPersistence(configDatabase); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^testing_config_db_table$") - @Named("ephemeralSecretPersistence") - public SecretPersistence ephemeralLocalTestingSecretPersistence(@Named("configDatabase") final Database configDatabase) { - return new LocalTestingSecretPersistence(configDatabase); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^google_secret_manager$") - @Named("secretPersistence") - public SecretPersistence googleSecretPersistence(@Value("${airbyte.secret.store.gcp.credentials}") final String credentials, - @Value("${airbyte.secret.store.gcp.project-id}") final String projectId) { - return GoogleSecretManagerPersistence.getLongLived(projectId, credentials); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^google_secret_manager$") - @Named("ephemeralSecretPersistence") - public SecretPersistence 
ephemeralGoogleSecretPersistence(@Value("${airbyte.secret.store.gcp.credentials}") final String credentials, - @Value("${airbyte.secret.store.gcp.project-id}") final String projectId) { - return GoogleSecretManagerPersistence.getEphemeral(projectId, credentials); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^vault$") - @Named("secretPersistence") - public SecretPersistence vaultSecretPersistence(@Value("${airbyte.secret.store.vault.address}") final String address, - @Value("${airbyte.secret.store.vault.prefix}") final String prefix, - @Value("${airbyte.secret.store.vault.token}") final String token) { - return new VaultSecretPersistence(address, prefix, token); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^vault$") - @Named("ephemeralSecretPersistence") - public SecretPersistence ephemeralVaultSecretPersistence(@Value("${airbyte.secret.store.vault.address}") final String address, - @Value("${airbyte.secret.store.vault.prefix}") final String prefix, - @Value("${airbyte.secret.store.vault.token}") final String token) { - return new VaultSecretPersistence(address, prefix, token); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^aws_secret_manager$") - @Named("secretPersistence") - public SecretPersistence awsSecretPersistence(@Value("${airbyte.secret.store.aws.access-key}") final String awsAccessKey, - @Value("${airbyte.secret.store.aws.secret-key}") final String awsSecretKey) { - return new AWSSecretManagerPersistence(awsAccessKey, awsSecretKey); - } - - @Singleton - @Requires(property = "airbyte.secret.persistence", - pattern = "(?i)^aws_secret_manager$") - @Named("ephemeralSecretPersistence") - public SecretPersistence ephemeralAwsSecretPersistence(@Value("${airbyte.secret.store.aws.access-key}") final String awsAccessKey, - @Value("${airbyte.secret.store.aws.secret-key}") final String awsSecretKey) { - return new 
AWSSecretManagerPersistence(awsAccessKey, awsSecretKey); - } - - @Singleton - public SecretsHydrator secretsHydrator(@Named("secretPersistence") final SecretPersistence secretPersistence) { - return new RealSecretsHydrator(secretPersistence); - } - - @Singleton - public SecretsRepositoryReader secretsRepositoryReader(final ConfigRepository configRepository, final SecretsHydrator secretsHydrator) { - return new SecretsRepositoryReader(configRepository, secretsHydrator); - } - - @Singleton - public SecretsRepositoryWriter secretsRepositoryWriter(final ConfigRepository configRepository, - @Named("secretPersistence") final Optional secretPersistence, - @Named("ephemeralSecretPersistence") final Optional ephemeralSecretPersistence) { - return new SecretsRepositoryWriter(configRepository, secretPersistence, ephemeralSecretPersistence); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java deleted file mode 100644 index 5e7565b74d21..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.config; - -import io.airbyte.analytics.Deployment; -import io.airbyte.analytics.TrackingClient; -import io.airbyte.analytics.TrackingClientSingleton; -import io.airbyte.commons.server.scheduler.DefaultSynchronousSchedulerClient; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.commons.temporal.TemporalClient; -import io.airbyte.commons.temporal.scheduling.DefaultTaskQueueMapper; -import io.airbyte.commons.temporal.scheduling.RouterService; -import io.airbyte.commons.temporal.scheduling.TaskQueueMapper; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.DeploymentMode; -import io.airbyte.config.Configs.TrackingStrategy; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.errorreporter.JobErrorReporter; -import io.airbyte.persistence.job.factory.OAuthConfigSupplier; -import io.airbyte.persistence.job.tracker.JobTracker; -import io.micronaut.context.annotation.Factory; -import io.micronaut.context.annotation.Value; -import jakarta.inject.Singleton; -import java.io.IOException; - -/** - * Micronaut bean factory for Temporal-related singletons. 
- */ -@Factory -public class TemporalBeanFactory { - - @Singleton - public TrackingClient trackingClient(final TrackingStrategy trackingStrategy, - final DeploymentMode deploymentMode, - final JobPersistence jobPersistence, - final WorkerEnvironment workerEnvironment, - @Value("${airbyte.role}") final String airbyteRole, - final AirbyteVersion airbyteVersion, - final ConfigRepository configRepository) - throws IOException { - - TrackingClientSingleton.initialize( - trackingStrategy, - new Deployment(deploymentMode, jobPersistence.getDeployment().orElseThrow(), - workerEnvironment), - airbyteRole, - airbyteVersion, - configRepository); - - return TrackingClientSingleton.get(); - } - - @Singleton - public OAuthConfigSupplier oAuthConfigSupplier(final ConfigRepository configRepository, final TrackingClient trackingClient) { - return new OAuthConfigSupplier(configRepository, trackingClient); - } - - @Singleton - public TaskQueueMapper taskQueueMapper() { - return new DefaultTaskQueueMapper(); - } - - @Singleton - public SynchronousSchedulerClient synchronousSchedulerClient(final TemporalClient temporalClient, - final JobTracker jobTracker, - final JobErrorReporter jobErrorReporter, - final OAuthConfigSupplier oAuthConfigSupplier, - final RouterService routerService) { - return new DefaultSynchronousSchedulerClient(temporalClient, jobTracker, jobErrorReporter, oAuthConfigSupplier, routerService); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidInputExceptionHandler.java b/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidInputExceptionHandler.java deleted file mode 100644 index 36b0dffbba5b..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidInputExceptionHandler.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.errors; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.errors.InvalidInputExceptionMapper; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Produces; -import io.micronaut.http.server.exceptions.ExceptionHandler; -import jakarta.inject.Singleton; -import javax.validation.ConstraintViolationException; - -// https://www.baeldung.com/jersey-bean-validation#custom-exception-handler -// handles exceptions related to the request body not matching the openapi config. -@Produces -@Singleton -@Requires(classes = ConstraintViolationException.class) -public class InvalidInputExceptionHandler implements ExceptionHandler { - - @Override - public HttpResponse handle(final HttpRequest request, final ConstraintViolationException exception) { - return HttpResponse.status(HttpStatus.BAD_REQUEST) - .body(Jsons.serialize(InvalidInputExceptionMapper.infoFromConstraints(exception))) - .contentType(MediaType.APPLICATION_JSON_TYPE); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidJsonExceptionHandler.java b/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidJsonExceptionHandler.java deleted file mode 100644 index 4231f7777668..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidJsonExceptionHandler.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.errors; - -import com.fasterxml.jackson.core.JsonParseException; -import io.airbyte.commons.server.errors.KnownException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Produces; -import io.micronaut.http.server.exceptions.ExceptionHandler; -import jakarta.inject.Singleton; - -@Produces -@Singleton -@Requires(classes = JsonParseException.class) -public class InvalidJsonExceptionHandler implements ExceptionHandler { - - @Override - public HttpResponse handle(final HttpRequest request, final JsonParseException exception) { - return HttpResponse.status(HttpStatus.UNPROCESSABLE_ENTITY) - .body( - KnownException.infoFromThrowableWithMessage(exception, "Invalid json. " + exception.getMessage() + " " + exception.getOriginalMessage())) - .contentType(MediaType.APPLICATION_JSON_TYPE); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidJsonInputExceptionHandler.java b/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidJsonInputExceptionHandler.java deleted file mode 100644 index 958a77bdb084..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/errors/InvalidJsonInputExceptionHandler.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.errors; - -import com.fasterxml.jackson.databind.JsonMappingException; -import io.airbyte.commons.server.errors.KnownException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Produces; -import io.micronaut.http.server.exceptions.ExceptionHandler; -import jakarta.inject.Singleton; - -@Produces -@Singleton -@Requires(classes = JsonMappingException.class) -public class InvalidJsonInputExceptionHandler implements ExceptionHandler { - - @Override - public HttpResponse handle(final HttpRequest request, final JsonMappingException exception) { - return HttpResponse.status(HttpStatus.UNPROCESSABLE_ENTITY) - .body(KnownException.infoFromThrowableWithMessage(exception, - "Invalid json input. " + exception.getMessage() + " " + exception.getOriginalMessage())) - .contentType(MediaType.APPLICATION_JSON_TYPE); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/errors/KnownExceptionHandler.java b/airbyte-server/src/main/java/io/airbyte/server/errors/KnownExceptionHandler.java deleted file mode 100644 index 6a3f420cc359..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/errors/KnownExceptionHandler.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.errors; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.errors.KnownException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Produces; -import io.micronaut.http.server.exceptions.ExceptionHandler; -import jakarta.inject.Singleton; - -@Produces -@Singleton -@Requires(classes = KnownException.class) -public class KnownExceptionHandler implements ExceptionHandler { - - @Override - public HttpResponse handle(HttpRequest request, KnownException exception) { - return HttpResponse.status(HttpStatus.valueOf(exception.getHttpCode())) - .body(Jsons.serialize(exception.getKnownExceptionInfo())) - .contentType(MediaType.APPLICATION_JSON_TYPE); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/errors/NotFoundExceptionHandler.java b/airbyte-server/src/main/java/io/airbyte/server/errors/NotFoundExceptionHandler.java deleted file mode 100644 index f4333d790673..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/errors/NotFoundExceptionHandler.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.errors; - -import io.airbyte.commons.server.errors.IdNotFoundKnownException; -import io.airbyte.commons.server.errors.KnownException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Produces; -import io.micronaut.http.server.exceptions.ExceptionHandler; -import jakarta.inject.Singleton; -import javax.ws.rs.NotFoundException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Produces -@Singleton -@Requires(classes = NotFoundException.class) -public class NotFoundExceptionHandler implements ExceptionHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(NotFoundExceptionHandler.class); - - @Override - public HttpResponse handle(final HttpRequest request, final NotFoundException exception) { - final IdNotFoundKnownException idnf = new IdNotFoundKnownException("Object not found. " + exception.getMessage(), exception); - LOGGER.error("Not found exception", idnf.getNotFoundKnownExceptionInfo()); - - return HttpResponse.status(HttpStatus.NOT_FOUND) - .body(KnownException.infoFromThrowableWithMessage(exception, "Internal Server Error: " + exception.getMessage())) - .contentType(MediaType.APPLICATION_JSON); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/errors/UncaughtExceptionHandler.java b/airbyte-server/src/main/java/io/airbyte/server/errors/UncaughtExceptionHandler.java deleted file mode 100644 index 7441fe2dbadd..000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/errors/UncaughtExceptionHandler.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.errors; - -import io.airbyte.commons.server.errors.KnownException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.annotation.Produces; -import io.micronaut.http.server.exceptions.ExceptionHandler; -import jakarta.inject.Singleton; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Produces -@Singleton -@Requires(classes = Throwable.class) -public class UncaughtExceptionHandler implements ExceptionHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(UncaughtExceptionHandler.class); - - @Override - public HttpResponse handle(final HttpRequest request, final Throwable exception) { - LOGGER.error("Uncaught exception", exception); - return HttpResponse.status(HttpStatus.INTERNAL_SERVER_ERROR) - .body(KnownException.infoFromThrowableWithMessage(exception, "Internal Server Error: " + exception.getMessage())) - .contentType(MediaType.APPLICATION_JSON); - } - -} diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml deleted file mode 100644 index 7f22450b01c2..000000000000 --- a/airbyte-server/src/main/resources/application.yml +++ /dev/null @@ -1,162 +0,0 @@ -micronaut: - application: - name: airbyte-server - security: - authentication-provider-strategy: ALL - enabled: ${API_AUTHORIZATION_ENABLED:false} - server: - port: 8001 - cors: - enabled: true - configurations: - web: - allowedOrigins: - - ^.*$ -airbyte: - cloud: - storage: - logs: - type: ${WORKER_LOGS_STORAGE_TYPE:} - gcs: - application-credentials: ${GOOGLE_APPLICATION_CREDENTIALS:} - bucket: ${GCS_LOG_BUCKET:} - minio: - access-key: ${AWS_ACCESS_KEY_ID:} - bucket: ${S3_LOG_BUCKET:} - endpoint: ${S3_MINIO_ENDPOINT:} - secret-access-key: ${AWS_SECRET_ACCESS_KEY:} - s3: - access-key: ${AWS_ACCESS_KEY_ID:} - 
bucket: ${S3_LOG_BUCKET:} - region: ${S3_LOG_BUCKET_REGION:} - secret-access-key: ${AWS_SECRET_ACCESS_KEY:} - state: - type: ${WORKER_STATE_STORAGE_TYPE:} - gcs: - application-credentials: ${STATE_STORAGE_GCS_APPLICATION_CREDENTIALS:} - bucket: ${STATE_STORAGE_GCS_BUCKET_NAME:} - minio: - access-key: ${STATE_STORAGE_MINIO_ACCESS_KEY:} - bucket: ${STATE_STORAGE_MINIO_BUCKET_NAME:} - endpoint: ${STATE_STORAGE_MINIO_ENDPOINT:} - secret-access-key: ${STATE_STORAGE_MINIO_SECRET_ACCESS_KEY:} - s3: - access-key: ${STATE_STORAGE_S3_ACCESS_KEY:} - bucket: ${STATE_STORAGE_S3_BUCKET_NAME:} - region: ${STATE_STORAGE_S3_REGION:} - secret-access-key: ${STATE_STORAGE_S3_SECRET_ACCESS_KEY:} - deployment-mode: ${DEPLOYMENT_MODE:OSS} - flyway: - configs: - initialization-timeout-ms: ${CONFIGS_DATABASE_INITIALIZATION_TIMEOUT_MS:60000} - minimum-migration-version: ${CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION} - jobs: - initialization-timeout-ms: ${JOBS_DATABASE_INITIALIZATION_TIMEOUT_MS:60000} - minimum-migration-version: ${JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION} - version: ${AIRBYTE_VERSION} - secret: - persistence: ${SECRET_PERSISTENCE:TESTING_CONFIG_DB_TABLE} - store: - aws: - access-key: ${AWS_ACCESS_KEY:} - secret-key: ${AWS_SECRET_ACCESS_KEY:} - gcp: - credentials: ${SECRET_STORE_GCP_CREDENTIALS:} - project-id: ${SECRET_STORE_GCP_PROJECT_ID:} - vault: - address: ${VAULT_ADDRESS:} - prefix: ${VAULT_PREFIX:} - token: ${VAULT_AUTH_TOKEN:} - role: ${AIRBYTE_ROLE:dev} - tracking-strategy: ${TRACKING_STRATEGY:LOGGING} - worker: - job: - error-reporting: - sentry: - dsn: ${JOB_ERROR_REPORTING_SENTRY_DSN} - strategy: ${JOB_ERROR_REPORTING_STRATEGY:LOGGING} - web-app: - url: ${WEBAPP_URL:} - workspace: - root: ${WORKSPACE_ROOT} - protocol: - min-version: ${AIRBYTE_PROTOCOL_VERSION_MIN:0.0.0} - max-version: ${AIRBYTE_PROTOCOL_VERSION_MAX:0.3.0} - -temporal: - cloud: - client: - cert: ${TEMPORAL_CLOUD_CLIENT_CERT:} - key: ${TEMPORAL_CLOUD_CLIENT_KEY:} - enabled: 
${TEMPORAL_CLOUD_ENABLED:false} - host: ${TEMPORAL_CLOUD_HOST:} - namespace: ${TEMPORAL_CLOUD_NAMESPACE:} - host: ${TEMPORAL_HOST:`airbyte-temporal:7233`} - retention: ${TEMPORAL_HISTORY_RETENTION_IN_DAYS:30} - -datasources: - config: - connection-test-query: SELECT 1 - connection-timeout: 30000 - maximum-pool-size: 10 - minimum-idle: 0 - idle-timeout: 600000 - initialization-fail-timeout: -1 # Disable fail fast checking to avoid issues due to other pods not being started in time - url: ${DATABASE_URL} - driverClassName: org.postgresql.Driver - username: ${DATABASE_USER} - password: ${DATABASE_PASSWORD} - -endpoints: - beans: - enabled: true - sensitive: false - env: - enabled: true - sensitive: false - health: - enabled: true - sensitive: false - info: - enabled: true - sensitive: true - loggers: - enabled: true - sensitive: true - refresh: - enabled: false - sensitive: true - routes: - enabled: true - sensitive: false - threaddump: - enabled: true - sensitive: true - -flyway: - enabled: true - datasources: - config: - enabled: false - locations: - - "classpath:io/airbyte/db/instance/configs/migrations" - jobs: - enabled: false - locations: - - "classpath:io/airbyte/db/instance/jobs/migrations" - -jooq: - datasources: - config: - jackson-converter-enabled: true - sql-dialect: POSTGRES - jobs: - jackson-converter-enabled: true - sql-dialect: POSTGRES - -logger: - levels: - # Uncomment to help resolve issues with conditional beans - # io.micronaut.context.condition: DEBUG - # Uncomment to help resolve issues with security beans - # io.micronaut.security: DEBUG diff --git a/airbyte-server/src/main/resources/micronaut-banner.txt b/airbyte-server/src/main/resources/micronaut-banner.txt deleted file mode 100644 index 83ea519237a6..000000000000 --- a/airbyte-server/src/main/resources/micronaut-banner.txt +++ /dev/null @@ -1,11 +0,0 @@ - - ___ _ __ __ - / | (_)____/ /_ __ __/ /____ - / /| | / / ___/ __ \/ / / / __/ _ \ - / ___ |/ / / / /_/ / /_/ / /_/ __/ -/_/ |_/_/_/ 
/_.___/\__, /\__/\___/ - /____/ - : airbyte-server : --------------------------------------------------- - Will soon be available at http://localhost:8000/ --------------------------------------------------- diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/HealthCheckApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/HealthCheckApiTest.java deleted file mode 100644 index 34db9e8540fa..000000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/HealthCheckApiTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.HealthCheckRead; -import io.airbyte.commons.server.handlers.HealthCheckHandler; -import org.junit.jupiter.api.Test; - -class HealthCheckApiTest { - - @Test - void testImportDefinitions() { - final HealthCheckHandler healthCheckHandler = mock(HealthCheckHandler.class); - when(healthCheckHandler.health()) - .thenReturn(new HealthCheckRead().available( - false)); - - final HealthApiController configurationApi = new HealthApiController(healthCheckHandler); - - assertFalse(configurationApi.getHealthCheck().getAvailable()); - } - -} diff --git a/airbyte-temporal/.gitignore b/airbyte-temporal/.gitignore deleted file mode 100644 index 1fbd8688c413..000000000000 --- a/airbyte-temporal/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -scripts/*.tar.gz -scripts/temporal-* diff --git a/airbyte-temporal/Dockerfile b/airbyte-temporal/Dockerfile deleted file mode 100644 index 3ed5b4896570..000000000000 --- a/airbyte-temporal/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -# A test describe in the README is available to test a version update -FROM airbyte/temporal-auto-setup:1.13.0 - -ENV TEMPORAL_HOME /etc/temporal - -COPY bin/scripts/update-and-start-temporal.sh 
update-and-start-temporal.sh - -ENTRYPOINT ["./update-and-start-temporal.sh"] diff --git a/airbyte-temporal/README.md b/airbyte-temporal/README.md deleted file mode 100644 index d085652ea733..000000000000 --- a/airbyte-temporal/README.md +++ /dev/null @@ -1,44 +0,0 @@ -# airbyte-temporal - -This module implements a custom version of what the Temporal autosetup image is doing. Because Temporal does not recommend the autosetup be used in production, we had to add some modifications. It ensures that the temporalDB schema will get upgraded if the temporal version is updated. - -## Testing a temporal migration - -`tools/bin/test_temporal_migration.sh` is available to test that a bump of the temporal version won't break the docker compose build. Here is what -the script does: -- checkout master -- build the docker image -- run docker compose up in the background -- Sleep for 75 secondes -- shutdown docker compose -- checkout the commit being tested -- build the docker image -- run docker compose up. - -At the end of the script you should be able to access a local airbyte in `localhost:8000`. - -## Apple Silicon (M1) Support - -Airbyte publishes an image called [airbyte/temporal-auto-setup](https://hub.docker.com/r/airbyte/temporal-auto-setup/tags) which is built for both -Intel-based and ARM-based systems. - -This is because at the time of this writing, Temporal only offers their [temporalio/auto-setup](https://hub.docker.com/r/temporalio/auto-setup) image -for Intel-based (amd64) systems. 
- -Airbyte re-publishes this image -as [airbyte/temporal-auto-setup:1.13.0-amd64](https://hub.docker.com/layers/airbyte/temporal-auto-setup/1.13.0-amd64/images/sha256-46da05b202e2fa66d9c3f5af5a31b954979d8132c4f67300e884bdad8a45b94d?context=explore) -, and also runs the `build-temporal.sh` script in this repository on an ARM-based system to build and -publish [airbyte/temporal-auto-setup:1.13.0-arm64](https://hub.docker.com/layers/airbyte/temporal-auto-setup/1.13.0-arm64/images/sha256-05027f6a9ba658205c5e961165bb8dad55c95ae0a009eddbf491d12f3d84fe20?context=explore) -. - -Finally, Airbyte creates and publishes a manifest list with both images -as [airbyte/temporal-auto-setup:1.13.0](https://hub.docker.com/layers/airbyte/temporal-auto-setup/1.13.0/images/sha256-46da05b202e2fa66d9c3f5af5a31b954979d8132c4f67300e884bdad8a45b94d?context=explore) -like so: - -```bash -docker manifest create airbyte/temporal-auto-setup:1.13.0 \ ---amend airbyte/temporal-auto-setup:1.13.0-amd64 \ ---amend airbyte/temporal-auto-setup:1.13.0-arm64 -``` - -This process will need to be replicated for any future version upgrades beyond `1.13.0`. See the [original issue](https://github.com/airbytehq/airbyte/issues/8849) for more info. 
diff --git a/airbyte-temporal/build.gradle b/airbyte-temporal/build.gradle deleted file mode 100644 index e82af665d64d..000000000000 --- a/airbyte-temporal/build.gradle +++ /dev/null @@ -1,10 +0,0 @@ -task copyScripts(type: Copy) { - dependsOn copyDocker - - from('scripts') - into 'build/docker/bin/scripts' -} - -tasks.named("buildDockerImage") { - dependsOn copyScripts -} diff --git a/airbyte-temporal/gradle.properties b/airbyte-temporal/gradle.properties deleted file mode 100644 index 4dd7cdd06c6a..000000000000 --- a/airbyte-temporal/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=temporal diff --git a/airbyte-temporal/scripts/build-temporal.sh b/airbyte-temporal/scripts/build-temporal.sh deleted file mode 100755 index 3a9d5b7f873d..000000000000 --- a/airbyte-temporal/scripts/build-temporal.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -TEMPORAL_VERSION=1.13.0 - -curl -OL https://github.com/temporalio/temporal/archive/refs/tags/v"$TEMPORAL_VERSION".tar.gz -tar -xvf v"$TEMPORAL_VERSION".tar.gz -cd temporal-"$TEMPORAL_VERSION" && docker build . 
-t airbyte/temporal-auto-setup:"$TEMPORAL_VERSION" --build-arg TARGET=auto-setup -rm -rf ../temporal-"$TEMPORAL_VERSION" ../v"$TEMPORAL_VERSION".tar.gz diff --git a/airbyte-temporal/scripts/update-and-start-temporal.sh b/airbyte-temporal/scripts/update-and-start-temporal.sh deleted file mode 100755 index 64da337936c4..000000000000 --- a/airbyte-temporal/scripts/update-and-start-temporal.sh +++ /dev/null @@ -1,134 +0,0 @@ -#!/usr/bin/env bash - -DBNAME="${DBNAME:-temporal}" -VISIBILITY_DBNAME="${VISIBILITY_DBNAME:-temporal_visibility}" -DB_PORT="${DB_PORT:-3306}" - -POSTGRES_SEEDS="${POSTGRES_SEEDS:-}" -POSTGRES_USER="${POSTGRES_USER:-}" -POSTGRES_PWD="${POSTGRES_PWD:-}" - -SCHEMA_DIR=${TEMPORAL_HOME}/schema/postgresql/v96/temporal/versioned -VISIBILITY_SCHEMA_DIR=${TEMPORAL_HOME}/schema/postgresql/v96/visibility/versioned - -SKIP_DEFAULT_NAMESPACE_CREATION="${SKIP_DEFAULT_NAMESPACE_CREATION:-false}" -DEFAULT_NAMESPACE="${DEFAULT_NAMESPACE:-default}" -DEFAULT_NAMESPACE_RETENTION=${DEFAULT_NAMESPACE_RETENTION:-1} - -# See https://github.com/temporalio/temporal/blob/release/v1.13.x/docker/entrypoint.sh -init_entry_point() { - echo "Start init" - export BIND_ON_IP="${BIND_ON_IP:-$(hostname -i)}" - - if [[ "${BIND_ON_IP}" =~ ":" ]]; then - # ipv6 - export TEMPORAL_CLI_ADDRESS="[${BIND_ON_IP}]:7233" - else - # ipv4 - export TEMPORAL_CLI_ADDRESS="${BIND_ON_IP}:7233" - fi - - dockerize -template ./config/config_template.yaml:./config/docker.yaml - echo "Done init" -} - -wait_for_postgres() { - until nc -z "${POSTGRES_SEEDS%%,*}" "${DB_PORT}"; do - echo 'Waiting for PostgreSQL to startup.' - sleep 1 - done - - echo 'PostgreSQL started.' -} - -update_postgres_schema() { - { export SQL_PASSWORD=${POSTGRES_PWD}; } 2> /dev/null - - CONTAINER_ALREADY_STARTED="CONTAINER_ALREADY_STARTED_PLACEHOLDER" - if [ ! 
-e $CONTAINER_ALREADY_STARTED ]; then - touch $CONTAINER_ALREADY_STARTED - temporal-sql-tool --plugin postgres --ep "${POSTGRES_SEEDS}" -u "${POSTGRES_USER}" -p "${DB_PORT}" create --db "${DBNAME}" - temporal-sql-tool --plugin postgres --ep "${POSTGRES_SEEDS}" -u "${POSTGRES_USER}" -p "${DB_PORT}" --db "${DBNAME}" setup-schema -v 0.0 - - - temporal-sql-tool --plugin postgres --ep "${POSTGRES_SEEDS}" -u "${POSTGRES_USER}" -p "${DB_PORT}" create --db "${VISIBILITY_DBNAME}" - temporal-sql-tool --plugin postgres --ep "${POSTGRES_SEEDS}" -u "${POSTGRES_USER}" -p "${DB_PORT}" --db "${VISIBILITY_DBNAME}" setup-schema -v 0.0 - fi - echo "Starting to update the temporal DB" - temporal-sql-tool --plugin postgres --ep "${POSTGRES_SEEDS}" -u "${POSTGRES_USER}" -p "${DB_PORT}" --db "${DBNAME}" update-schema -d "${SCHEMA_DIR}" - echo "Update the temporal DB is done" - - echo "Starting to update the temporal visibility DB" - temporal-sql-tool --plugin postgres --ep "${POSTGRES_SEEDS}" -u "${POSTGRES_USER}" -p "${DB_PORT}" --db "${VISIBILITY_DBNAME}" update-schema -d "${VISIBILITY_SCHEMA_DIR}" - echo "Update the temporal visibility DB is done" - -} - -setup_server(){ - echo "Temporal CLI address: ${TEMPORAL_CLI_ADDRESS}." - - until tctl cluster health | grep SERVING; do - echo "Waiting for Temporal server to start..." - sleep 1 - done - echo "Temporal server started." - - if [ "${SKIP_DEFAULT_NAMESPACE_CREATION}" != true ]; then - register_default_namespace - fi - - if [ "${SKIP_ADD_CUSTOM_SEARCH_ATTRIBUTES}" != true ]; then - add_custom_search_attributes - fi -} -register_default_namespace() { - echo "Registering default namespace: ${DEFAULT_NAMESPACE}." - if ! tctl --ns "${DEFAULT_NAMESPACE}" namespace describe; then - echo "Default namespace ${DEFAULT_NAMESPACE} not found. Creating..." - tctl --ns "${DEFAULT_NAMESPACE}" namespace register --rd "${DEFAULT_NAMESPACE_RETENTION}" --desc "Default namespace for Temporal Server." 
- echo "Default namespace ${DEFAULT_NAMESPACE} registration complete." - else - echo "Default namespace ${DEFAULT_NAMESPACE} already registered." - fi -} - -add_custom_search_attributes() { - echo "Adding Custom*Field search attributes." - # TODO: Remove CustomStringField -# @@@SNIPSTART add-custom-search-attributes-for-testing-command - tctl --auto_confirm admin cluster add-search-attributes \ - --name CustomKeywordField --type Keyword \ - --name CustomStringField --type Text \ - --name CustomTextField --type Text \ - --name CustomIntField --type Int \ - --name CustomDatetimeField --type Datetime \ - --name CustomDoubleField --type Double \ - --name CustomBoolField --type Bool -# @@@SNIPEND -} - -setup_server(){ - echo "Temporal CLI address: ${TEMPORAL_CLI_ADDRESS}." - - until tctl cluster health | grep SERVING; do - echo "Waiting for Temporal server to start..." - sleep 1 - done - echo "Temporal server started." - - if [ "${SKIP_DEFAULT_NAMESPACE_CREATION}" != true ]; then - register_default_namespace - fi - - if [ "${SKIP_ADD_CUSTOM_SEARCH_ATTRIBUTES}" != true ]; then - add_custom_search_attributes - fi -} - -init_entry_point -wait_for_postgres -update_postgres_schema - -echo "starting temporal server" -setup_server & -./start-temporal.sh diff --git a/airbyte-test-utils/build.gradle b/airbyte-test-utils/build.gradle index aaa2980c6dca..228304f11d2c 100644 --- a/airbyte-test-utils/build.gradle +++ b/airbyte-test-utils/build.gradle @@ -10,7 +10,6 @@ configurations.all { dependencies { api project(':airbyte-db:db-lib') implementation project(':airbyte-api') - implementation project(':airbyte-commons-temporal') implementation project(':airbyte-commons-worker') implementation 'io.fabric8:kubernetes-client:5.12.2' diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java deleted file mode 100644 index 2dd4cca40ad5..000000000000 --- 
a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java +++ /dev/null @@ -1,952 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.test.utils; - -import static java.lang.Thread.sleep; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import com.google.common.io.Resources; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.JobsApi; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.AttemptInfoRead; -import io.airbyte.api.client.model.generated.ConnectionCreate; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.client.model.generated.ConnectionRead; -import io.airbyte.api.client.model.generated.ConnectionScheduleData; -import io.airbyte.api.client.model.generated.ConnectionScheduleType; -import io.airbyte.api.client.model.generated.ConnectionState; -import io.airbyte.api.client.model.generated.ConnectionStatus; -import io.airbyte.api.client.model.generated.ConnectionUpdate; -import io.airbyte.api.client.model.generated.CustomDestinationDefinitionCreate; -import io.airbyte.api.client.model.generated.CustomSourceDefinitionCreate; -import io.airbyte.api.client.model.generated.DestinationCreate; -import io.airbyte.api.client.model.generated.DestinationDefinitionCreate; -import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationDefinitionUpdate; -import io.airbyte.api.client.model.generated.DestinationIdRequestBody; -import io.airbyte.api.client.model.generated.DestinationRead; -import 
io.airbyte.api.client.model.generated.DestinationSyncMode; -import io.airbyte.api.client.model.generated.Geography; -import io.airbyte.api.client.model.generated.JobConfigType; -import io.airbyte.api.client.model.generated.JobDebugInfoRead; -import io.airbyte.api.client.model.generated.JobIdRequestBody; -import io.airbyte.api.client.model.generated.JobListRequestBody; -import io.airbyte.api.client.model.generated.JobRead; -import io.airbyte.api.client.model.generated.JobStatus; -import io.airbyte.api.client.model.generated.JobWithAttemptsRead; -import io.airbyte.api.client.model.generated.NamespaceDefinitionType; -import io.airbyte.api.client.model.generated.OperationCreate; -import io.airbyte.api.client.model.generated.OperationIdRequestBody; -import io.airbyte.api.client.model.generated.OperationRead; -import io.airbyte.api.client.model.generated.OperatorConfiguration; -import io.airbyte.api.client.model.generated.OperatorNormalization; -import io.airbyte.api.client.model.generated.OperatorType; -import io.airbyte.api.client.model.generated.SourceCreate; -import io.airbyte.api.client.model.generated.SourceDefinitionCreate; -import io.airbyte.api.client.model.generated.SourceDefinitionRead; -import io.airbyte.api.client.model.generated.SourceDefinitionUpdate; -import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRequestBody; -import io.airbyte.api.client.model.generated.SourceIdRequestBody; -import io.airbyte.api.client.model.generated.SourceRead; -import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.api.client.model.generated.WebBackendConnectionUpdate; -import io.airbyte.api.client.model.generated.WebBackendOperationCreateOrUpdate; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.commons.temporal.TemporalWorkflowUtils; -import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow; -import 
io.airbyte.commons.temporal.scheduling.state.WorkflowState; -import io.airbyte.commons.util.MoreProperties; -import io.airbyte.db.Database; -import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.test.container.AirbyteTestContainer; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClient; -import io.temporal.client.WorkflowClient; -import io.temporal.serviceclient.WorkflowServiceStubs; -import java.io.File; -import java.io.IOException; -import java.net.Inet4Address; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.UnknownHostException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.sql.SQLException; -import java.time.Duration; -import java.time.Instant; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import org.jooq.JSONB; -import org.jooq.Record; -import org.jooq.Result; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.Assertions; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; -import org.testcontainers.utility.MountableFile; - -/** - * This class contains containers used for acceptance tests. Some of those containers/states are - * only used when the test are run without GKE. Specific environmental variables govern what types - * of containers are run. - *

- * This class is put in a separate module to be easily pulled in as a dependency for Airbyte Cloud - * Acceptance Tests. - *

- * Containers and states include: - *

  • source postgres SQL
  • - *
  • destination postgres SQL
  • - *
  • {@link AirbyteTestContainer}
  • - *
  • kubernetes client
  • - *
  • lists of UUIDS representing IDs of sources, destinations, connections, and operations
  • - */ -public class AirbyteAcceptanceTestHarness { - - private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteAcceptanceTestHarness.class); - - private static final String DOCKER_COMPOSE_FILE_NAME = "docker-compose.yaml"; - // assume env file is one directory level up from airbyte-tests. - private final static File ENV_FILE = Path.of(System.getProperty("user.dir")).getParent().resolve(".env").toFile(); - - private static final DockerImageName DESTINATION_POSTGRES_IMAGE_NAME = DockerImageName.parse("postgres:13-alpine"); - - private static final DockerImageName SOURCE_POSTGRES_IMAGE_NAME = DockerImageName.parse("debezium/postgres:13-alpine") - .asCompatibleSubstituteFor("postgres"); - - private static final String SOURCE_E2E_TEST_CONNECTOR_VERSION = "0.1.2"; - private static final String DESTINATION_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; - - public static final String POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION = "0.4.26"; - - private static final String OUTPUT_NAMESPACE_PREFIX = "output_namespace_"; - private static final String OUTPUT_NAMESPACE = OUTPUT_NAMESPACE_PREFIX + "${SOURCE_NAMESPACE}"; - private static final String OUTPUT_STREAM_PREFIX = "output_table_"; - private static final String TABLE_NAME = "id_and_name"; - public static final String STREAM_NAME = TABLE_NAME; - public static final String COLUMN_ID = "id"; - public static final String COLUMN_NAME = "name"; - private static final String COLUMN_NAME_DATA = "_airbyte_data"; - private static final String SOURCE_USERNAME = "sourceusername"; - public static final String SOURCE_PASSWORD = "hunter2"; - public static final String PUBLIC_SCHEMA_NAME = "public"; - public static final String STAGING_SCHEMA_NAME = "staging"; - public static final String COOL_EMPLOYEES_TABLE_NAME = "cool_employees"; - public static final String AWESOME_PEOPLE_TABLE_NAME = "awesome_people"; - - private static final String DEFAULT_POSTGRES_INIT_SQL_FILE = "postgres_init.sql"; - - // Used for bypassing SSL modification 
for db configs - private static final String IS_TEST = "is_test"; - - private static boolean isKube; - private static boolean isMinikube; - private static boolean isGke; - private static boolean isMac; - private static boolean useExternalDeployment; - - /** - * When the acceptance tests are run against a local instance of docker-compose or KUBE then these - * test containers are used. When we run these tests in GKE, we spawn a source and destination - * postgres database ane use them for testing. - */ - private PostgreSQLContainer sourcePsql; - private PostgreSQLContainer destinationPsql; - private AirbyteTestContainer airbyteTestContainer; - private AirbyteApiClient apiClient; - private final UUID defaultWorkspaceId; - private final String postgresSqlInitFile; - - private KubernetesClient kubernetesClient; - - private List sourceIds; - private List connectionIds; - private List destinationIds; - private List operationIds; - - public PostgreSQLContainer getSourcePsql() { - return sourcePsql; - } - - public KubernetesClient getKubernetesClient() { - return kubernetesClient; - } - - public void removeConnection(final UUID connection) { - connectionIds.remove(connection); - } - - public void setApiClient(final AirbyteApiClient apiClient) { - this.apiClient = apiClient; - } - - public AirbyteAcceptanceTestHarness(final AirbyteApiClient apiClient, final UUID defaultWorkspaceId, final String postgresSqlInitFile) - throws URISyntaxException, IOException, InterruptedException { - // reads env vars to assign static variables - assignEnvVars(); - this.apiClient = apiClient; - this.defaultWorkspaceId = defaultWorkspaceId; - this.postgresSqlInitFile = postgresSqlInitFile; - - if (isGke && !isKube) { - throw new RuntimeException("KUBE Flag should also be enabled if GKE flag is enabled"); - } - if (!isGke) { - sourcePsql = new PostgreSQLContainer(SOURCE_POSTGRES_IMAGE_NAME) - .withUsername(SOURCE_USERNAME) - .withPassword(SOURCE_PASSWORD); - sourcePsql.start(); - - 
destinationPsql = new PostgreSQLContainer(DESTINATION_POSTGRES_IMAGE_NAME); - destinationPsql.start(); - } - - if (isKube) { - kubernetesClient = new DefaultKubernetesClient(); - } - - // by default use airbyte deployment governed by a test container. - if (!useExternalDeployment) { - LOGGER.info("Using deployment of airbyte managed by test containers."); - airbyteTestContainer = new AirbyteTestContainer.Builder(new File(Resources.getResource(DOCKER_COMPOSE_FILE_NAME).toURI())) - .setEnv(MoreProperties.envFileToProperties(ENV_FILE)) - // override env VERSION to use dev to test current build of airbyte. - .setEnvVariable("VERSION", "dev") - // override to use test mounts. - .setEnvVariable("DATA_DOCKER_MOUNT", "airbyte_data_migration_test") - .setEnvVariable("DB_DOCKER_MOUNT", "airbyte_db_migration_test") - .setEnvVariable("WORKSPACE_DOCKER_MOUNT", "airbyte_workspace_migration_test") - .setEnvVariable("LOCAL_ROOT", "/tmp/airbyte_local_migration_test") - .setEnvVariable("LOCAL_DOCKER_MOUNT", "/tmp/airbyte_local_migration_test") - .build(); - airbyteTestContainer.startBlocking(); - } else { - LOGGER.info("Using external deployment of airbyte."); - } - } - - public AirbyteAcceptanceTestHarness(final AirbyteApiClient apiClient, final UUID defaultWorkspaceId) - throws URISyntaxException, IOException, InterruptedException { - this(apiClient, defaultWorkspaceId, DEFAULT_POSTGRES_INIT_SQL_FILE); - } - - public void stopDbAndContainers() { - if (!isGke) { - sourcePsql.stop(); - destinationPsql.stop(); - } - - if (airbyteTestContainer != null) { - airbyteTestContainer.stop(); - } - } - - public void setup() throws SQLException, URISyntaxException, IOException { - sourceIds = Lists.newArrayList(); - connectionIds = Lists.newArrayList(); - destinationIds = Lists.newArrayList(); - operationIds = Lists.newArrayList(); - - if (isGke) { - // seed database. 
- final Database database = getSourceDatabase(); - final Path path = Path.of(MoreResources.readResourceAsFile(postgresSqlInitFile).toURI()); - final StringBuilder query = new StringBuilder(); - for (final String line : java.nio.file.Files.readAllLines(path, StandardCharsets.UTF_8)) { - if (line != null && !line.isEmpty()) { - query.append(line); - } - } - database.query(context -> context.execute(query.toString())); - } else { - PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource(postgresSqlInitFile), sourcePsql); - - destinationPsql = new PostgreSQLContainer("postgres:13-alpine"); - destinationPsql.start(); - } - } - - public void cleanup() { - try { - clearSourceDbData(); - clearDestinationDbData(); - - for (final UUID operationId : operationIds) { - deleteOperation(operationId); - } - - for (final UUID connectionId : connectionIds) { - disableConnection(connectionId); - } - - for (final UUID sourceId : sourceIds) { - deleteSource(sourceId); - } - - for (final UUID destinationId : destinationIds) { - deleteDestination(destinationId); - } - if (!isGke) { - destinationPsql.stop(); - } - } catch (final Exception e) { - LOGGER.error("Error tearing down test fixtures:", e); - } - } - - private void assignEnvVars() { - isKube = System.getenv().containsKey("KUBE"); - isMinikube = System.getenv().containsKey("IS_MINIKUBE"); - isGke = System.getenv().containsKey("IS_GKE"); - isMac = System.getProperty("os.name").startsWith("Mac"); - useExternalDeployment = - System.getenv("USE_EXTERNAL_DEPLOYMENT") != null && - System.getenv("USE_EXTERNAL_DEPLOYMENT").equalsIgnoreCase("true"); - } - - private WorkflowClient getWorkflowClient() { - final TemporalUtils temporalUtils = new TemporalUtils(null, null, null, null, null, null, null); - final WorkflowServiceStubs temporalService = temporalUtils.createTemporalService( - TemporalWorkflowUtils.getAirbyteTemporalOptions("localhost:7233"), - TemporalUtils.DEFAULT_NAMESPACE); - return 
WorkflowClient.newInstance(temporalService); - } - - public WorkflowState getWorkflowState(final UUID connectionId) { - final WorkflowClient workflowCLient = getWorkflowClient(); - - // check if temporal workflow is reachable - final ConnectionManagerWorkflow connectionManagerWorkflow = - workflowCLient.newWorkflowStub(ConnectionManagerWorkflow.class, "connection_manager_" + connectionId); - - return connectionManagerWorkflow.getState(); - } - - public void terminateTemporalWorkflow(final UUID connectionId) { - final WorkflowClient workflowCLient = getWorkflowClient(); - - // check if temporal workflow is reachable - getWorkflowState(connectionId); - - // Terminate workflow - LOGGER.info("Terminating temporal workflow..."); - workflowCLient.newUntypedWorkflowStub("connection_manager_" + connectionId).terminate(""); - - // remove connection to avoid exception during tear down - connectionIds.remove(connectionId); - } - - public AirbyteCatalog discoverSourceSchema(final UUID sourceId) throws ApiException { - return apiClient.getSourceApi().discoverSchemaForSource(new SourceDiscoverSchemaRequestBody().sourceId(sourceId)).getCatalog(); - } - - public AirbyteCatalog discoverSourceSchemaWithoutCache(final UUID sourceId) throws ApiException { - return apiClient.getSourceApi().discoverSchemaForSource( - new SourceDiscoverSchemaRequestBody().sourceId(sourceId).disableCache(true)).getCatalog(); - } - - public void assertSourceAndDestinationDbInSync(final boolean withScdTable) throws Exception { - final Database source = getSourceDatabase(); - final Set sourceTables = listAllTables(source); - final Set sourceTablesWithRawTablesAdded = addAirbyteGeneratedTables(withScdTable, sourceTables); - final Database destination = getDestinationDatabase(); - final Set destinationTables = listAllTables(destination); - assertEquals(sourceTablesWithRawTablesAdded, destinationTables, - String.format("streams did not match.\n source stream names: %s\n destination stream names: %s\n", 
sourceTables, destinationTables)); - - for (final SchemaTableNamePair pair : sourceTables) { - final List sourceRecords = retrieveSourceRecords(source, pair.getFullyQualifiedTableName()); - assertRawDestinationContains(sourceRecords, pair); - } - } - - public Database getSourceDatabase() { - if (isKube && isGke) { - return GKEPostgresConfig.getSourceDatabase(); - } - return getDatabase(sourcePsql); - } - - public Database getDestinationDatabase() { - if (isKube && isGke) { - return GKEPostgresConfig.getDestinationDatabase(); - } - return getDatabase(destinationPsql); - } - - public Database getDatabase(final PostgreSQLContainer db) { - return new Database(DatabaseConnectionHelper.createDslContext(db, SQLDialect.POSTGRES)); - } - - public Set listAllTables(final Database database) throws SQLException { - return database.query( - context -> { - final Result fetch = - context.fetch( - "SELECT tablename, schemaname FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'"); - return fetch.stream() - .map(record -> { - final var schemaName = (String) record.get("schemaname"); - final var tableName = (String) record.get("tablename"); - return new SchemaTableNamePair(schemaName, tableName); - }) - .collect(Collectors.toSet()); - }); - } - - private Set addAirbyteGeneratedTables(final boolean withScdTable, final Set sourceTables) { - return sourceTables.stream().flatMap(x -> { - final String cleanedNameStream = x.tableName().replace(".", "_"); - final List explodedStreamNames = new ArrayList<>(List.of( - new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName(), - String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)), - new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName(), String.format("%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)))); - if (withScdTable) { - explodedStreamNames - .add(new SchemaTableNamePair("_airbyte_" + OUTPUT_NAMESPACE_PREFIX + x.schemaName(), - 
String.format("%s%s_stg", OUTPUT_STREAM_PREFIX, cleanedNameStream))); - explodedStreamNames - .add(new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName(), - String.format("%s%s_scd", OUTPUT_STREAM_PREFIX, cleanedNameStream))); - } - return explodedStreamNames.stream(); - }).collect(Collectors.toSet()); - } - - public void assertRawDestinationContains(final List sourceRecords, final SchemaTableNamePair pair) throws Exception { - final Set destinationRecords = new HashSet<>(retrieveRawDestinationRecords(pair)); - - assertEquals(sourceRecords.size(), destinationRecords.size(), - String.format("destination contains: %s record. source contains: %s, \nsource records %s \ndestination records: %s", - destinationRecords.size(), sourceRecords.size(), sourceRecords, destinationRecords)); - - for (final JsonNode sourceStreamRecord : sourceRecords) { - assertTrue(destinationRecords.contains(sourceStreamRecord), - String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", - sourceStreamRecord, destinationRecords)); - } - } - - public void assertNormalizedDestinationContains(final List sourceRecords) throws Exception { - final Database destination = getDestinationDatabase(); - final String finalDestinationTable = String.format("%spublic.%s%s", OUTPUT_NAMESPACE_PREFIX, OUTPUT_STREAM_PREFIX, STREAM_NAME.replace(".", "_")); - final List destinationRecords = retrieveSourceRecords(destination, finalDestinationTable); - - assertEquals(sourceRecords.size(), destinationRecords.size(), - String.format("destination contains: %s record. 
source contains: %s", sourceRecords.size(), destinationRecords.size())); - - for (final JsonNode sourceStreamRecord : sourceRecords) { - assertTrue( - destinationRecords.stream() - .anyMatch(r -> r.get(COLUMN_NAME).asText().equals(sourceStreamRecord.get(COLUMN_NAME).asText()) - && r.get(COLUMN_ID).asInt() == sourceStreamRecord.get(COLUMN_ID).asInt()), - String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", sourceStreamRecord, destinationRecords)); - } - } - - /** - * Assert that the normalized destination matches the input records, only expecting a single id - * column. - * - * @param sourceRecords - * @throws Exception - */ - public void assertNormalizedDestinationContainsIdColumn(final List sourceRecords) throws Exception { - final Database destination = getDestinationDatabase(); - final String finalDestinationTable = String.format("%spublic.%s%s", OUTPUT_NAMESPACE_PREFIX, OUTPUT_STREAM_PREFIX, STREAM_NAME.replace(".", "_")); - final List destinationRecords = retrieveSourceRecords(destination, finalDestinationTable); - - assertEquals(sourceRecords.size(), destinationRecords.size(), - String.format("destination contains: %s record. 
source contains: %s", sourceRecords.size(), destinationRecords.size())); - - for (final JsonNode sourceStreamRecord : sourceRecords) { - assertTrue( - destinationRecords.stream() - .anyMatch(r -> r.get(COLUMN_ID).asInt() == sourceStreamRecord.get(COLUMN_ID).asInt()), - String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", sourceStreamRecord, destinationRecords)); - } - } - - public ConnectionRead createConnection(final String name, - final UUID sourceId, - final UUID destinationId, - final List operationIds, - final AirbyteCatalog catalog, - final ConnectionScheduleType scheduleType, - final ConnectionScheduleData scheduleData) - throws ApiException { - return createConnectionWithGeography(name, sourceId, destinationId, operationIds, catalog, scheduleType, scheduleData, Geography.AUTO); - } - - public ConnectionRead createConnectionWithGeography(final String name, - final UUID sourceId, - final UUID destinationId, - final List operationIds, - final AirbyteCatalog catalog, - final ConnectionScheduleType scheduleType, - final ConnectionScheduleData scheduleData, - final Geography geography) - throws ApiException { - final ConnectionRead connection = apiClient.getConnectionApi().createConnection( - new ConnectionCreate() - .status(ConnectionStatus.ACTIVE) - .sourceId(sourceId) - .destinationId(destinationId) - .syncCatalog(catalog) - .scheduleType(scheduleType) - .scheduleData(scheduleData) - .operationIds(operationIds) - .name(name) - .namespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) - .namespaceFormat(OUTPUT_NAMESPACE) - .prefix(OUTPUT_STREAM_PREFIX) - .geography(geography)); - connectionIds.add(connection.getConnectionId()); - return connection; - } - - public void updateConnectionSchedule( - final UUID connectionId, - final ConnectionScheduleType newScheduleType, - final ConnectionScheduleData newScheduleData) - throws ApiException { - apiClient.getConnectionApi().updateConnection( - new ConnectionUpdate() - 
.connectionId(connectionId) - .scheduleType(newScheduleType) - .scheduleData(newScheduleData)); - } - - public void updateConnectionCatalog(final UUID connectionId, final AirbyteCatalog catalog) throws ApiException { - apiClient.getConnectionApi().updateConnection( - new ConnectionUpdate() - .connectionId(connectionId) - .syncCatalog(catalog)); - } - - public DestinationRead createPostgresDestination(final boolean isLegacy) throws ApiException { - return createDestination( - "AccTestDestination-" + UUID.randomUUID(), - defaultWorkspaceId, - getPostgresDestinationDefinitionId(), - getDestinationDbConfig(isLegacy)); - } - - public DestinationRead createPostgresDestination() throws ApiException { - return createPostgresDestination(false); - } - - public DestinationRead createDestination(final String name, - final UUID workspaceId, - final UUID destinationDefId, - final JsonNode destinationConfig) - throws ApiException { - final DestinationRead destination = - apiClient.getDestinationApi().createDestination(new DestinationCreate() - .name(name) - .connectionConfiguration(Jsons.jsonNode(destinationConfig)) - .workspaceId(workspaceId) - .destinationDefinitionId(destinationDefId)); - destinationIds.add(destination.getDestinationId()); - return destination; - } - - public OperationRead createOperation() throws ApiException { - final OperatorConfiguration normalizationConfig = new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION).normalization(new OperatorNormalization().option( - OperatorNormalization.OptionEnum.BASIC)); - - final OperationCreate operationCreate = new OperationCreate() - .workspaceId(defaultWorkspaceId) - .name("AccTestDestination-" + UUID.randomUUID()).operatorConfiguration(normalizationConfig); - - final OperationRead operation = apiClient.getOperationApi().createOperation(operationCreate); - operationIds.add(operation.getOperationId()); - return operation; - } - - public List retrieveSourceRecords(final Database database, final String 
table) throws SQLException { - return database.query(context -> context.fetch(String.format("SELECT * FROM %s;", table))) - .stream() - .map(Record::intoMap) - .map(Jsons::jsonNode) - .collect(Collectors.toList()); - } - - private List retrieveDestinationRecords(final Database database, final String table) throws SQLException { - return database.query(context -> context.fetch(String.format("SELECT * FROM %s;", table))) - .stream() - .map(Record::intoMap) - .map(r -> r.get(COLUMN_NAME_DATA)) - .map(f -> (JSONB) f) - .map(JSONB::data) - .map(Jsons::deserialize) - .map(Jsons::jsonNode) - .collect(Collectors.toList()); - } - - public List retrieveRawDestinationRecords(final SchemaTableNamePair pair) throws Exception { - final Database destination = getDestinationDatabase(); - final Set namePairs = listAllTables(destination); - - final String rawStreamName = String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, pair.tableName().replace(".", "_")); - final SchemaTableNamePair rawTablePair = new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + pair.schemaName(), rawStreamName); - assertTrue(namePairs.contains(rawTablePair), "can't find a non-normalized version (raw) of " + rawTablePair.getFullyQualifiedTableName()); - - return retrieveDestinationRecords(destination, rawTablePair.getFullyQualifiedTableName()); - } - - public JsonNode getSourceDbConfig(final boolean isLegacy) { - return getDbConfig(sourcePsql, false, false, isLegacy, Type.SOURCE); - } - - public JsonNode getSourceDbConfig() { - return getSourceDbConfig(false); - } - - public JsonNode getDestinationDbConfig(final boolean isLegacy) { - return getDbConfig(destinationPsql, false, true, isLegacy, Type.DESTINATION); - } - - public JsonNode getDestinationDbConfig() { - return getDestinationDbConfig(false); - } - - public JsonNode getDestinationDbConfigWithHiddenPassword() { - return getDbConfig(destinationPsql, true, true, false, Type.DESTINATION); - } - - public JsonNode getDbConfig(final PostgreSQLContainer 
psql, - final boolean hiddenPassword, - final boolean withSchema, - final boolean isLegacy, - final Type connectorType) { - try { - final Map dbConfig = (isKube && isGke) ? GKEPostgresConfig.dbConfig(connectorType, hiddenPassword, withSchema) - : localConfig(psql, hiddenPassword, withSchema, isLegacy); - return Jsons.jsonNode(dbConfig); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - private Map localConfig(final PostgreSQLContainer psql, - final boolean hiddenPassword, - final boolean withSchema, - final boolean isLegacy) - throws UnknownHostException { - final Map dbConfig = new HashMap<>(); - // don't use psql.getHost() directly since the ip we need differs depending on environment - if (isKube) { - if (isMinikube) { - // used with minikube driver=none instance - dbConfig.put(JdbcUtils.HOST_KEY, Inet4Address.getLocalHost().getHostAddress()); - } else { - // used on a single node with docker driver - dbConfig.put(JdbcUtils.HOST_KEY, "host.docker.internal"); - } - } else if (isMac) { - dbConfig.put(JdbcUtils.HOST_KEY, "host.docker.internal"); - } else { - dbConfig.put(JdbcUtils.HOST_KEY, "localhost"); - } - - if (hiddenPassword) { - dbConfig.put(JdbcUtils.PASSWORD_KEY, "**********"); - } else { - dbConfig.put(JdbcUtils.PASSWORD_KEY, psql.getPassword()); - } - - dbConfig.put(JdbcUtils.PORT_KEY, psql.getFirstMappedPort()); - dbConfig.put(JdbcUtils.DATABASE_KEY, psql.getDatabaseName()); - dbConfig.put(JdbcUtils.USERNAME_KEY, psql.getUsername()); - - // bypasses the SSL modification for cloud acceptance tests. This use useful in cloud since it - // enforces most databases to have SSL on, but the postgres containers we use for testing does not - // allow SSL. 
- if (!isLegacy) { - dbConfig.put(IS_TEST, true); - } - dbConfig.put(JdbcUtils.SSL_KEY, false); - - if (withSchema) { - dbConfig.put(JdbcUtils.SCHEMA_KEY, "public"); - } - return dbConfig; - } - - public SourceDefinitionRead createE2eSourceDefinition(final UUID workspaceId) throws ApiException { - return apiClient.getSourceDefinitionApi().createCustomSourceDefinition(new CustomSourceDefinitionCreate() - .workspaceId(workspaceId) - .sourceDefinition(new SourceDefinitionCreate() - .name("E2E Test Source") - .dockerRepository("airbyte/source-e2e-test") - .dockerImageTag(SOURCE_E2E_TEST_CONNECTOR_VERSION) - .documentationUrl(URI.create("https://example.com")))); - } - - public DestinationDefinitionRead createE2eDestinationDefinition(final UUID workspaceId) throws ApiException { - return apiClient.getDestinationDefinitionApi().createCustomDestinationDefinition(new CustomDestinationDefinitionCreate() - .workspaceId(workspaceId) - .destinationDefinition(new DestinationDefinitionCreate() - .name("E2E Test Destination") - .dockerRepository("airbyte/destination-e2e-test") - .dockerImageTag(DESTINATION_E2E_TEST_CONNECTOR_VERSION) - .documentationUrl(URI.create("https://example.com")))); - } - - public SourceRead createPostgresSource(final boolean isLegacy) throws ApiException { - return createSource( - "acceptanceTestDb-" + UUID.randomUUID(), - defaultWorkspaceId, - getPostgresSourceDefinitionId(), - getSourceDbConfig(isLegacy)); - } - - public SourceRead createPostgresSource() throws ApiException { - return createPostgresSource(false); - } - - public SourceRead createSource(final String name, final UUID workspaceId, final UUID sourceDefId, final JsonNode sourceConfig) - throws ApiException { - final SourceRead source = apiClient.getSourceApi().createSource(new SourceCreate() - .name(name) - .sourceDefinitionId(sourceDefId) - .workspaceId(workspaceId) - .connectionConfiguration(sourceConfig)); - sourceIds.add(source.getSourceId()); - return source; - } - - public UUID 
getPostgresSourceDefinitionId() throws ApiException { - return apiClient.getSourceDefinitionApi().listSourceDefinitions().getSourceDefinitions() - .stream() - .filter(sourceRead -> "postgres".equalsIgnoreCase(sourceRead.getName())) - .findFirst() - .orElseThrow() - .getSourceDefinitionId(); - } - - public UUID getPostgresDestinationDefinitionId() throws ApiException { - return apiClient.getDestinationDefinitionApi().listDestinationDefinitions().getDestinationDefinitions() - .stream() - .filter(destRead -> "postgres".equalsIgnoreCase(destRead.getName())) - .findFirst() - .orElseThrow() - .getDestinationDefinitionId(); - } - - public void updateSourceDefinitionVersion(final UUID sourceDefinitionId, final String dockerImageTag) throws ApiException { - apiClient.getSourceDefinitionApi().updateSourceDefinition(new SourceDefinitionUpdate() - .sourceDefinitionId(sourceDefinitionId).dockerImageTag(dockerImageTag)); - } - - public void updateDestinationDefinitionVersion(final UUID destDefinitionId, final String dockerImageTag) throws ApiException { - apiClient.getDestinationDefinitionApi().updateDestinationDefinition(new DestinationDefinitionUpdate() - .destinationDefinitionId(destDefinitionId).dockerImageTag(dockerImageTag)); - } - - private void clearSourceDbData() throws SQLException { - final Database database = getSourceDatabase(); - final Set pairs = listAllTables(database); - for (final SchemaTableNamePair pair : pairs) { - database.query(context -> context.execute(String.format("DROP TABLE %s.%s", pair.schemaName(), pair.tableName()))); - } - } - - private void clearDestinationDbData() throws SQLException { - final Database database = getDestinationDatabase(); - final Set pairs = listAllTables(database); - for (final SchemaTableNamePair pair : pairs) { - database.query(context -> context.execute(String.format("DROP TABLE %s.%s CASCADE", pair.schemaName(), pair.tableName()))); - } - } - - private void disableConnection(final UUID connectionId) throws ApiException { - 
final ConnectionUpdate connectionUpdate = - new ConnectionUpdate().connectionId(connectionId).status(ConnectionStatus.DEPRECATED); - apiClient.getConnectionApi().updateConnection(connectionUpdate); - } - - private void deleteSource(final UUID sourceId) throws ApiException { - apiClient.getSourceApi().deleteSource(new SourceIdRequestBody().sourceId(sourceId)); - } - - private void deleteDestination(final UUID destinationId) throws ApiException { - apiClient.getDestinationApi().deleteDestination(new DestinationIdRequestBody().destinationId(destinationId)); - } - - private void deleteOperation(final UUID destinationId) throws ApiException { - apiClient.getOperationApi().deleteOperation(new OperationIdRequestBody().operationId(destinationId)); - } - - public JobRead getMostRecentSyncJobId(final UUID connectionId) throws Exception { - return apiClient.getJobsApi() - .listJobsFor(new JobListRequestBody().configId(connectionId.toString()).configTypes(List.of(JobConfigType.SYNC))) - .getJobs() - .stream().findFirst().map(JobWithAttemptsRead::getJob).orElseThrow(); - } - - public static void waitForSuccessfulJob(final JobsApi jobsApi, final JobRead originalJob) throws InterruptedException, ApiException { - final JobRead job = waitWhileJobHasStatus(jobsApi, originalJob, Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING)); - - if (!JobStatus.SUCCEEDED.equals(job.getStatus())) { - // If a job failed during testing, show us why. 
- final JobIdRequestBody id = new JobIdRequestBody(); - id.setId(originalJob.getId()); - for (final AttemptInfoRead attemptInfo : jobsApi.getJobInfo(id).getAttempts()) { - LOGGER.warn("Unsuccessful job attempt " + attemptInfo.getAttempt().getId() - + " with status " + job.getStatus() + " produced log output as follows: " + attemptInfo.getLogs().getLogLines()); - } - } - assertEquals(JobStatus.SUCCEEDED, job.getStatus()); - Thread.sleep(200); - } - - public static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, final JobRead originalJob, final Set jobStatuses) - throws InterruptedException, ApiException { - return waitWhileJobHasStatus(jobsApi, originalJob, jobStatuses, Duration.ofMinutes(6)); - } - - @SuppressWarnings("BusyWait") - public static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, - final JobRead originalJob, - final Set jobStatuses, - final Duration maxWaitTime) - throws InterruptedException, ApiException { - JobRead job = originalJob; - - final Instant waitStart = Instant.now(); - while (jobStatuses.contains(job.getStatus())) { - if (Duration.between(waitStart, Instant.now()).compareTo(maxWaitTime) > 0) { - LOGGER.info("Max wait time of {} has been reached. 
Stopping wait.", maxWaitTime); - break; - } - sleep(1000); - - job = jobsApi.getJobInfo(new JobIdRequestBody().id(job.getId())).getJob(); - LOGGER.info("waiting: job id: {} config type: {} status: {}", job.getId(), job.getConfigType(), job.getStatus()); - } - return job; - } - - @SuppressWarnings("BusyWait") - public static void waitWhileJobIsRunning(final JobsApi jobsApi, final JobRead job, final Duration maxWaitTime) - throws ApiException, InterruptedException { - final Instant waitStart = Instant.now(); - JobDebugInfoRead jobDebugInfoRead = jobsApi.getJobDebugInfo(new JobIdRequestBody().id(job.getId())); - LOGGER.info("workflow state: {}", jobDebugInfoRead.getWorkflowState()); - while (jobDebugInfoRead.getWorkflowState() != null && jobDebugInfoRead.getWorkflowState().getRunning()) { - if (Duration.between(waitStart, Instant.now()).compareTo(maxWaitTime) > 0) { - LOGGER.info("Max wait time of {} has been reached. Stopping wait.", maxWaitTime); - break; - } - LOGGER.info("waiting: job id: {}, workflowState.isRunning is still true", job.getId()); - sleep(1000); - jobDebugInfoRead = jobsApi.getJobDebugInfo(new JobIdRequestBody().id(job.getId())); - } - } - - @SuppressWarnings("BusyWait") - public static ConnectionState waitForConnectionState(final AirbyteApiClient apiClient, final UUID connectionId) - throws ApiException, InterruptedException { - ConnectionState connectionState = apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); - int count = 0; - while (count < 60 && (connectionState.getState() == null || connectionState.getState().isNull())) { - LOGGER.info("fetching connection state. 
attempt: {}", count++); - connectionState = apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); - sleep(1000); - } - return connectionState; - } - - public JobRead waitUntilTheNextJobIsStarted(final UUID connectionId) throws Exception { - final JobRead lastJob = getMostRecentSyncJobId(connectionId); - if (lastJob.getStatus() != JobStatus.SUCCEEDED) { - return lastJob; - } - - JobRead mostRecentSyncJob = getMostRecentSyncJobId(connectionId); - int count = 0; - while (count < 60 && mostRecentSyncJob.getId().equals(lastJob.getId())) { - Thread.sleep(Duration.ofSeconds(1).toMillis()); - mostRecentSyncJob = getMostRecentSyncJobId(connectionId); - ++count; - } - final boolean exceeded60seconds = count >= 60; - if (exceeded60seconds) { - // Fail because taking more than 60seconds to start a job is not expected - // Returning the current mostRecencSyncJob here could end up hiding some issues - Assertions.fail("unable to find the next job within 60seconds"); - } - - return mostRecentSyncJob; - } - - public enum Type { - SOURCE, - DESTINATION - } - - public void assertDestinationDbEmpty(final boolean withScdTable) throws Exception { - final Database source = getSourceDatabase(); - final Set sourceTables = listAllTables(source); - final Set sourceTablesWithRawTablesAdded = addAirbyteGeneratedTables(withScdTable, sourceTables); - final Database destination = getDestinationDatabase(); - final Set destinationTables = listAllTables(destination); - assertEquals(sourceTablesWithRawTablesAdded, destinationTables, - String.format("streams did not match.\n source stream names: %s\n destination stream names: %s\n", sourceTables, destinationTables)); - - for (final SchemaTableNamePair pair : sourceTables) { - final List sourceRecords = retrieveRawDestinationRecords(pair); - assertTrue(sourceRecords.isEmpty()); - } - } - - public void setIncrementalAppendSyncMode(final AirbyteCatalog airbyteCatalog, final List cursorField) { - 
airbyteCatalog.getStreams().forEach(stream -> { - stream.getConfig().syncMode(SyncMode.INCREMENTAL) - .destinationSyncMode(DestinationSyncMode.APPEND) - .cursorField(cursorField); - }); - } - - public WebBackendConnectionUpdate getUpdateInput(final ConnectionRead connection, final AirbyteCatalog catalog, final OperationRead operation) { - setIncrementalAppendSyncMode(catalog, List.of(COLUMN_ID)); - - return new WebBackendConnectionUpdate() - .connectionId(connection.getConnectionId()) - .name(connection.getName()) - .operations(List.of(new WebBackendOperationCreateOrUpdate() - .name(operation.getName()) - .operationId(operation.getOperationId()) - .workspaceId(operation.getWorkspaceId()) - .operatorConfiguration(operation.getOperatorConfiguration()))) - .namespaceDefinition(connection.getNamespaceDefinition()) - .namespaceFormat(connection.getNamespaceFormat()) - .syncCatalog(catalog) - .schedule(connection.getSchedule()) - .sourceCatalogId(connection.getSourceCatalogId()) - .status(connection.getStatus()) - .prefix(connection.getPrefix()) - .skipReset(false); - } - -} diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java deleted file mode 100644 index ef576689e620..000000000000 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.test.utils; - -import io.airbyte.db.Database; -import io.airbyte.db.factory.DSLContextFactory; -import io.airbyte.db.factory.DatabaseDriver; -import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.test.utils.AirbyteAcceptanceTestHarness.Type; -import java.util.HashMap; -import java.util.Map; -import org.jooq.SQLDialect; - -/** - * This class is used to provide information related to the test databases for running the - * {@link AirbyteAcceptanceTestHarness} on GKE. 
We launch 2 postgres databases in GKE as pods which - * act as source and destination and the tests run against them. In order to allow the test instance - * to connect to these databases we use port forwarding Refer - * tools/bin/gke-kube-acceptance-test/acceptance_test_kube_gke.sh for more info - */ -class GKEPostgresConfig { - - private static final String SOURCE_HOST = "postgres-source-svc"; - private static final String DESTINATION_HOST = "postgres-destination-svc"; - private static final Integer PORT = 5432; - private static final String USERNAME = "postgresadmin"; - private static final String PASSWORD = "admin123"; - private static final String DB = "postgresdb"; - - static Map dbConfig(final Type connectorType, final boolean hiddenPassword, final boolean withSchema) { - final Map dbConfig = new HashMap<>(); - dbConfig.put(JdbcUtils.HOST_KEY, connectorType == Type.SOURCE ? SOURCE_HOST : DESTINATION_HOST); - dbConfig.put(JdbcUtils.PASSWORD_KEY, hiddenPassword ? "**********" : PASSWORD); - - dbConfig.put(JdbcUtils.PORT_KEY, PORT); - dbConfig.put(JdbcUtils.DATABASE_KEY, DB); - dbConfig.put(JdbcUtils.USERNAME_KEY, USERNAME); - - if (withSchema) { - dbConfig.put(JdbcUtils.SCHEMA_KEY, "public"); - } - - return dbConfig; - } - - static Database getSourceDatabase() { - return new Database(DSLContextFactory.create(USERNAME, PASSWORD, DatabaseDriver.POSTGRESQL.getDriverClassName(), - "jdbc:postgresql://localhost:2000/postgresdb", SQLDialect.POSTGRES)); - } - - static Database getDestinationDatabase() { - return new Database(DSLContextFactory.create(USERNAME, PASSWORD, DatabaseDriver.POSTGRESQL.getDriverClassName(), - "jdbc:postgresql://localhost:4000/postgresdb", SQLDialect.POSTGRES)); - } - -} diff --git a/airbyte-tests/build.gradle b/airbyte-tests/build.gradle deleted file mode 100644 index 23084735b098..000000000000 --- a/airbyte-tests/build.gradle +++ /dev/null @@ -1,111 +0,0 @@ -plugins { - id 'java' -} - -// The java plugin automatically compiles/runs tests in 
the test source set (./src/test directory). Since we want acceptance tests to run -// only when explicitly requested, we put them in a separate source set, specify the sourceset's dependencies via configuration extensions below, -// and create a custom test task that can be invoked to run acceptance tests. -sourceSets { - acceptanceTests { - java { - srcDir("src/acceptanceTests/java") - } - resources { - srcDir("src/acceptanceTests/resources") - } - } - automaticMigrationAcceptanceTest { - java { - srcDir("src/automaticMigrationAcceptanceTest/java") - } - resources { - srcDir("src/automaticMigrationAcceptanceTest/resources") - } - } -} - -// Gradle links configurations with the name xImplementation or xRuntimeOnly etc.. to the source set named x. Therefore, any deps specified -// using the extensions below apply only to this sourceset and not any other code in the project. -configurations { - acceptanceTestsImplementation.extendsFrom testImplementation - acceptanceTestsRuntimeOnly.extendsFrom testRuntimeOnly - - automaticMigrationAcceptanceTestImplementation.extendsFrom testImplementation - automaticMigrationAcceptanceTestRuntimeOnly.extendsFrom testRuntimeOnly -} - -dependencies { - implementation project(':airbyte-api') - implementation project(':airbyte-container-orchestrator') - - implementation 'io.fabric8:kubernetes-client:5.12.2' - implementation libs.platform.testcontainers - - acceptanceTestsImplementation project(':airbyte-api') - acceptanceTestsImplementation project(':airbyte-commons') - acceptanceTestsImplementation project(':airbyte-commons-temporal') - acceptanceTestsImplementation project(':airbyte-config:config-models') - acceptanceTestsImplementation project(':airbyte-config:config-persistence') - acceptanceTestsImplementation project(':airbyte-db:db-lib') - acceptanceTestsImplementation project(':airbyte-tests') - acceptanceTestsImplementation project(':airbyte-test-utils') - acceptanceTestsImplementation project(':airbyte-commons-worker') - - 
acceptanceTestsImplementation 'com.fasterxml.jackson.core:jackson-databind' - acceptanceTestsImplementation 'io.github.cdimascio:java-dotenv:3.0.0' - acceptanceTestsImplementation libs.temporal.sdk - acceptanceTestsImplementation 'org.apache.commons:commons-csv:1.4' - acceptanceTestsImplementation libs.platform.testcontainers.postgresql - acceptanceTestsImplementation libs.postgresql - acceptanceTestsImplementation 'org.bouncycastle:bcprov-jdk15on:1.66' - acceptanceTestsImplementation 'org.bouncycastle:bcpkix-jdk15on:1.66' - - automaticMigrationAcceptanceTestImplementation project(':airbyte-api') - automaticMigrationAcceptanceTestImplementation project(':airbyte-commons') - automaticMigrationAcceptanceTestImplementation project(':airbyte-tests') - automaticMigrationAcceptanceTestImplementation project(':airbyte-test-utils') - - - automaticMigrationAcceptanceTestImplementation libs.platform.testcontainers -} - -// test should run using the current version of the docker compose configuration. 
-task copyComposeFileForAcceptanceTests(type: Copy) { - from "${rootDir}/docker-compose.yaml" - into "${sourceSets.acceptanceTests.output.resourcesDir}" -} -task copyComposeFileForMigrationAcceptanceTests(type: Copy) { - from "${rootDir}/docker-compose.yaml" - into "${sourceSets.automaticMigrationAcceptanceTest.output.resourcesDir}" -} - -assemble.dependsOn(project.tasks.copyComposeFileForAcceptanceTests) -assemble.dependsOn(project.tasks.copyComposeFileForMigrationAcceptanceTests) - -task acceptanceTests(type: Test) { - testClassesDirs += sourceSets.acceptanceTests.output.classesDirs - classpath += sourceSets.acceptanceTests.runtimeClasspath - useJUnitPlatform() - failFast = true - testLogging() { - events "passed", "failed" - exceptionFormat "full" - } - mustRunAfter test -} - -task automaticMigrationAcceptanceTest(type: Test) { - testClassesDirs += sourceSets.automaticMigrationAcceptanceTest.output.classesDirs - classpath += sourceSets.automaticMigrationAcceptanceTest.runtimeClasspath - useJUnitPlatform() - failFast = true - testLogging() { - events "passed", "failed" - exceptionFormat "full" - } - mustRunAfter test -} - -tasks.withType(Copy) { - duplicatesStrategy DuplicatesStrategy.INCLUDE -} diff --git a/airbyte-tests/readme.md b/airbyte-tests/readme.md deleted file mode 100644 index eb75f7da3fc1..000000000000 --- a/airbyte-tests/readme.md +++ /dev/null @@ -1,5 +0,0 @@ -# airbyte-tests - -This module contains two major test suites: -1. Acceptance Tests - These are feature-level tests that run as part of the build. They spin up Airbyte and test functionality by executing commands against the Airbyte Configuration API. It is possible to run them both on `docker-compose` and `kuberenetes`. We do both in the build. These tests are designed to verify that large features work in broad strokes. More detailed testing should happen in unit tests. -2. 
Auto Migration Acceptance Tests - These tests verify that it is possible to upgrade from older version of Airbyte (as far back as 0.17.0) all the way up to the current version. diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java deleted file mode 100644 index c8324e7b0cf4..000000000000 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.test.acceptance; - -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COLUMN_ID; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForConnectionState; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForSuccessfulJob; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitWhileJobHasStatus; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.AirbyteStream; -import io.airbyte.api.client.model.generated.AttemptInfoRead; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.client.model.generated.ConnectionScheduleType; -import io.airbyte.api.client.model.generated.ConnectionState; -import io.airbyte.api.client.model.generated.DestinationDefinitionIdRequestBody; -import 
io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationRead; -import io.airbyte.api.client.model.generated.DestinationSyncMode; -import io.airbyte.api.client.model.generated.JobIdRequestBody; -import io.airbyte.api.client.model.generated.JobInfoRead; -import io.airbyte.api.client.model.generated.JobRead; -import io.airbyte.api.client.model.generated.JobStatus; -import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.SourceDefinitionRead; -import io.airbyte.api.client.model.generated.SourceRead; -import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.MoreBooleans; -import io.airbyte.test.utils.AirbyteAcceptanceTestHarness; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; -import java.util.Collections; -import java.util.List; -import java.util.UUID; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.MethodOrderer; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestMethodOrder; -import org.junitpioneer.jupiter.RetryingTest; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * The class test for advanced platform functionality that can be affected by the networking - * difference between the Kube and Docker deployments i.e. distributed vs local processes. All tests - * in this class should pass when ran on either type of deployment. - *

    - * Tests use the {@link RetryingTest} annotation instead of the more common {@link Test} to allow - * multiple tries for a test to pass. This is because these tests sometimes fail transiently, and we - * haven't been able to fix that yet. - *

    - * However, in general we should prefer using {@code @Test} instead and only resort to using - * {@code @RetryingTest} for tests that we can't get to pass reliably. New tests should thus default - * to using {@code @Test} if possible. - *

    - * We order tests such that earlier tests test more basic behavior that is relied upon in later - * tests. e.g. We test that we can create a destination before we test whether we can sync data to - * it. - */ -@SuppressWarnings({"rawtypes", "ConstantConditions"}) -@TestMethodOrder(MethodOrderer.OrderAnnotation.class) -class AdvancedAcceptanceTests { - - private static final Logger LOGGER = LoggerFactory.getLogger(AdvancedAcceptanceTests.class); - private static final String TYPE = "type"; - private static final String COLUMN1 = "column1"; - - private static AirbyteAcceptanceTestHarness testHarness; - private static AirbyteApiClient apiClient; - private static UUID workspaceId; - - @SuppressWarnings("UnstableApiUsage") - @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { - apiClient = new AirbyteApiClient( - new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api")); - // work in whatever default workspace is present. - workspaceId = apiClient.getWorkspaceApi().listWorkspaces().getWorkspaces().get(0).getWorkspaceId(); - LOGGER.info("workspaceId = " + workspaceId); - - // log which connectors are being used. 
- final SourceDefinitionRead sourceDef = apiClient.getSourceDefinitionApi() - .getSourceDefinition(new SourceDefinitionIdRequestBody() - .sourceDefinitionId(UUID.fromString("decd338e-5647-4c0b-adf4-da0e75f5a750"))); - final DestinationDefinitionRead destinationDef = apiClient.getDestinationDefinitionApi() - .getDestinationDefinition(new DestinationDefinitionIdRequestBody() - .destinationDefinitionId(UUID.fromString("25c5221d-dce2-4163-ade9-739ef790f503"))); - LOGGER.info("pg source definition: {}", sourceDef.getDockerImageTag()); - LOGGER.info("pg destination definition: {}", destinationDef.getDockerImageTag()); - - testHarness = new AirbyteAcceptanceTestHarness(apiClient, workspaceId); - } - - @AfterAll - static void end() { - testHarness.stopDbAndContainers(); - } - - @BeforeEach - void setup() throws URISyntaxException, IOException, SQLException { - testHarness.setup(); - } - - @AfterEach - void tearDown() { - testHarness.cleanup(); - } - - @RetryingTest(3) - @Order(1) - void testManualSync() throws Exception { - final String connectionName = "test-connection"; - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - 
waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead.getJob()); - testHarness.assertSourceAndDestinationDbInSync(false); - } - - @RetryingTest(3) - @Order(2) - void testCheckpointing() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - "E2E Test Source -" + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "EXCEPTION_AFTER_N") - .put("throw_after_n_records", 100) - .build())); - - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.of(TYPE, "SILENT"))); - - final String connectionName = "test-connection"; - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final AirbyteStream stream = catalog.getStreams().get(0).getStream(); - - assertEquals( - Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), - stream.getSupportedSyncModes()); - assertTrue(MoreBooleans.isTruthy(stream.getSourceDefinedCursor())); - - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND; - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() 
- .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - // wait to get out of pending. - final JobRead runningJob = waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - // wait to get out of running. - waitWhileJobHasStatus(apiClient.getJobsApi(), runningJob, Sets.newHashSet(JobStatus.RUNNING)); - // now cancel it so that we freeze state! - try { - apiClient.getJobsApi().cancelJob(new JobIdRequestBody().id(connectionSyncRead1.getJob().getId())); - } catch (final Exception e) { - LOGGER.error("error:", e); - } - - final ConnectionState connectionState = waitForConnectionState(apiClient, connectionId); - - // the source is set to emit a state message every 5th message. because of the multi threaded - // nature, we can't guarantee exactly what checkpoint will be registered. what we can do is send - // enough messages to make sure that we checkpoint at least once. - assertNotNull(connectionState.getState()); - assertTrue(connectionState.getState().get(COLUMN1).isInt()); - LOGGER.info("state value: {}", connectionState.getState().get(COLUMN1).asInt()); - assertTrue(connectionState.getState().get(COLUMN1).asInt() > 0); - assertEquals(0, connectionState.getState().get(COLUMN1).asInt() % 5); - } - - // verify that when the worker uses backpressure from pipes that no records are lost. 
- @RetryingTest(3) - @Order(4) - void testBackpressure() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - "E2E Test Source -" + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "INFINITE_FEED") - .put("max_records", 5000) - .build())); - - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "THROTTLED") - .put("millis_per_record", 1) - .build())); - - final String connectionName = "test-connection"; - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - final UUID connectionId = - testHarness.createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - // wait to get out of pending. - final JobRead runningJob = waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - // wait to get out of running. 
- waitWhileJobHasStatus(apiClient.getJobsApi(), runningJob, Sets.newHashSet(JobStatus.RUNNING)); - - final JobInfoRead jobInfo = apiClient.getJobsApi().getJobInfo(new JobIdRequestBody().id(runningJob.getId())); - final AttemptInfoRead attemptInfoRead = jobInfo.getAttempts().get(jobInfo.getAttempts().size() - 1); - assertNotNull(attemptInfoRead); - - int expectedMessageNumber = 0; - final int max = 10_000; - for (final String logLine : attemptInfoRead.getLogs().getLogLines()) { - if (expectedMessageNumber > max) { - break; - } - - if (logLine.contains("received record: ") && logLine.contains("\"type\": \"RECORD\"")) { - assertTrue( - logLine.contains(String.format("\"column1\": \"%s\"", expectedMessageNumber)), - String.format("Expected %s but got: %s", expectedMessageNumber, logLine)); - expectedMessageNumber++; - } - } - } - -} diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java deleted file mode 100644 index 3e7cf2888f75..000000000000 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java +++ /dev/null @@ -1,1594 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.test.acceptance; - -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.AWESOME_PEOPLE_TABLE_NAME; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COLUMN_ID; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COLUMN_NAME; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COOL_EMPLOYEES_TABLE_NAME; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.PUBLIC_SCHEMA_NAME; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.STAGING_SCHEMA_NAME; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.STREAM_NAME; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForSuccessfulJob; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitWhileJobHasStatus; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitWhileJobIsRunning; -import static java.lang.Thread.sleep; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.WebBackendApi; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.AirbyteStream; -import io.airbyte.api.client.model.generated.AirbyteStreamAndConfiguration; -import io.airbyte.api.client.model.generated.AirbyteStreamConfiguration; -import 
io.airbyte.api.client.model.generated.AttemptInfoRead; -import io.airbyte.api.client.model.generated.AttemptStatus; -import io.airbyte.api.client.model.generated.CheckConnectionRead; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.client.model.generated.ConnectionRead; -import io.airbyte.api.client.model.generated.ConnectionScheduleData; -import io.airbyte.api.client.model.generated.ConnectionScheduleDataBasicSchedule; -import io.airbyte.api.client.model.generated.ConnectionScheduleDataBasicSchedule.TimeUnitEnum; -import io.airbyte.api.client.model.generated.ConnectionScheduleDataCron; -import io.airbyte.api.client.model.generated.ConnectionScheduleType; -import io.airbyte.api.client.model.generated.ConnectionState; -import io.airbyte.api.client.model.generated.ConnectionStatus; -import io.airbyte.api.client.model.generated.DataType; -import io.airbyte.api.client.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationDefinitionSpecificationRead; -import io.airbyte.api.client.model.generated.DestinationIdRequestBody; -import io.airbyte.api.client.model.generated.DestinationRead; -import io.airbyte.api.client.model.generated.DestinationSyncMode; -import io.airbyte.api.client.model.generated.JobConfigType; -import io.airbyte.api.client.model.generated.JobIdRequestBody; -import io.airbyte.api.client.model.generated.JobInfoRead; -import io.airbyte.api.client.model.generated.JobListRequestBody; -import io.airbyte.api.client.model.generated.JobRead; -import io.airbyte.api.client.model.generated.JobStatus; -import io.airbyte.api.client.model.generated.JobWithAttemptsRead; -import io.airbyte.api.client.model.generated.OperationCreate; -import io.airbyte.api.client.model.generated.OperationRead; -import 
io.airbyte.api.client.model.generated.OperatorConfiguration; -import io.airbyte.api.client.model.generated.OperatorType; -import io.airbyte.api.client.model.generated.OperatorWebhook; -import io.airbyte.api.client.model.generated.OperatorWebhook.WebhookTypeEnum; -import io.airbyte.api.client.model.generated.OperatorWebhookDbtCloud; -import io.airbyte.api.client.model.generated.SelectedFieldInfo; -import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.client.model.generated.SourceDefinitionRead; -import io.airbyte.api.client.model.generated.SourceDefinitionSpecificationRead; -import io.airbyte.api.client.model.generated.SourceIdRequestBody; -import io.airbyte.api.client.model.generated.SourceRead; -import io.airbyte.api.client.model.generated.StreamDescriptor; -import io.airbyte.api.client.model.generated.StreamState; -import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.api.client.model.generated.WebBackendConnectionUpdate; -import io.airbyte.api.client.model.generated.WebhookConfigWrite; -import io.airbyte.api.client.model.generated.WorkspaceRead; -import io.airbyte.api.client.model.generated.WorkspaceUpdate; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; -import io.airbyte.db.Database; -import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.test.utils.AirbyteAcceptanceTestHarness; -import io.airbyte.test.utils.PostgreSQLContainerHelper; -import io.airbyte.test.utils.SchemaTableNamePair; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; -import java.time.Duration; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import org.jooq.DSLContext; -import 
org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInfo; -import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; - -/** - * This class tests for api functionality and basic sync functionality. - *

    - * Due to the number of tests here, this set runs only on the docker deployment for speed. The tests - * here are disabled for Kubernetes as operations take much longer due to Kubernetes pod spin up - * times and there is little value in re-running these tests since this part of the system does not - * vary between deployments. - *

    - * We order tests such that earlier tests test more basic behavior relied upon in later tests. e.g. - * We test that we can create a destination before we test whether we can sync data to it. - */ -@DisabledIfEnvironmentVariable(named = "KUBE", - matches = "true") -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class BasicAcceptanceTests { - - private static final Logger LOGGER = LoggerFactory.getLogger(BasicAcceptanceTests.class); - - private static final Boolean WITH_SCD_TABLE = true; - - private static final Boolean WITHOUT_SCD_TABLE = false; - - private static AirbyteAcceptanceTestHarness testHarness; - private static AirbyteApiClient apiClient; - private static WebBackendApi webBackendApi; - private static UUID workspaceId; - private static PostgreSQLContainer sourcePsql; - - private static final String TYPE = "type"; - private static final String REF = "$ref"; - private static final String INTEGER_REFERENCE = "WellKnownTypes.json#/definitions/Integer"; - private static final String STRING_REFERENCE = "WellKnownTypes.json#/definitions/String"; - private static final String PUBLIC = "public"; - private static final String E2E_TEST_SOURCE = "E2E Test Source -"; - private static final String INFINITE_FEED = "INFINITE_FEED"; - private static final String MESSAGE_INTERVAL = "message_interval"; - private static final String MAX_RECORDS = "max_records"; - private static final String TEST_CONNECTION = "test-connection"; - private static final String STATE_AFTER_SYNC_ONE = "state after sync 1: {}"; - private static final String STATE_AFTER_SYNC_TWO = "state after sync 2: {}"; - private static final String GERALT = "geralt"; - private static final String NAME = "name"; - private static final String VALUE = "value"; - private static final String LOCATION = "location"; - private static final String FIELD = "field"; - private static final String ID_AND_NAME = "id_and_name"; - - private static final int MAX_SCHEDULED_JOB_RETRIES = 10; - - private static final 
ConnectionScheduleData BASIC_SCHEDULE_DATA = new ConnectionScheduleData().basicSchedule( - new ConnectionScheduleDataBasicSchedule().units(1L).timeUnit(TimeUnitEnum.HOURS)); - - @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { - apiClient = new AirbyteApiClient( - new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api")); - webBackendApi = new WebBackendApi( - new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api")); - // work in whatever default workspace is present. - workspaceId = apiClient.getWorkspaceApi().listWorkspaces().getWorkspaces().get(0).getWorkspaceId(); - LOGGER.info("workspaceId = " + workspaceId); - - // log which connectors are being used. - final SourceDefinitionRead sourceDef = apiClient.getSourceDefinitionApi() - .getSourceDefinition(new SourceDefinitionIdRequestBody() - .sourceDefinitionId(UUID.fromString("decd338e-5647-4c0b-adf4-da0e75f5a750"))); - final DestinationDefinitionRead destinationDef = apiClient.getDestinationDefinitionApi() - .getDestinationDefinition(new DestinationDefinitionIdRequestBody() - .destinationDefinitionId(UUID.fromString("25c5221d-dce2-4163-ade9-739ef790f503"))); - LOGGER.info("pg source definition: {}", sourceDef.getDockerImageTag()); - LOGGER.info("pg destination definition: {}", destinationDef.getDockerImageTag()); - - testHarness = new AirbyteAcceptanceTestHarness(apiClient, workspaceId); - sourcePsql = testHarness.getSourcePsql(); - } - - @AfterAll - static void end() { - testHarness.stopDbAndContainers(); - } - - @BeforeEach - void setup() throws SQLException, URISyntaxException, IOException { - testHarness.setup(); - } - - @AfterEach - void tearDown() { - testHarness.cleanup(); - } - - @Test - @Order(-2) - void testGetDestinationSpec() throws ApiException { - final UUID destinationDefinitionId = testHarness.getPostgresDestinationDefinitionId(); - final 
DestinationDefinitionSpecificationRead spec = apiClient.getDestinationDefinitionSpecificationApi() - .getDestinationDefinitionSpecification( - new DestinationDefinitionIdWithWorkspaceId().destinationDefinitionId(destinationDefinitionId).workspaceId(UUID.randomUUID())); - assertEquals(destinationDefinitionId, spec.getDestinationDefinitionId()); - assertNotNull(spec.getConnectionSpecification()); - } - - @Test - @Order(-1) - void testFailedGet404() { - final var e = assertThrows(ApiException.class, () -> apiClient.getDestinationDefinitionSpecificationApi() - .getDestinationDefinitionSpecification( - new DestinationDefinitionIdWithWorkspaceId().destinationDefinitionId(UUID.randomUUID()).workspaceId(UUID.randomUUID()))); - assertEquals(404, e.getCode()); - } - - @Test - @Order(0) - void testGetSourceSpec() throws ApiException { - final UUID sourceDefId = testHarness.getPostgresSourceDefinitionId(); - final SourceDefinitionSpecificationRead spec = apiClient.getSourceDefinitionSpecificationApi() - .getSourceDefinitionSpecification(new SourceDefinitionIdWithWorkspaceId().sourceDefinitionId(sourceDefId).workspaceId(UUID.randomUUID())); - assertEquals(sourceDefId, spec.getSourceDefinitionId()); - assertNotNull(spec.getConnectionSpecification()); - } - - @Test - @Order(1) - void testCreateDestination() throws ApiException { - final UUID destinationDefId = testHarness.getPostgresDestinationDefinitionId(); - final JsonNode destinationConfig = testHarness.getDestinationDbConfig(); - final String name = "AccTestDestinationDb-" + UUID.randomUUID(); - - final DestinationRead createdDestination = testHarness.createDestination( - name, - workspaceId, - destinationDefId, - destinationConfig); - - assertEquals(name, createdDestination.getName()); - assertEquals(destinationDefId, createdDestination.getDestinationDefinitionId()); - assertEquals(workspaceId, createdDestination.getWorkspaceId()); - assertEquals(testHarness.getDestinationDbConfigWithHiddenPassword(), 
createdDestination.getConnectionConfiguration()); - } - - @Test - @Order(2) - void testDestinationCheckConnection() throws ApiException { - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - - final CheckConnectionRead.StatusEnum checkOperationStatus = apiClient.getDestinationApi() - .checkConnectionToDestination(new DestinationIdRequestBody().destinationId(destinationId)) - .getStatus(); - - assertEquals(CheckConnectionRead.StatusEnum.SUCCEEDED, checkOperationStatus); - } - - @Test - @Order(3) - void wtestCreateSource() throws ApiException { - final String dbName = "acc-test-db"; - final UUID postgresSourceDefinitionId = testHarness.getPostgresSourceDefinitionId(); - final JsonNode sourceDbConfig = testHarness.getSourceDbConfig(); - - final SourceRead response = testHarness.createSource( - dbName, - workspaceId, - postgresSourceDefinitionId, - sourceDbConfig); - - final JsonNode expectedConfig = Jsons.jsonNode(sourceDbConfig); - // expect replacement of secret with magic string. 
- ((ObjectNode) expectedConfig).put(JdbcUtils.PASSWORD_KEY, "**********"); - assertEquals(dbName, response.getName()); - assertEquals(workspaceId, response.getWorkspaceId()); - assertEquals(postgresSourceDefinitionId, response.getSourceDefinitionId()); - assertEquals(expectedConfig, response.getConnectionConfiguration()); - } - - @Test - @Order(4) - void testSourceCheckConnection() throws ApiException { - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - - final CheckConnectionRead checkConnectionRead = apiClient.getSourceApi().checkConnectionToSource(new SourceIdRequestBody().sourceId(sourceId)); - - assertEquals( - CheckConnectionRead.StatusEnum.SUCCEEDED, - checkConnectionRead.getStatus(), - checkConnectionRead.getMessage()); - } - - @Test - @Order(5) - void testDiscoverSourceSchema() throws ApiException { - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - - final AirbyteCatalog actual = testHarness.discoverSourceSchema(sourceId); - - final Map> fields = ImmutableMap.of( - COLUMN_ID, ImmutableMap.of(TYPE, DataType.NUMBER.getValue(), "airbyte_type", "integer"), - COLUMN_NAME, ImmutableMap.of(TYPE, DataType.STRING.getValue())); - final JsonNode jsonSchema = Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "object") - .put("properties", fields) - .build()); - final AirbyteStream stream = new AirbyteStream() - .name(STREAM_NAME) - .namespace(PUBLIC) - .jsonSchema(jsonSchema) - .sourceDefinedCursor(null) - .defaultCursorField(Collections.emptyList()) - .sourceDefinedPrimaryKey(Collections.emptyList()) - .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - final AirbyteStreamConfiguration streamConfig = new AirbyteStreamConfiguration() - .syncMode(SyncMode.FULL_REFRESH) - .cursorField(Collections.emptyList()) - .destinationSyncMode(DestinationSyncMode.APPEND) - .primaryKey(Collections.emptyList()) - .aliasName(STREAM_NAME.replace(".", "_")) - .selected(true) - .suggested(true); - final 
AirbyteCatalog expected = new AirbyteCatalog() - .streams(Lists.newArrayList(new AirbyteStreamAndConfiguration() - .stream(stream) - .config(streamConfig))); - - assertEquals(expected, actual); - } - - @Test - @Order(6) - void testCreateConnection() throws ApiException { - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final String name = "test-connection-" + UUID.randomUUID(); - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode).setFieldSelectionEnabled(false)); - final ConnectionRead createdConnection = - testHarness.createConnection(name, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.BASIC, BASIC_SCHEDULE_DATA); - createdConnection.getSyncCatalog().getStreams().forEach(s -> s.getConfig().setSuggested(true)); - - assertEquals(sourceId, createdConnection.getSourceId()); - assertEquals(destinationId, createdConnection.getDestinationId()); - assertEquals(1, createdConnection.getOperationIds().size()); - assertEquals(operationId, createdConnection.getOperationIds().get(0)); - assertEquals(catalog, createdConnection.getSyncCatalog()); - assertEquals(ConnectionScheduleType.BASIC, createdConnection.getScheduleType()); - assertEquals(BASIC_SCHEDULE_DATA, createdConnection.getScheduleData()); - assertEquals(name, createdConnection.getName()); - } - - @Test - @Order(7) - void testCancelSync() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + 
UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MESSAGE_INTERVAL, 1000) - .put(MAX_RECORDS, Duration.ofMinutes(5).toSeconds()) - .build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - // wait to get out of PENDING - final JobRead jobRead = waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.PENDING)); - assertEquals(JobStatus.RUNNING, jobRead.getStatus()); - - final var resp = apiClient.getJobsApi().cancelJob(new JobIdRequestBody().id(connectionSyncRead.getJob().getId())); - assertEquals(JobStatus.CANCELLED, resp.getJob().getStatus()); - } - - @Test - @Order(8) - void testScheduledSync() throws Exception { - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode 
destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.BASIC, - BASIC_SCHEDULE_DATA).getConnectionId(); - - waitForSuccessfulJobWithRetries(connectionId, MAX_SCHEDULED_JOB_RETRIES); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - } - - @Test - @Order(9) - void testCronSync() throws Exception { - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - // NOTE: this cron should run once every two minutes. - final ConnectionScheduleData connectionScheduleData = new ConnectionScheduleData().cron( - new ConnectionScheduleDataCron().cronExpression("* */2 * * * ?").cronTimeZone("UTC")); - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.CRON, - connectionScheduleData).getConnectionId(); - - waitForSuccessfulJobWithRetries(connectionId, MAX_SCHEDULED_JOB_RETRIES); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - // remove connection to avoid exception during tear down - testHarness.removeConnection(connectionId); - } - - @Test - @Order(19) - void 
testWebhookOperationExecutesSuccessfully() throws Exception { - // create workspace webhook config - final WorkspaceRead workspaceRead = apiClient.getWorkspaceApi().updateWorkspace( - new WorkspaceUpdate().workspaceId(workspaceId).addWebhookConfigsItem( - new WebhookConfigWrite().name("reqres test"))); - // create a webhook operation - final OperationRead operationRead = apiClient.getOperationApi().createOperation(new OperationCreate() - .workspaceId(workspaceId) - .name("reqres test") - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.WEBHOOK) - .webhook(new OperatorWebhook() - .webhookConfigId(workspaceRead.getWebhookConfigs().get(0).getId()) - // NOTE: this dbt Cloud config won't actually work, but the sync should still succeed. - .webhookType(WebhookTypeEnum.DBTCLOUD) - .dbtCloud(new OperatorWebhookDbtCloud().accountId(123).jobId(456))))); - // create a connection with the new operation. - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - // NOTE: this is a normalization operation. 
- final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection( - TEST_CONNECTION, sourceId, destinationId, List.of(operationId, operationRead.getOperationId()), catalog, ConnectionScheduleType.MANUAL, - null) - .getConnectionId(); - // run the sync - apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJobWithRetries(connectionId, MAX_SCHEDULED_JOB_RETRIES); - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - // remove connection to avoid exception during tear down - testHarness.removeConnection(connectionId); - // TODO(mfsiega-airbyte): add webhook info to the jobs api to verify the webhook execution status. 
- } - - @Test - @Order(10) - void testMultipleSchemasAndTablesSync() throws Exception { - // create tables in another schema - PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_second_schema_multiple_tables.sql"), sourcePsql); - - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead.getJob()); - testHarness.assertSourceAndDestinationDbInSync(false); - } - - @Test - @Order(11) - void testMultipleSchemasSameTablesSync() throws Exception { - // create tables in another schema - PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_separate_schema_same_table.sql"), sourcePsql); - - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = 
DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead.getJob()); - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - } - - @Test - @Order(12) - void testIncrementalDedupeSync() throws Exception { - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(destinationSyncMode) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - // sync from start - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - - testHarness.assertSourceAndDestinationDbInSync(WITH_SCD_TABLE); - - // add new records and run again. 
- final Database source = testHarness.getSourceDatabase(); - final List expectedRawRecords = testHarness.retrieveSourceRecords(source, STREAM_NAME); - expectedRawRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, "sherif").build())); - expectedRawRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 7).put(COLUMN_NAME, "chris").build())); - source.query(ctx -> ctx.execute("UPDATE id_and_name SET id=6 WHERE name='sherif'")); - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(7, 'chris')")); - // retrieve latest snapshot of source records after modifications; the deduplicated table in - // destination should mirror this latest state of records - final List expectedNormalizedRecords = testHarness.retrieveSourceRecords(source, STREAM_NAME); - - final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - - testHarness.assertRawDestinationContains(expectedRawRecords, new SchemaTableNamePair(PUBLIC, STREAM_NAME)); - testHarness.assertNormalizedDestinationContains(expectedNormalizedRecords); - } - - @Test - @Order(13) - void testIncrementalSync() throws Exception { - LOGGER.info("Starting testIncrementalSync()"); - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final AirbyteStream stream = catalog.getStreams().get(0).getStream(); - - assertEquals(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), stream.getSupportedSyncModes()); - // instead of assertFalse to avoid NPE from unboxed. 
- assertNull(stream.getSourceDefinedCursor()); - assertTrue(stream.getDefaultCursorField().isEmpty()); - assertTrue(stream.getSourceDefinedPrimaryKey().isEmpty()); - - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND; - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - LOGGER.info("Beginning testIncrementalSync() sync 1"); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - LOGGER.info(STATE_AFTER_SYNC_ONE, apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - - // add new records and run again. - final Database source = testHarness.getSourceDatabase(); - // get contents of source before mutating records. - final List expectedRecords = testHarness.retrieveSourceRecords(source, STREAM_NAME); - expectedRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, GERALT).build())); - // add a new record - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'geralt')")); - // mutate a record that was already synced with out updating its cursor value. if we are actually - // full refreshing, this record will appear in the output and cause the test to fail. if we are, - // correctly, doing incremental, we will not find this value in the destination. 
- source.query(ctx -> ctx.execute("UPDATE id_and_name SET name='yennefer' WHERE id=2")); - - LOGGER.info("Starting testIncrementalSync() sync 2"); - final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - LOGGER.info(STATE_AFTER_SYNC_TWO, apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertRawDestinationContains(expectedRecords, new SchemaTableNamePair(PUBLIC, STREAM_NAME)); - - // reset back to no data. - - LOGGER.info("Starting testIncrementalSync() reset"); - final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitWhileJobHasStatus(apiClient.getJobsApi(), jobInfoRead.getJob(), - Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING, JobStatus.INCOMPLETE, JobStatus.FAILED)); - // This is a band-aid to prevent some race conditions where the job status was updated but we may - // still be cleaning up some data in the reset table. This would be an argument for reworking the - // source of truth of the replication workflow state to be in DB rather than in Memory and - // serialized automagically by temporal - waitWhileJobIsRunning(apiClient.getJobsApi(), jobInfoRead.getJob(), Duration.ofMinutes(1)); - - LOGGER.info("state after reset: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertRawDestinationContains(Collections.emptyList(), new SchemaTableNamePair(PUBLIC, - STREAM_NAME)); - - // sync one more time. verify it is the equivalent of a full refresh. 
- LOGGER.info("Starting testIncrementalSync() sync 3"); - final JobInfoRead connectionSyncRead3 = - apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead3.getJob()); - LOGGER.info("state after sync 3: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - - } - - @Test - @Order(14) - void testDeleteConnection() throws Exception { - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(destinationSyncMode) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.RUNNING)); - - // test normal deletion of connection - LOGGER.info("Calling delete connection..."); - apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - // remove connection to avoid exception during tear down - // connectionIds.remove(connectionId); // todo remove - 
testHarness.removeConnection(connectionId); - - ConnectionStatus connectionStatus = - apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)).getStatus(); - assertEquals(ConnectionStatus.DEPRECATED, connectionStatus); - - // test that repeated deletion call for same connection is successful - LOGGER.info("Calling delete connection a second time to test repeat call behavior..."); - apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - // test deletion of connection when temporal workflow is in a bad state - LOGGER.info("Testing connection deletion when temporal is in a terminal state"); - connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - testHarness.terminateTemporalWorkflow(connectionId); - - // we should still be able to delete the connection when the temporal workflow is in this state - apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - connectionStatus = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)).getStatus(); - assertEquals(ConnectionStatus.DEPRECATED, connectionStatus); - } - - @Test - @Order(15) - void testUpdateConnectionWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. - // Also, this test doesn't verify correctness of the schedule update applied, as adding the ability - // to query a workflow for its current - // schedule is out of scope for the issue (https://github.com/airbytehq/airbyte/issues/11215). This - // test just ensures that the underlying workflow - // is running after the update method is called. 
- final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing connection update when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - testHarness.terminateTemporalWorkflow(connectionId); - - // we should still be able to update the connection when the temporal workflow is in this state - testHarness.updateConnectionSchedule( - connectionId, - ConnectionScheduleType.BASIC, - new ConnectionScheduleData().basicSchedule(new ConnectionScheduleDataBasicSchedule().timeUnit(TimeUnitEnum.HOURS).units(1L))); - - LOGGER.info("Waiting for workflow to be recreated..."); - Thread.sleep(500); - - final WorkflowState workflowState = testHarness.getWorkflowState(connectionId); - assertTrue(workflowState.isRunning()); - } - - @Test - @Order(16) - void testManualSyncRepairsWorkflowWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. 
- final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MAX_RECORDS, 5000) - .put(MESSAGE_INTERVAL, 100) - .build())); - final UUID sourceId = source.getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing manual sync when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - LOGGER.info("Starting first manual sync"); - final JobInfoRead firstJobInfo = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - LOGGER.info("Terminating workflow during first sync"); - testHarness.terminateTemporalWorkflow(connectionId); - - LOGGER.info("Submitted another manual sync"); - apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - LOGGER.info("Waiting for workflow to be recreated..."); - Thread.sleep(500); - - final WorkflowState workflowState = testHarness.getWorkflowState(connectionId); - assertTrue(workflowState.isRunning()); - assertTrue(workflowState.isSkipScheduling()); - - // verify that the first manual sync was marked as failed - final JobInfoRead terminatedJobInfo = 
apiClient.getJobsApi().getJobInfo(new JobIdRequestBody().id(firstJobInfo.getJob().getId())); - assertEquals(JobStatus.FAILED, terminatedJobInfo.getJob().getStatus()); - } - - @Test - @Order(17) - void testResetConnectionRepairsWorkflowWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing reset connection when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - testHarness.terminateTemporalWorkflow(connectionId); - - final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - assertEquals(JobConfigType.RESET_CONNECTION, jobInfoRead.getJob().getConfigType()); - } - - @Test - @Order(18) - void testResetCancelsRunningSync() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MESSAGE_INTERVAL, 1000) - .put(MAX_RECORDS, Duration.ofMinutes(5).toSeconds()) - 
.build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - // wait to get out of PENDING - final JobRead jobRead = waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.PENDING)); - assertEquals(JobStatus.RUNNING, jobRead.getStatus()); - - // send reset request while sync is still running - final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - assertEquals(JobConfigType.RESET_CONNECTION, jobInfoRead.getJob().getConfigType()); - - // verify that sync job was cancelled - final JobRead connectionSyncReadAfterReset = - apiClient.getJobsApi().getJobInfo(new JobIdRequestBody().id(connectionSyncRead.getJob().getId())).getJob(); - assertEquals(JobStatus.CANCELLED, connectionSyncReadAfterReset.getStatus()); - } - - @Test - void testSyncAfterUpgradeToPerStreamState(final TestInfo testInfo) throws Exception { - LOGGER.info("Starting {}", testInfo.getDisplayName()); - final SourceRead source = testHarness.createPostgresSource(true); - final UUID sourceId = source.getSourceId(); - final UUID sourceDefinitionId = source.getSourceDefinitionId(); - 
final UUID destinationId = testHarness.createPostgresDestination(true).getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - // Fetch the current/most recent source definition version - final SourceDefinitionRead sourceDefinitionRead = - apiClient.getSourceDefinitionApi().getSourceDefinition(new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceDefinitionId)); - final String currentSourceDefintionVersion = sourceDefinitionRead.getDockerImageTag(); - - // Set the source to a version that does not support per-stream state - LOGGER.info("Setting source connector to pre-per-stream state version {}...", - AirbyteAcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, AirbyteAcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - LOGGER.info("Beginning {} sync 1", testInfo.getDisplayName()); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - LOGGER.info(STATE_AFTER_SYNC_ONE, apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - - // Set source to a version that supports per-stream state - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, currentSourceDefintionVersion); - LOGGER.info("Upgraded source 
connector per-stream state supported version {}.", currentSourceDefintionVersion); - - // add new records and run again. - final Database sourceDatabase = testHarness.getSourceDatabase(); - // get contents of source before mutating records. - final List expectedRecords = testHarness.retrieveSourceRecords(sourceDatabase, STREAM_NAME); - expectedRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, GERALT).build())); - // add a new record - sourceDatabase.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'geralt')")); - // mutate a record that was already synced with out updating its cursor value. if we are actually - // full refreshing, this record will appear in the output and cause the test to fail. if we are, - // correctly, doing incremental, we will not find this value in the destination. - sourceDatabase.query(ctx -> ctx.execute("UPDATE id_and_name SET name='yennefer' WHERE id=2")); - - LOGGER.info("Starting {} sync 2", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - LOGGER.info(STATE_AFTER_SYNC_TWO, apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertRawDestinationContains(expectedRecords, new SchemaTableNamePair(PUBLIC, STREAM_NAME)); - - // reset back to no data. 
- LOGGER.info("Starting {} reset", testInfo.getDisplayName()); - final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitWhileJobHasStatus(apiClient.getJobsApi(), jobInfoRead.getJob(), - Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING, JobStatus.INCOMPLETE, JobStatus.FAILED)); - // This is a band-aid to prevent some race conditions where the job status was updated but we may - // still be cleaning up some data in the reset table. This would be an argument for reworking the - // source of truth of the replication workflow state to be in DB rather than in Memory and - // serialized automagically by temporal - waitWhileJobIsRunning(apiClient.getJobsApi(), jobInfoRead.getJob(), Duration.ofMinutes(1)); - - LOGGER.info("state after reset: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertRawDestinationContains(Collections.emptyList(), new SchemaTableNamePair(PUBLIC, - STREAM_NAME)); - - // sync one more time. verify it is the equivalent of a full refresh. 
- final String expectedState = - "{\"cursor\":\"6\",\"stream_name\":\"id_and_name\",\"cursor_field\":[\"id\"],\"stream_namespace\":\"public\",\"cursor_record_count\":1}"; - LOGGER.info("Starting {} sync 3", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead3 = - apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead3.getJob()); - final ConnectionState state = apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); - LOGGER.info("state after sync 3: {}", state); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - assertNotNull(state.getStreamState()); - assertEquals(1, state.getStreamState().size()); - final StreamState idAndNameState = state.getStreamState().get(0); - assertEquals(new StreamDescriptor().namespace(PUBLIC).name(STREAM_NAME), idAndNameState.getStreamDescriptor()); - assertEquals(Jsons.deserialize(expectedState), idAndNameState.getStreamState()); - } - - @Test - void testSyncAfterUpgradeToPerStreamStateWithNoNewData(final TestInfo testInfo) throws Exception { - LOGGER.info("Starting {}", testInfo.getDisplayName()); - final SourceRead source = testHarness.createPostgresSource(true); - final UUID sourceId = source.getSourceId(); - final UUID sourceDefinitionId = source.getSourceDefinitionId(); - final UUID destinationId = testHarness.createPostgresDestination(true).getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - // Fetch the current/most recent source definition version - final SourceDefinitionRead sourceDefinitionRead = - apiClient.getSourceDefinitionApi().getSourceDefinition(new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceDefinitionId)); - final String currentSourceDefintionVersion = sourceDefinitionRead.getDockerImageTag(); - - 
// Set the source to a version that does not support per-stream state - LOGGER.info("Setting source connector to pre-per-stream state version {}...", - AirbyteAcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, AirbyteAcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - LOGGER.info("Beginning {} sync 1", testInfo.getDisplayName()); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - LOGGER.info(STATE_AFTER_SYNC_ONE, apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - - // Set source to a version that supports per-stream state - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, currentSourceDefintionVersion); - LOGGER.info("Upgraded source connector per-stream state supported version {}.", currentSourceDefintionVersion); - - // sync one more time. 
verify that nothing has been synced - LOGGER.info("Starting {} sync 2", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = - apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - LOGGER.info(STATE_AFTER_SYNC_TWO, apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - final JobInfoRead syncJob = apiClient.getJobsApi().getJobInfo(new JobIdRequestBody().id(connectionSyncRead2.getJob().getId())); - final Optional result = syncJob.getAttempts().stream() - .sorted((a, b) -> Long.compare(b.getAttempt().getEndedAt(), a.getAttempt().getEndedAt())) - .findFirst(); - - assertTrue(result.isPresent()); - assertEquals(0, result.get().getAttempt().getRecordsSynced()); - assertEquals(0, result.get().getAttempt().getTotalStats().getRecordsEmitted()); - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - } - - @Test - void testResetAllWhenSchemaIsModifiedForLegacySource() throws Exception { - final String sourceTable1 = "test_table1"; - final String sourceTable2 = "test_table2"; - final String sourceTable3 = "test_table3"; - final String outputPrefix = "output_namespace_public.output_table_"; - final Database sourceDb = testHarness.getSourceDatabase(); - final Database destDb = testHarness.getDestinationDatabase(); - sourceDb.query(ctx -> { - ctx.createTableIfNotExists(sourceTable1).columns(DSL.field(NAME, SQLDataType.VARCHAR)).execute(); - ctx.truncate(sourceTable1).execute(); - ctx.insertInto(DSL.table(sourceTable1)).columns(DSL.field(NAME)).values("john").execute(); - ctx.insertInto(DSL.table(sourceTable1)).columns(DSL.field(NAME)).values("bob").execute(); - - ctx.createTableIfNotExists(sourceTable2).columns(DSL.field(VALUE, SQLDataType.VARCHAR)).execute(); - ctx.truncate(sourceTable2).execute(); - 
ctx.insertInto(DSL.table(sourceTable2)).columns(DSL.field(VALUE)).values("v1").execute(); - ctx.insertInto(DSL.table(sourceTable2)).columns(DSL.field(VALUE)).values("v2").execute(); - return null; - }); - - final SourceRead source = testHarness.createPostgresSource(true); - final UUID sourceId = source.getSourceId(); - final UUID sourceDefinitionId = source.getSourceDefinitionId(); - - // Fetch the current/most recent source definition version - final SourceDefinitionRead sourceDefinitionRead = - apiClient.getSourceDefinitionApi().getSourceDefinition(new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceDefinitionId)); - final String currentSourceDefinitionVersion = sourceDefinitionRead.getDockerImageTag(); - - try { - // Set the source to a version that does not support per-stream state - LOGGER.info("Setting source connector to pre-per-stream state version {}...", - AirbyteAcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, AirbyteAcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final UUID destinationId = testHarness.createPostgresDestination(true).getDestinationId(); - final OperationRead operation = testHarness.createOperation(); - final String name = "test_reset_when_schema_is_modified_" + UUID.randomUUID(); - - LOGGER.info("Discovered catalog: {}", catalog); - - final ConnectionRead connection = - testHarness.createConnection(name, sourceId, destinationId, List.of(operation.getOperationId()), catalog, ConnectionScheduleType.MANUAL, - null); - LOGGER.info("Created Connection: {}", connection); - - sourceDb.query(ctx -> { - prettyPrintTables(ctx, sourceTable1, sourceTable2); - return null; - }); - - // Run initial sync - LOGGER.info("Running initial sync"); - final JobInfoRead syncRead = - apiClient.getConnectionApi().syncConnection(new 
ConnectionIdRequestBody().connectionId(connection.getConnectionId())); - waitForSuccessfulJob(apiClient.getJobsApi(), syncRead.getJob()); - - // Some inspection for debug - destDb.query(ctx -> { - prettyPrintTables(ctx, outputPrefix + sourceTable1, outputPrefix + sourceTable2); - return null; - }); - final ConnectionState initSyncState = - apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connection.getConnectionId())); - LOGGER.info("ConnectionState after the initial sync: " + initSyncState.toString()); - - testHarness.assertSourceAndDestinationDbInSync(false); - - // Patch some data in the source - LOGGER.info("Modifying source tables"); - sourceDb.query(ctx -> { - // Adding a new rows to make sure we sync more data. - ctx.insertInto(DSL.table(sourceTable1)).columns(DSL.field(NAME)).values("alice").execute(); - ctx.insertInto(DSL.table(sourceTable2)).columns(DSL.field(VALUE)).values("v3").execute(); - - // The removed rows should no longer be in the destination since we expect a full reset - ctx.deleteFrom(DSL.table(sourceTable1)).where(DSL.field(NAME).eq("john")).execute(); - ctx.deleteFrom(DSL.table(sourceTable2)).where(DSL.field(VALUE).eq("v2")).execute(); - - // Adding a new table to trigger reset from the update connection API - ctx.createTableIfNotExists(sourceTable3).columns(DSL.field(LOCATION, SQLDataType.VARCHAR)).execute(); - ctx.truncate(sourceTable3).execute(); - ctx.insertInto(DSL.table(sourceTable3)).columns(DSL.field(LOCATION)).values("home").execute(); - ctx.insertInto(DSL.table(sourceTable3)).columns(DSL.field(LOCATION)).values("work").execute(); - ctx.insertInto(DSL.table(sourceTable3)).columns(DSL.field(LOCATION)).values("space").execute(); - return null; - }); - - final AirbyteCatalog updatedCatalog = testHarness.discoverSourceSchemaWithoutCache(sourceId); - LOGGER.info("Discovered updated catalog: {}", updatedCatalog); - - // Update with refreshed catalog - LOGGER.info("Submit the update request"); - final 
WebBackendConnectionUpdate update = new WebBackendConnectionUpdate() - .connectionId(connection.getConnectionId()) - .syncCatalog(updatedCatalog); - webBackendApi.webBackendUpdateConnection(update); - - LOGGER.info("Inspecting Destination DB after the update request, tables should be empty"); - destDb.query(ctx -> { - prettyPrintTables(ctx, outputPrefix + sourceTable1, outputPrefix + sourceTable2); - return null; - }); - final ConnectionState postResetState = - apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connection.getConnectionId())); - LOGGER.info("ConnectionState after the update request: {}", postResetState.toString()); - - // Wait until the sync from the UpdateConnection is finished - final JobRead syncFromTheUpdate = testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId()); - LOGGER.info("Generated SyncJob config: {}", syncFromTheUpdate.toString()); - waitForSuccessfulJob(apiClient.getJobsApi(), syncFromTheUpdate); - - final ConnectionState postUpdateState = - apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connection.getConnectionId())); - LOGGER.info("ConnectionState after the final sync: {}", postUpdateState.toString()); - - LOGGER.info("Inspecting DBs After the final sync"); - sourceDb.query(ctx -> { - prettyPrintTables(ctx, sourceTable1, sourceTable2, sourceTable3); - return null; - }); - destDb.query(ctx -> { - prettyPrintTables(ctx, outputPrefix + sourceTable1, outputPrefix + sourceTable2, outputPrefix + sourceTable3); - return null; - }); - - testHarness.assertSourceAndDestinationDbInSync(false); - } finally { - // Set source back to version it was set to at beginning of test - LOGGER.info("Set source connector back to per-stream state supported version {}.", currentSourceDefinitionVersion); - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, currentSourceDefinitionVersion); - } - } - - private void prettyPrintTables(final DSLContext ctx, final String... 
tables) { - for (final String table : tables) { - LOGGER.info("select * from {}", table); - Arrays.stream(ctx.selectFrom(table) - .fetch() - .toString() - .split("\\n")).forEach(LOGGER::info); - } - } - - @Test - @Disabled - void testIncrementalSyncMultipleStreams() throws Exception { - LOGGER.info("Starting testIncrementalSyncMultipleStreams()"); - - PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_second_schema_multiple_tables.sql"), sourcePsql); - - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - for (final AirbyteStreamAndConfiguration streamAndConfig : catalog.getStreams()) { - final AirbyteStream stream = streamAndConfig.getStream(); - assertEquals(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), stream.getSupportedSyncModes()); - // instead of assertFalse to avoid NPE from unboxed. 
- assertNull(stream.getSourceDefinedCursor()); - assertTrue(stream.getDefaultCursorField().isEmpty()); - assertTrue(stream.getSourceDefinedPrimaryKey().isEmpty()); - } - - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND; - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - LOGGER.info("Beginning testIncrementalSync() sync 1"); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - LOGGER.info(STATE_AFTER_SYNC_ONE, apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - - // add new records and run again. - final Database source = testHarness.getSourceDatabase(); - // get contents of source before mutating records. - final List expectedRecordsIdAndName = testHarness.retrieveSourceRecords(source, STREAM_NAME); - final List expectedRecordsCoolEmployees = - testHarness.retrieveSourceRecords(source, STAGING_SCHEMA_NAME + "." + COOL_EMPLOYEES_TABLE_NAME); - final List expectedRecordsAwesomePeople = - testHarness.retrieveSourceRecords(source, STAGING_SCHEMA_NAME + "." 
+ AWESOME_PEOPLE_TABLE_NAME); - expectedRecordsIdAndName.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, GERALT).build())); - expectedRecordsCoolEmployees.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, GERALT).build())); - expectedRecordsAwesomePeople.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 3).put(COLUMN_NAME, GERALT).build())); - // add a new record to each table - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'geralt')")); - source.query(ctx -> ctx.execute("INSERT INTO staging.cool_employees(id, name) VALUES(6, 'geralt')")); - source.query(ctx -> ctx.execute("INSERT INTO staging.awesome_people(id, name) VALUES(3, 'geralt')")); - // mutate a record that was already synced with out updating its cursor value. if we are actually - // full refreshing, this record will appear in the output and cause the test to fail. if we are, - // correctly, doing incremental, we will not find this value in the destination. 
- source.query(ctx -> ctx.execute("UPDATE id_and_name SET name='yennefer' WHERE id=2")); - source.query(ctx -> ctx.execute("UPDATE staging.cool_employees SET name='yennefer' WHERE id=2")); - source.query(ctx -> ctx.execute("UPDATE staging.awesome_people SET name='yennefer' WHERE id=2")); - - LOGGER.info("Starting testIncrementalSync() sync 2"); - final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - LOGGER.info(STATE_AFTER_SYNC_TWO, apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertRawDestinationContains(expectedRecordsIdAndName, new SchemaTableNamePair(PUBLIC_SCHEMA_NAME, STREAM_NAME)); - testHarness.assertRawDestinationContains(expectedRecordsCoolEmployees, new SchemaTableNamePair(STAGING_SCHEMA_NAME, COOL_EMPLOYEES_TABLE_NAME)); - testHarness.assertRawDestinationContains(expectedRecordsAwesomePeople, new SchemaTableNamePair(STAGING_SCHEMA_NAME, AWESOME_PEOPLE_TABLE_NAME)); - - // reset back to no data. - - LOGGER.info("Starting testIncrementalSync() reset"); - final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitWhileJobHasStatus(apiClient.getJobsApi(), jobInfoRead.getJob(), - Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING, JobStatus.INCOMPLETE, JobStatus.FAILED)); - // This is a band-aid to prevent some race conditions where the job status was updated but we may - // still be cleaning up some data in the reset table. 
This would be an argument for reworking the - // source of truth of the replication workflow state to be in DB rather than in Memory and - // serialized automagically by temporal - waitWhileJobIsRunning(apiClient.getJobsApi(), jobInfoRead.getJob(), Duration.ofMinutes(1)); - - LOGGER.info("state after reset: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertRawDestinationContains(Collections.emptyList(), new SchemaTableNamePair(PUBLIC, - STREAM_NAME)); - - // sync one more time. verify it is the equivalent of a full refresh. - LOGGER.info("Starting testIncrementalSync() sync 3"); - final JobInfoRead connectionSyncRead3 = - apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead3.getJob()); - LOGGER.info("state after sync 3: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - - } - - @Test - void testMultipleSchemasAndTablesSyncAndReset() throws Exception { - // create tables in another schema - PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource("postgres_second_schema_multiple_tables.sql"), sourcePsql); - - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, 
destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead.getJob()); - testHarness.assertSourceAndDestinationDbInSync(false); - final JobInfoRead connectionResetRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionResetRead.getJob()); - testHarness.assertDestinationDbEmpty(false); - } - - @Test - void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throws Exception { - LOGGER.info("Running: " + testInfo.getDisplayName()); - - // Add Table - final String additionalTable = "additional_table"; - final Database sourceDb = testHarness.getSourceDatabase(); - sourceDb.query(ctx -> { - ctx.createTableIfNotExists(additionalTable) - .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR)).execute(); - ctx.truncate(additionalTable).execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(1, "1").execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(2, "2").execute(); - return null; - }); - UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final OperationRead operation = testHarness.createOperation(); - final UUID operationId = operation.getOperationId(); - final String name = "test_reset_when_schema_is_modified_" + UUID.randomUUID(); - - testHarness.setIncrementalAppendSyncMode(catalog, List.of(COLUMN_ID)); - - final ConnectionRead connection = - testHarness.createConnection(name, 
sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null); - - // Run initial sync - final JobInfoRead syncRead = - apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connection.getConnectionId())); - waitForSuccessfulJob(apiClient.getJobsApi(), syncRead.getJob()); - - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - assertStreamStateContainsStream(connection.getConnectionId(), List.of( - new StreamDescriptor().name(ID_AND_NAME).namespace(PUBLIC), - new StreamDescriptor().name(additionalTable).namespace(PUBLIC))); - - LOGGER.info("Initial sync ran, now running an update with a stream being removed."); - - /** - * Remove stream - */ - sourceDb.query(ctx -> ctx.dropTableIfExists(additionalTable).execute()); - - // Update with refreshed catalog - AirbyteCatalog refreshedCatalog = testHarness.discoverSourceSchemaWithoutCache(sourceId); - WebBackendConnectionUpdate update = testHarness.getUpdateInput(connection, refreshedCatalog, operation); - webBackendApi.webBackendUpdateConnection(update); - - // Wait until the sync from the UpdateConnection is finished - JobRead syncFromTheUpdate = waitUntilTheNextJobIsStarted(connection.getConnectionId()); - waitForSuccessfulJob(apiClient.getJobsApi(), syncFromTheUpdate); - - // We do not check that the source and the dest are in sync here because removing a stream doesn't - // remove that - assertStreamStateContainsStream(connection.getConnectionId(), List.of( - new StreamDescriptor().name(ID_AND_NAME).namespace(PUBLIC))); - - LOGGER.info("Remove done, now running an update with a stream being added."); - - /** - * Add a stream -- the value of in the table are different than the initial import to ensure that it - * is properly reset. 
- */ - sourceDb.query(ctx -> { - ctx.createTableIfNotExists(additionalTable) - .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR)).execute(); - ctx.truncate(additionalTable).execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(3, "3").execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(4, "4").execute(); - return null; - }); - - sourceId = testHarness.createPostgresSource().getSourceId(); - refreshedCatalog = testHarness.discoverSourceSchema(sourceId); - update = testHarness.getUpdateInput(connection, refreshedCatalog, operation); - webBackendApi.webBackendUpdateConnection(update); - - syncFromTheUpdate = waitUntilTheNextJobIsStarted(connection.getConnectionId()); - waitForSuccessfulJob(apiClient.getJobsApi(), syncFromTheUpdate); - - // We do not check that the source and the dest are in sync here because removing a stream doesn't - // remove that - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - assertStreamStateContainsStream(connection.getConnectionId(), List.of( - new StreamDescriptor().name(ID_AND_NAME).namespace(PUBLIC), - new StreamDescriptor().name(additionalTable).namespace(PUBLIC))); - - LOGGER.info("Addition done, now running an update with a stream being updated."); - - // Update - sourceDb.query(ctx -> { - ctx.dropTableIfExists(additionalTable).execute(); - ctx.createTableIfNotExists(additionalTable) - .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR), DSL.field("another_field", SQLDataType.VARCHAR)) - .execute(); - ctx.truncate(additionalTable).execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD), DSL.field("another_field")).values(3, "3", "three") - .execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD), DSL.field("another_field")).values(4, "4", "four") - .execute(); - 
return null; - }); - - sourceId = testHarness.createPostgresSource().getSourceId(); - refreshedCatalog = testHarness.discoverSourceSchema(sourceId); - update = testHarness.getUpdateInput(connection, refreshedCatalog, operation); - webBackendApi.webBackendUpdateConnection(update); - - syncFromTheUpdate = waitUntilTheNextJobIsStarted(connection.getConnectionId()); - waitForSuccessfulJob(apiClient.getJobsApi(), syncFromTheUpdate); - - // We do not check that the source and the dest are in sync here because removing a stream doesn't - // remove that - testHarness.assertSourceAndDestinationDbInSync(WITHOUT_SCD_TABLE); - assertStreamStateContainsStream(connection.getConnectionId(), List.of( - new StreamDescriptor().name(ID_AND_NAME).namespace(PUBLIC), - new StreamDescriptor().name(additionalTable).namespace(PUBLIC))); - - } - - @Test - @Order(19) - void testIncrementalDedupeSyncRemoveOneColumn() throws Exception { - // !!! NOTE !!! this test relies on a feature flag that currently defaults to false. If you're - // running these tests locally against an external deployment and this test is failing, make sure - // the flag is enabled. 
- // Specifically: - // APPLY_FIELD_SELECTION=true - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND_DEDUP; - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(destinationSyncMode) - .primaryKey(List.of(List.of(COLUMN_ID)))); - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - // sync from start - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - - testHarness.assertSourceAndDestinationDbInSync(WITH_SCD_TABLE); - - // Update the catalog, so we only select the id column. - catalog.getStreams().get(0).getConfig().fieldSelectionEnabled(true).addSelectedFieldsItem(new SelectedFieldInfo().addFieldPathItem("id")); - testHarness.updateConnectionCatalog(connectionId, catalog); - - // add new records and run again. - final Database source = testHarness.getSourceDatabase(); - final List expectedRawRecords = testHarness.retrieveSourceRecords(source, STREAM_NAME); - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'mike')")); - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(7, 'chris')")); - // The expected new raw records should only have the ID column. 
- expectedRawRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).build())); - expectedRawRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 7).build())); - final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - - // For the normalized records, they should all only have the ID column. - final List expectedNormalizedRecords = testHarness.retrieveSourceRecords(source, STREAM_NAME).stream() - .map((record) -> ((ObjectNode) record).retain(COLUMN_ID)).collect(Collectors.toList()); - - testHarness.assertRawDestinationContains(expectedRawRecords, new SchemaTableNamePair(PUBLIC, STREAM_NAME)); - testHarness.assertNormalizedDestinationContainsIdColumn(expectedNormalizedRecords); - } - - private void assertStreamStateContainsStream(final UUID connectionId, final List expectedStreamDescriptors) throws ApiException { - final ConnectionState state = apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); - final List streamDescriptors = state.getStreamState().stream().map(StreamState::getStreamDescriptor).toList(); - - Assertions.assertTrue(streamDescriptors.containsAll(expectedStreamDescriptors) && expectedStreamDescriptors.containsAll(streamDescriptors)); - } - - private JobRead getMostRecentSyncJobId(final UUID connectionId) throws Exception { - return apiClient.getJobsApi() - .listJobsFor(new JobListRequestBody().configId(connectionId.toString()).configTypes(List.of(JobConfigType.SYNC))) - .getJobs() - .stream().findFirst().map(JobWithAttemptsRead::getJob).orElseThrow(); - } - - private JobRead waitUntilTheNextJobIsStarted(final UUID connectionId) throws Exception { - final JobRead lastJob = getMostRecentSyncJobId(connectionId); - if (lastJob.getStatus() != JobStatus.SUCCEEDED) { - return lastJob; - } - - JobRead mostRecentSyncJob = 
getMostRecentSyncJobId(connectionId); - while (mostRecentSyncJob.getId().equals(lastJob.getId())) { - Thread.sleep(Duration.ofSeconds(10).toMillis()); - mostRecentSyncJob = getMostRecentSyncJobId(connectionId); - } - return mostRecentSyncJob; - } - - /** - * Waits for the given connection to finish, waiting at 30s intervals, until maxRetries is reached. - * - * @param connectionId the connection to wait for - * @param maxRetries the number of times to retry - * @throws InterruptedException - */ - private void waitForSuccessfulJobWithRetries(final UUID connectionId, final int maxRetries) throws InterruptedException { - int i; - for (i = 0; i < maxRetries; i++) { - try { - final JobRead jobInfo = testHarness.getMostRecentSyncJobId(connectionId); - waitForSuccessfulJob(apiClient.getJobsApi(), jobInfo); - break; - } catch (final Exception e) { - LOGGER.info("Something went wrong querying jobs API, retrying..."); - } - sleep(Duration.ofSeconds(30).toMillis()); - } - - if (i == maxRetries) { - LOGGER.error("Sync job did not complete within 5 minutes"); - } - } - - // This test is disabled because it takes a couple of minutes to run, as it is testing timeouts. - // It should be re-enabled when the @SlowIntegrationTest can be applied to it. 
- // See relevant issue: https://github.com/airbytehq/airbyte/issues/8397 - @Test - @Disabled - void testFailureTimeout() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MAX_RECORDS, 1000) - .put(MESSAGE_INTERVAL, 100) - .build())); - - // Destination fails after processing 5 messages, so the job should fail after the graceful close - // timeout of 1 minute - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "FAILING") - .put("num_messages", 5) - .build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - - final UUID connectionId = - testHarness.createConnection(TEST_CONNECTION, sourceId, destinationId, Collections.emptyList(), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - // wait to get out of pending. 
- final JobRead runningJob = - waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - - // wait for job for max of 3 minutes, by which time the job attempt should have failed - waitWhileJobHasStatus(apiClient.getJobsApi(), runningJob, Sets.newHashSet(JobStatus.RUNNING), Duration.ofMinutes(3)); - - final JobIdRequestBody jobId = new JobIdRequestBody().id(runningJob.getId()); - final JobInfoRead jobInfo = apiClient.getJobsApi().getJobInfo(jobId); - final AttemptInfoRead attemptInfoRead = jobInfo.getAttempts().get(jobInfo.getAttempts().size() - 1); - - // assert that the job attempt failed, and cancel the job regardless of status to prevent retries - try { - assertEquals(AttemptStatus.FAILED, attemptInfoRead.getAttempt().getStatus()); - } finally { - apiClient.getJobsApi().cancelJob(jobId); - } - } - -} diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/CdcAcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/CdcAcceptanceTests.java deleted file mode 100644 index 57a5435ba57b..000000000000 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/CdcAcceptanceTests.java +++ /dev/null @@ -1,602 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.test.acceptance; - -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COLUMN_ID; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.COLUMN_NAME; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForSuccessfulJob; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.WebBackendApi; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.AirbyteStream; -import io.airbyte.api.client.model.generated.AirbyteStreamAndConfiguration; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.client.model.generated.ConnectionRead; -import io.airbyte.api.client.model.generated.ConnectionScheduleType; -import io.airbyte.api.client.model.generated.ConnectionState; -import io.airbyte.api.client.model.generated.ConnectionStateType; -import io.airbyte.api.client.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationSyncMode; -import io.airbyte.api.client.model.generated.JobInfoRead; -import io.airbyte.api.client.model.generated.JobRead; -import io.airbyte.api.client.model.generated.OperationRead; -import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.SourceDefinitionRead; -import io.airbyte.api.client.model.generated.SourceRead; -import 
io.airbyte.api.client.model.generated.StreamDescriptor; -import io.airbyte.api.client.model.generated.StreamState; -import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.api.client.model.generated.WebBackendConnectionUpdate; -import io.airbyte.commons.json.Jsons; -import io.airbyte.db.Database; -import io.airbyte.test.utils.AirbyteAcceptanceTestHarness; -import io.airbyte.test.utils.SchemaTableNamePair; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import org.jooq.Record; -import org.jooq.Result; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInfo; -import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * These tests test the CDC source behavior in Airbyte, ensuring that the behavior of syncs when in - * CDC mode is as expected - *

    - * Some of the tests in this class are specifically testing partial reset behavior when in CDC mode, - * support for which was recently added to the postgres connector. - *

    - * These tests are disabled in Kube, similar to the BasicAcceptanceTests, because they aren't - * testing any behavior that is specific to or dependent on this being run on kube vs docker. - * Therefore, since operations tend to take longer to perform on kube, there is little value in - * re-running these tests on kube when we already run them on docker. - */ -@DisabledIfEnvironmentVariable(named = "KUBE", - matches = "true") -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class CdcAcceptanceTests { - - record DestinationCdcRecordMatcher(JsonNode sourceRecord, Instant minUpdatedAt, Optional minDeletedAt) { - - } - - private static final Logger LOGGER = LoggerFactory.getLogger(BasicAcceptanceTests.class); - - private static final String POSTGRES_INIT_SQL_FILE = "postgres_init_cdc.sql"; - private static final String CDC_METHOD = "CDC"; - // must match replication slot name used in the above POSTGRES_INIT_SQL_FILE - private static final String REPLICATION_SLOT = "airbyte_slot"; - // must match publication name used in the above POSTGRES_INIT_SQL_FILE - private static final String PUBLICATION = "airbyte_publication"; - private static final Integer INITIAL_WAITING_SECONDS = 5; - - private static final String SOURCE_NAME = "CDC Source"; - private static final String CONNECTION_NAME = "test-connection"; - private static final String SCHEMA_NAME = "public"; - private static final String CDC_UPDATED_AT_COLUMN = "_ab_cdc_updated_at"; - private static final String CDC_DELETED_AT_COLUMN = "_ab_cdc_deleted_at"; - private static final String ID_AND_NAME_TABLE = "id_and_name"; - private static final String COLOR_PALETTE_TABLE = "color_palette"; - private static final String COLUMN_COLOR = "color"; - private static final String STARTING = "Starting {}"; - private static final String STARTING_SYNC_ONE = "Starting {} sync 1"; - - // version of the postgres destination connector that was built with the - // old Airbyte protocol that does not contain any per-stream 
logic/fields - private static final String POSTGRES_DESTINATION_LEGACY_CONNECTOR_VERSION = "0.3.19"; - - private static AirbyteApiClient apiClient; - private static WebBackendApi webBackendApi; - private static UUID workspaceId; - private static OperationRead operationRead; - - private AirbyteAcceptanceTestHarness testHarness; - - @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { - apiClient = new AirbyteApiClient( - new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api")); - webBackendApi = new WebBackendApi( - new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api")); - // work in whatever default workspace is present. - workspaceId = apiClient.getWorkspaceApi().listWorkspaces().getWorkspaces().get(0).getWorkspaceId(); - LOGGER.info("workspaceId = " + workspaceId); - - // log which connectors are being used. - final SourceDefinitionRead sourceDef = apiClient.getSourceDefinitionApi() - .getSourceDefinition(new SourceDefinitionIdRequestBody() - .sourceDefinitionId(UUID.fromString("decd338e-5647-4c0b-adf4-da0e75f5a750"))); - final DestinationDefinitionRead destinationDef = apiClient.getDestinationDefinitionApi() - .getDestinationDefinition(new DestinationDefinitionIdRequestBody() - .destinationDefinitionId(UUID.fromString("25c5221d-dce2-4163-ade9-739ef790f503"))); - LOGGER.info("pg source definition: {}", sourceDef.getDockerImageTag()); - LOGGER.info("pg destination definition: {}", destinationDef.getDockerImageTag()); - } - - @BeforeEach - void setup() throws URISyntaxException, IOException, InterruptedException, ApiException, SQLException { - testHarness = new AirbyteAcceptanceTestHarness(apiClient, workspaceId, POSTGRES_INIT_SQL_FILE); - testHarness.setup(); - } - - @AfterEach - void end() { - testHarness.cleanup(); - testHarness.stopDbAndContainers(); - } - - @Test - void testIncrementalCdcSync(final TestInfo testInfo) 
throws Exception { - LOGGER.info(STARTING, testInfo.getDisplayName()); - - final UUID connectionId = createCdcConnection(); - LOGGER.info(STARTING_SYNC_ONE, testInfo.getDisplayName()); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - LOGGER.info("state after sync 1: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - final Database source = testHarness.getSourceDatabase(); - - List expectedIdAndNameRecords = getCdcRecordMatchersFromSource(source, ID_AND_NAME_TABLE); - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - - List expectedColorPaletteRecords = getCdcRecordMatchersFromSource(source, COLOR_PALETTE_TABLE); - assertDestinationMatches(COLOR_PALETTE_TABLE, expectedColorPaletteRecords); - - final List expectedStreams = List.of( - new StreamDescriptor().namespace(SCHEMA_NAME).name(ID_AND_NAME_TABLE), - new StreamDescriptor().namespace(SCHEMA_NAME).name(COLOR_PALETTE_TABLE)); - assertGlobalStateContainsStreams(connectionId, expectedStreams); - - final Instant beforeFirstUpdate = Instant.now(); - - LOGGER.info("Inserting and updating source db records"); - // add new records and run again. - // add a new record - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'geralt')")); - // mutate a record that was already synced with out updating its cursor value. 
- // since this is a CDC connection, the destination should contain a record with this - // new value and an updated_at time corresponding to this update query - source.query(ctx -> ctx.execute("UPDATE id_and_name SET name='yennefer' WHERE id=2")); - expectedIdAndNameRecords.add(new DestinationCdcRecordMatcher( - Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, "geralt").build()), - beforeFirstUpdate, - Optional.empty())); - expectedIdAndNameRecords.add(new DestinationCdcRecordMatcher( - Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 2).put(COLUMN_NAME, "yennefer").build()), - beforeFirstUpdate, - Optional.empty())); - - // do the same for the other table - source.query(ctx -> ctx.execute("INSERT INTO color_palette(id, color) VALUES(4, 'yellow')")); - source.query(ctx -> ctx.execute("UPDATE color_palette SET color='purple' WHERE id=2")); - expectedColorPaletteRecords.add(new DestinationCdcRecordMatcher( - Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 4).put(COLUMN_COLOR, "yellow").build()), - beforeFirstUpdate, - Optional.empty())); - expectedColorPaletteRecords.add(new DestinationCdcRecordMatcher( - Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 2).put(COLUMN_COLOR, "purple").build()), - beforeFirstUpdate, - Optional.empty())); - - LOGGER.info("Starting {} sync 2", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - LOGGER.info("state after sync 2: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - assertDestinationMatches(COLOR_PALETTE_TABLE, expectedColorPaletteRecords); - assertGlobalStateContainsStreams(connectionId, expectedStreams); - - // reset back to no data. 
- - LOGGER.info("Starting {} reset", testInfo.getDisplayName()); - final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), jobInfoRead.getJob()); - - LOGGER.info("state after reset: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - assertDestinationMatches(ID_AND_NAME_TABLE, Collections.emptyList()); - assertDestinationMatches(COLOR_PALETTE_TABLE, Collections.emptyList()); - assertNoState(connectionId); - - // sync one more time. verify it is the equivalent of a full refresh. - LOGGER.info("Starting {} sync 3", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead3 = - apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead3.getJob()); - LOGGER.info("state after sync 3: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - expectedIdAndNameRecords = getCdcRecordMatchersFromSource(source, ID_AND_NAME_TABLE); - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - - expectedColorPaletteRecords = getCdcRecordMatchersFromSource(source, COLOR_PALETTE_TABLE); - assertDestinationMatches(COLOR_PALETTE_TABLE, expectedColorPaletteRecords); - - assertGlobalStateContainsStreams(connectionId, expectedStreams); - } - - // tests that incremental syncs still work properly even when using a destination connector that was - // built on the old protocol that did not have any per-stream state fields - @Test - void testIncrementalCdcSyncWithLegacyDestinationConnector(final TestInfo testInfo) throws Exception { - LOGGER.info(STARTING, testInfo.getDisplayName()); - final UUID postgresDestDefId = testHarness.getPostgresDestinationDefinitionId(); - // Fetch the current/most recent source definition version - final 
DestinationDefinitionRead destinationDefinitionRead = apiClient.getDestinationDefinitionApi().getDestinationDefinition( - new DestinationDefinitionIdRequestBody().destinationDefinitionId(postgresDestDefId)); - LOGGER.info("Current postgres destination definition version: {}", destinationDefinitionRead.getDockerImageTag()); - - try { - LOGGER.info("Setting postgres destination definition to version {}", POSTGRES_DESTINATION_LEGACY_CONNECTOR_VERSION); - testHarness.updateDestinationDefinitionVersion(postgresDestDefId, POSTGRES_DESTINATION_LEGACY_CONNECTOR_VERSION); - - testIncrementalCdcSync(testInfo); - } finally { - // set postgres destination definition back to latest version for other tests - LOGGER.info("Setting postgres destination definition back to version {}", destinationDefinitionRead.getDockerImageTag()); - testHarness.updateDestinationDefinitionVersion(postgresDestDefId, destinationDefinitionRead.getDockerImageTag()); - } - } - - @Test - void testDeleteRecordCdcSync(final TestInfo testInfo) throws Exception { - LOGGER.info(STARTING, testInfo.getDisplayName()); - - final UUID connectionId = createCdcConnection(); - LOGGER.info(STARTING_SYNC_ONE, testInfo.getDisplayName()); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - LOGGER.info("state after sync 1: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - final Database source = testHarness.getSourceDatabase(); - final List expectedIdAndNameRecords = getCdcRecordMatchersFromSource(source, ID_AND_NAME_TABLE); - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - - final Instant beforeDelete = Instant.now(); - - LOGGER.info("Deleting record"); - // delete a record - source.query(ctx -> ctx.execute("DELETE FROM id_and_name WHERE id=1")); - - final Map 
deletedRecordMap = new HashMap<>(); - deletedRecordMap.put(COLUMN_ID, 1); - deletedRecordMap.put(COLUMN_NAME, null); - expectedIdAndNameRecords.add(new DestinationCdcRecordMatcher( - Jsons.jsonNode(deletedRecordMap), - beforeDelete, - Optional.of(beforeDelete))); - - LOGGER.info("Starting {} sync 2", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - LOGGER.info("state after sync 2: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); - - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - } - - @Test - void testPartialResetFromSchemaUpdate(final TestInfo testInfo) throws Exception { - LOGGER.info(STARTING, testInfo.getDisplayName()); - - final UUID connectionId = createCdcConnection(); - LOGGER.info(STARTING_SYNC_ONE, testInfo.getDisplayName()); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - - final Database source = testHarness.getSourceDatabase(); - - final List expectedIdAndNameRecords = getCdcRecordMatchersFromSource(source, ID_AND_NAME_TABLE); - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - - final List expectedColorPaletteRecords = getCdcRecordMatchersFromSource(source, COLOR_PALETTE_TABLE); - assertDestinationMatches(COLOR_PALETTE_TABLE, expectedColorPaletteRecords); - - final StreamDescriptor idAndNameStreamDescriptor = new StreamDescriptor().namespace(SCHEMA_NAME).name(ID_AND_NAME_TABLE); - final StreamDescriptor colorPaletteStreamDescriptor = new StreamDescriptor().namespace(SCHEMA_NAME).name(COLOR_PALETTE_TABLE); - assertGlobalStateContainsStreams(connectionId, 
List.of(idAndNameStreamDescriptor, colorPaletteStreamDescriptor)); - - LOGGER.info("Removing color palette table"); - source.query(ctx -> ctx.dropTable(COLOR_PALETTE_TABLE).execute()); - - LOGGER.info("Refreshing schema and updating connection"); - final ConnectionRead connectionRead = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - final UUID sourceId = createCdcSource().getSourceId(); - final AirbyteCatalog refreshedCatalog = testHarness.discoverSourceSchema(sourceId); - LOGGER.info("Refreshed catalog: {}", refreshedCatalog); - final WebBackendConnectionUpdate update = testHarness.getUpdateInput(connectionRead, refreshedCatalog, operationRead); - webBackendApi.webBackendUpdateConnection(update); - - LOGGER.info("Waiting for sync job after update to complete"); - final JobRead syncFromTheUpdate = testHarness.waitUntilTheNextJobIsStarted(connectionId); - waitForSuccessfulJob(apiClient.getJobsApi(), syncFromTheUpdate); - - // We do not check that the source and the dest are in sync here because removing a stream doesn't - // delete its data in the destination - assertGlobalStateContainsStreams(connectionId, List.of(idAndNameStreamDescriptor)); - } - - @Test - void testPartialResetFromStreamSelection(final TestInfo testInfo) throws Exception { - LOGGER.info(STARTING, testInfo.getDisplayName()); - - final UUID connectionId = createCdcConnection(); - LOGGER.info(STARTING_SYNC_ONE, testInfo.getDisplayName()); - - final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead1.getJob()); - - final Database source = testHarness.getSourceDatabase(); - - final List expectedIdAndNameRecords = getCdcRecordMatchersFromSource(source, ID_AND_NAME_TABLE); - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - - final List expectedColorPaletteRecords = 
getCdcRecordMatchersFromSource(source, COLOR_PALETTE_TABLE); - assertDestinationMatches(COLOR_PALETTE_TABLE, expectedColorPaletteRecords); - - final StreamDescriptor idAndNameStreamDescriptor = new StreamDescriptor().namespace(SCHEMA_NAME).name(ID_AND_NAME_TABLE); - final StreamDescriptor colorPaletteStreamDescriptor = new StreamDescriptor().namespace(SCHEMA_NAME).name(COLOR_PALETTE_TABLE); - assertGlobalStateContainsStreams(connectionId, List.of(idAndNameStreamDescriptor, colorPaletteStreamDescriptor)); - - LOGGER.info("Removing color palette stream from configured catalog"); - final ConnectionRead connectionRead = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - final UUID sourceId = connectionRead.getSourceId(); - AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final List streams = catalog.getStreams(); - // filter out color_palette stream - final List updatedStreams = streams - .stream() - .filter(stream -> !COLOR_PALETTE_TABLE.equals(stream.getStream().getName())) - .toList(); - catalog.setStreams(updatedStreams); - LOGGER.info("Updated catalog: {}", catalog); - WebBackendConnectionUpdate update = testHarness.getUpdateInput(connectionRead, catalog, operationRead); - webBackendApi.webBackendUpdateConnection(update); - - LOGGER.info("Waiting for sync job after update to start"); - JobRead syncFromTheUpdate = testHarness.waitUntilTheNextJobIsStarted(connectionId); - LOGGER.info("Waiting for sync job after update to complete"); - waitForSuccessfulJob(apiClient.getJobsApi(), syncFromTheUpdate); - - // We do not check that the source and the dest are in sync here because removing a stream doesn't - // delete its data in the destination - assertGlobalStateContainsStreams(connectionId, List.of(idAndNameStreamDescriptor)); - - LOGGER.info("Adding color palette stream back to configured catalog"); - catalog = testHarness.discoverSourceSchema(sourceId); - LOGGER.info("Updated catalog: {}", 
catalog); - update = testHarness.getUpdateInput(connectionRead, catalog, operationRead); - webBackendApi.webBackendUpdateConnection(update); - - LOGGER.info("Waiting for sync job after update to start"); - syncFromTheUpdate = testHarness.waitUntilTheNextJobIsStarted(connectionId); - LOGGER.info("Checking that id_and_name table is unaffected by the partial reset"); - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - LOGGER.info("Checking that color_palette table was cleared in the destination due to the reset triggered by the update"); - assertDestinationMatches(COLOR_PALETTE_TABLE, List.of()); - LOGGER.info("Waiting for sync job after update to complete"); - waitForSuccessfulJob(apiClient.getJobsApi(), syncFromTheUpdate); - - // Verify that color palette table records exist in destination again after sync. - // If we see 0 records for this table in the destination, that means the CDC partial reset logic is - // not working properly, and it continued from the replication log cursor for this stream despite - // this stream's state being reset - assertDestinationMatches(COLOR_PALETTE_TABLE, expectedColorPaletteRecords); - assertGlobalStateContainsStreams(connectionId, List.of(idAndNameStreamDescriptor, colorPaletteStreamDescriptor)); - - // Verify that incremental still works properly after partial reset - LOGGER.info("Adding new records to tables"); - final Instant beforeInsert = Instant.now(); - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'geralt')")); - expectedIdAndNameRecords.add(new DestinationCdcRecordMatcher( - Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, "geralt").build()), - beforeInsert, - Optional.empty())); - - source.query(ctx -> ctx.execute("INSERT INTO color_palette(id, color) VALUES(4, 'yellow')")); - expectedColorPaletteRecords.add(new DestinationCdcRecordMatcher( - Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 4).put(COLUMN_COLOR, "yellow").build()), - 
beforeInsert, - Optional.empty())); - - LOGGER.info("Starting {} sync after insert", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = apiClient.getConnectionApi() - .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead2.getJob()); - - assertDestinationMatches(ID_AND_NAME_TABLE, expectedIdAndNameRecords); - assertDestinationMatches(COLOR_PALETTE_TABLE, expectedColorPaletteRecords); - assertGlobalStateContainsStreams(connectionId, List.of(idAndNameStreamDescriptor, colorPaletteStreamDescriptor)); - } - - private List getCdcRecordMatchersFromSource(final Database source, final String tableName) throws SQLException { - final List sourceRecords = testHarness.retrieveSourceRecords(source, tableName); - return new ArrayList<>(sourceRecords - .stream() - .map(sourceRecord -> new DestinationCdcRecordMatcher(sourceRecord, Instant.EPOCH, Optional.empty())) - .toList()); - } - - private UUID createCdcConnection() throws ApiException { - final SourceRead sourceRead = createCdcSource(); - final UUID sourceId = sourceRead.getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - - operationRead = testHarness.createOperation(); - final UUID operationId = operationRead.getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final AirbyteStream stream = catalog.getStreams().get(0).getStream(); - LOGGER.info("stream: {}", stream); - - assertEquals(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), stream.getSupportedSyncModes()); - assertTrue(stream.getSourceDefinedCursor()); - assertTrue(stream.getDefaultCursorField().isEmpty()); - assertEquals(List.of(List.of("id")), stream.getSourceDefinedPrimaryKey()); - - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND; - catalog.getStreams().forEach(s -> 
s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(CONNECTION_NAME, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - return connectionId; - } - - private SourceRead createCdcSource() throws ApiException { - final UUID postgresSourceDefinitionId = testHarness.getPostgresSourceDefinitionId(); - final JsonNode sourceDbConfig = testHarness.getSourceDbConfig(); - final Map sourceDbConfigMap = Jsons.object(sourceDbConfig, Map.class); - sourceDbConfigMap.put("is_test", true); - sourceDbConfigMap.put("replication_method", ImmutableMap.builder() - .put("method", CDC_METHOD) - .put("replication_slot", REPLICATION_SLOT) - .put("publication", PUBLICATION) - .put("initial_waiting_seconds", INITIAL_WAITING_SECONDS) - .build()); - LOGGER.info("final sourceDbConfigMap: {}", sourceDbConfigMap); - - return testHarness.createSource( - SOURCE_NAME, - workspaceId, - postgresSourceDefinitionId, - Jsons.jsonNode(sourceDbConfigMap)); - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - private void assertDestinationMatches(final String streamName, final List expectedDestRecordMatchers) - throws Exception { - final List destRecords = testHarness.retrieveRawDestinationRecords(new SchemaTableNamePair(SCHEMA_NAME, streamName)); - if (destRecords.size() != expectedDestRecordMatchers.size()) { - final String errorMessage = String.format( - "The number of destination records %d does not match the expected number %d", - destRecords.size(), - expectedDestRecordMatchers.size()); - LOGGER.error(errorMessage); - LOGGER.error("Expected dest record matchers: {}\nActual destination records: {}", expectedDestRecordMatchers, destRecords); - throw new IllegalStateException(errorMessage); - } - - for (final DestinationCdcRecordMatcher recordMatcher : expectedDestRecordMatchers) { - final List 
matchingDestRecords = destRecords.stream().filter(destRecord -> { - final Map sourceRecordMap = Jsons.object(recordMatcher.sourceRecord, Map.class); - final Map destRecordMap = Jsons.object(destRecord, Map.class); - - final boolean sourceRecordValuesMatch = sourceRecordMap.keySet() - .stream() - .allMatch(column -> Objects.equals(sourceRecordMap.get(column), destRecordMap.get(column))); - - final Object cdcUpdatedAtValue = destRecordMap.get(CDC_UPDATED_AT_COLUMN); - // use epoch millis to guarantee the two values are at the same precision - final boolean cdcUpdatedAtMatches = cdcUpdatedAtValue != null - && Instant.parse(String.valueOf(cdcUpdatedAtValue)).toEpochMilli() >= recordMatcher.minUpdatedAt.toEpochMilli(); - - final Object cdcDeletedAtValue = destRecordMap.get(CDC_DELETED_AT_COLUMN); - final boolean cdcDeletedAtMatches; - if (recordMatcher.minDeletedAt.isPresent()) { - cdcDeletedAtMatches = cdcDeletedAtValue != null - && Instant.parse(String.valueOf(cdcDeletedAtValue)).toEpochMilli() >= recordMatcher.minDeletedAt.get().toEpochMilli(); - } else { - cdcDeletedAtMatches = cdcDeletedAtValue == null; - } - - return sourceRecordValuesMatch && cdcUpdatedAtMatches && cdcDeletedAtMatches; - }).toList(); - - if (matchingDestRecords.isEmpty()) { - throw new IllegalStateException(String.format( - "Could not find a matching CDC destination record for record matcher %s. 
Destination records: %s", recordMatcher, destRecords)); - } - if (matchingDestRecords.size() > 1) { - throw new IllegalStateException(String.format( - "Expected only one matching CDC destination record for record matcher %s, but found multiple: %s", recordMatcher, matchingDestRecords)); - } - } - } - - private void assertGlobalStateContainsStreams(final UUID connectionId, final List expectedStreams) throws ApiException { - final ConnectionState state = apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); - LOGGER.info("state: {}", state); - assertEquals(ConnectionStateType.GLOBAL, state.getStateType()); - final List stateStreams = state.getGlobalState().getStreamStates().stream().map(StreamState::getStreamDescriptor).toList(); - - Assertions.assertTrue(stateStreams.containsAll(expectedStreams) && expectedStreams.containsAll(stateStreams), - String.format("Expected state to have streams %s, but it actually had streams %s", expectedStreams, stateStreams)); - } - - private void assertNoState(final UUID connectionId) throws ApiException { - final ConnectionState state = apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId)); - assertEquals(ConnectionStateType.NOT_SET, state.getStateType()); - assertNull(state.getState()); - assertNull(state.getStreamState()); - assertNull(state.getGlobalState()); - } - - // can be helpful for debugging - @SuppressWarnings("PMD.UnusedPrivateMethod") - private void printDbs() throws SQLException { - final Database sourceDb = testHarness.getSourceDatabase(); - Set pairs = testHarness.listAllTables(sourceDb); - LOGGER.info("Printing source tables"); - for (final SchemaTableNamePair pair : pairs) { - final Result result = sourceDb.query( - context -> context.fetch(String.format("SELECT * FROM %s.%s", pair.schemaName(), pair.tableName()))); - LOGGER.info("{}.{} contents:\n{}", pair.schemaName(), pair.tableName(), result); - } - - final Database destDb = 
testHarness.getDestinationDatabase(); - pairs = testHarness.listAllTables(destDb); - LOGGER.info("Printing destination tables"); - for (final SchemaTableNamePair pair : pairs) { - final Result result = destDb.query(context -> context.fetch(String.format("SELECT * FROM %s.%s", pair.schemaName(), pair.tableName()))); - LOGGER.info("{}.{} contents:\n{}", pair.schemaName(), pair.tableName(), result); - } - } - -} diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/ContainerOrchestratorAcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/ContainerOrchestratorAcceptanceTests.java deleted file mode 100644 index 7ed69814a6f1..000000000000 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/ContainerOrchestratorAcceptanceTests.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.test.acceptance; - -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForSuccessfulJob; -import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitWhileJobHasStatus; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.client.model.generated.ConnectionScheduleType; -import io.airbyte.api.client.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationSyncMode; -import io.airbyte.api.client.model.generated.JobIdRequestBody; -import io.airbyte.api.client.model.generated.JobInfoRead; -import io.airbyte.api.client.model.generated.JobStatus; -import 
io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.SourceDefinitionRead; -import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.test.utils.AirbyteAcceptanceTestHarness; -import io.fabric8.kubernetes.client.KubernetesClient; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -/** - * This class tests behavior that is specific to container-orchestrator deployments, such as scaling - * down and back up workers while a sync is running to ensure it is not affected by a deployment. - *

    - * This test class is only enabled if the KUBE environment variable is true, because container - * orchestrators are currently only used by kuberenetes deployments, as container orchestrators have - * not yet been ported over to docker. - */ -@SuppressWarnings({"rawtypes", "ConstantConditions"}) -@EnabledIfEnvironmentVariable(named = "KUBE", - matches = "true") -class ContainerOrchestratorAcceptanceTests { - - private static final Logger LOGGER = LoggerFactory.getLogger(ContainerOrchestratorAcceptanceTests.class); - private static final String AIRBYTE_WORKER = "airbyte-worker"; - private static final String DEFAULT = "default"; - - private static AirbyteAcceptanceTestHarness testHarness; - private static AirbyteApiClient apiClient; - private static UUID workspaceId; - private static KubernetesClient kubernetesClient; - - @SuppressWarnings("UnstableApiUsage") - @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { - apiClient = new AirbyteApiClient( - new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api")); - // work in whatever default workspace is present. - workspaceId = apiClient.getWorkspaceApi().listWorkspaces().getWorkspaces().get(0).getWorkspaceId(); - LOGGER.info("workspaceId = " + workspaceId); - - // log which connectors are being used. 
- final SourceDefinitionRead sourceDef = apiClient.getSourceDefinitionApi() - .getSourceDefinition(new SourceDefinitionIdRequestBody() - .sourceDefinitionId(UUID.fromString("decd338e-5647-4c0b-adf4-da0e75f5a750"))); - final DestinationDefinitionRead destinationDef = apiClient.getDestinationDefinitionApi() - .getDestinationDefinition(new DestinationDefinitionIdRequestBody() - .destinationDefinitionId(UUID.fromString("25c5221d-dce2-4163-ade9-739ef790f503"))); - LOGGER.info("pg source definition: {}", sourceDef.getDockerImageTag()); - LOGGER.info("pg destination definition: {}", destinationDef.getDockerImageTag()); - - testHarness = new AirbyteAcceptanceTestHarness(apiClient, workspaceId); - kubernetesClient = testHarness.getKubernetesClient(); - } - - @AfterAll - static void end() { - testHarness.stopDbAndContainers(); - } - - @BeforeEach - void setup() throws URISyntaxException, IOException, SQLException { - testHarness.setup(); - } - - // This test is flaky. Warnings are suppressed until that condition us understood - // See: https://github.com/airbytehq/airbyte/issues/19948 - @Test - @Disabled("Flaky test, to be investigated before re-enabling") - @SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") - void testDowntimeDuringSync() throws Exception { - // NOTE: PMD assert warning suppressed because the assertion was flaky. The test will throw if the - // sync does not succeed. 
- final String connectionName = "test-connection"; - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - - LOGGER.info("Creating connection..."); - final UUID connectionId = - testHarness.createConnection(connectionName, sourceId, destinationId, List.of(), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - LOGGER.info("Run manual sync..."); - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - LOGGER.info("Waiting for job to run..."); - waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.PENDING)); - - LOGGER.info("Scaling down worker..."); - kubernetesClient.apps().deployments().inNamespace(DEFAULT).withName(AIRBYTE_WORKER).scale(0, true); - - LOGGER.info("Scaling up worker..."); - kubernetesClient.apps().deployments().inNamespace(DEFAULT).withName(AIRBYTE_WORKER).scale(1); - - waitForSuccessfulJob(apiClient.getJobsApi(), connectionSyncRead.getJob()); - } - - @AfterEach - void tearDown() { - testHarness.cleanup(); - } - - @Test - void testCancelSyncWithInterruption() throws Exception { - final String connectionName = "test-connection"; - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = 
SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.PENDING)); - - kubernetesClient.apps().deployments().inNamespace(DEFAULT).withName(AIRBYTE_WORKER).scale(0, true); - kubernetesClient.apps().deployments().inNamespace(DEFAULT).withName(AIRBYTE_WORKER).scale(1); - - final var resp = apiClient.getJobsApi().cancelJob(new JobIdRequestBody().id(connectionSyncRead.getJob().getId())); - assertEquals(JobStatus.CANCELLED, resp.getJob().getStatus()); - } - - @Test - void testCancelSyncWhenCancelledWhenWorkerIsNotRunning() throws Exception { - final String connectionName = "test-connection"; - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID operationId = testHarness.createOperation().getOperationId(); - final AirbyteCatalog catalog = testHarness.discoverSourceSchema(sourceId); - final SyncMode syncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(syncMode).destinationSyncMode(destinationSyncMode)); - - LOGGER.info("Creating connection..."); - final UUID connectionId = - testHarness.createConnection(connectionName, sourceId, destinationId, List.of(operationId), catalog, ConnectionScheduleType.MANUAL, null) - .getConnectionId(); - - LOGGER.info("Waiting 
for connection to be available in Temporal..."); - - LOGGER.info("Run manual sync..."); - final JobInfoRead connectionSyncRead = apiClient.getConnectionApi().syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - - LOGGER.info("Waiting for job to run..."); - waitWhileJobHasStatus(apiClient.getJobsApi(), connectionSyncRead.getJob(), Set.of(JobStatus.PENDING)); - - LOGGER.info("Waiting for job to run a little..."); - Thread.sleep(1000); - - LOGGER.info("Scale down workers..."); - kubernetesClient.apps().deployments().inNamespace(DEFAULT).withName(AIRBYTE_WORKER).scale(0, true); - - LOGGER.info("Starting background cancellation request..."); - final var pool = Executors.newSingleThreadExecutor(); - final var mdc = MDC.getCopyOfContextMap(); - final Future resp = - pool.submit(() -> { - MDC.setContextMap(mdc); - try { - final JobInfoRead jobInfoRead = apiClient.getJobsApi().cancelJob(new JobIdRequestBody().id(connectionSyncRead.getJob().getId())); - LOGGER.info("jobInfoRead = " + jobInfoRead); - return jobInfoRead; - } catch (final ApiException e) { - LOGGER.error("Failed to read from api", e); - throw e; - } - }); - Thread.sleep(2000); - - LOGGER.info("Scaling up workers..."); - kubernetesClient.apps().deployments().inNamespace(DEFAULT).withName(AIRBYTE_WORKER).scale(1); - - LOGGER.info("Waiting for cancellation to go into effect..."); - assertEquals(JobStatus.CANCELLED, resp.get().getJob().getStatus()); - } - -} diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/VersioningAcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/VersioningAcceptanceTests.java deleted file mode 100644 index f36977f22014..000000000000 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/VersioningAcceptanceTests.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.test.acceptance; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.CustomDestinationDefinitionCreate; -import io.airbyte.api.client.model.generated.CustomSourceDefinitionCreate; -import io.airbyte.api.client.model.generated.DestinationDefinitionCreate; -import io.airbyte.api.client.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.SourceDefinitionCreate; -import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.client.model.generated.SourceDefinitionRead; -import io.airbyte.test.utils.AirbyteAcceptanceTestHarness; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.sql.SQLException; -import java.util.UUID; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; - -class VersioningAcceptanceTests { - - private static AirbyteApiClient apiClient; - private static UUID workspaceId; - - private static AirbyteAcceptanceTestHarness testHarness; - - @BeforeAll - static void init() throws ApiException, URISyntaxException, IOException, InterruptedException { - apiClient = new AirbyteApiClient( - new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api")); - - workspaceId = apiClient.getWorkspaceApi().listWorkspaces().getWorkspaces().get(0).getWorkspaceId(); - - testHarness = new AirbyteAcceptanceTestHarness(apiClient, workspaceId); - } - - @AfterAll - static void 
afterAll() { - testHarness.stopDbAndContainers(); - } - - @BeforeEach - void setup() throws SQLException, URISyntaxException, IOException { - testHarness.setup(); - } - - @AfterEach - void tearDown() { - testHarness.cleanup(); - } - - @ParameterizedTest - @CsvSource({ - "2.1.1, 0.2.0", - "2.1.2, 0.2.1", - }) - void testCreateSourceSpec(final String dockerImageTag, final String expectedProtocolVersion) - throws ApiException, URISyntaxException { - final CustomSourceDefinitionCreate sourceDefinitionCreate = new CustomSourceDefinitionCreate() - .workspaceId(workspaceId) - .sourceDefinition(new SourceDefinitionCreate() - .dockerImageTag(dockerImageTag) - .dockerRepository("airbyte/source-e2e-test") - .documentationUrl(new URI("https://hub.docker.com/r/airbyte/source-e2e-test")) - .name("Source E2E Test Connector")); - - final SourceDefinitionRead sourceDefinitionRead = apiClient.getSourceDefinitionApi().createCustomSourceDefinition(sourceDefinitionCreate); - assertEquals(expectedProtocolVersion, sourceDefinitionRead.getProtocolVersion()); - - final SourceDefinitionIdRequestBody sourceDefinitionReq = new SourceDefinitionIdRequestBody() - .sourceDefinitionId(sourceDefinitionRead.getSourceDefinitionId()); - final SourceDefinitionRead sourceDefinitionReadSanityCheck = - apiClient.getSourceDefinitionApi().getSourceDefinition(sourceDefinitionReq); - assertEquals(sourceDefinitionRead.getProtocolVersion(), sourceDefinitionReadSanityCheck.getProtocolVersion()); - - // Clean up the source - apiClient.getSourceDefinitionApi().deleteSourceDefinition(sourceDefinitionReq); - } - - @ParameterizedTest - @CsvSource({ - "2.1.1, 0.2.0", - "2.1.2, 0.2.1", - }) - void testCreateDestinationSpec(final String dockerImageTag, final String expectedProtocolVersion) - throws ApiException, URISyntaxException { - final CustomDestinationDefinitionCreate destDefinitionCreate = - new CustomDestinationDefinitionCreate() - .workspaceId(workspaceId) - .destinationDefinition(new 
DestinationDefinitionCreate() - .dockerImageTag(dockerImageTag) - // We are currently using source because the destination-e2e-test connector is facing a regression - // For the purpose of the test, at this moment, using source works because we only check version - .dockerRepository("airbyte/source-e2e-test") - .documentationUrl(new URI("https://hub.docker.com/r/airbyte/destination-e2e-test")) - .name("Dest E2E Test Connector")); - - final DestinationDefinitionRead destDefinitionRead = - apiClient.getDestinationDefinitionApi().createCustomDestinationDefinition(destDefinitionCreate); - assertEquals(expectedProtocolVersion, destDefinitionRead.getProtocolVersion()); - - final DestinationDefinitionIdRequestBody destDefinitionReq = new DestinationDefinitionIdRequestBody() - .destinationDefinitionId(destDefinitionRead.getDestinationDefinitionId()); - final DestinationDefinitionRead destDefinitionReadSanityCheck = - apiClient.getDestinationDefinitionApi().getDestinationDefinition(destDefinitionReq); - assertEquals(destDefinitionRead.getProtocolVersion(), destDefinitionReadSanityCheck.getProtocolVersion()); - - // Clean up the destination - apiClient.getDestinationDefinitionApi().deleteDestinationDefinition(destDefinitionReq); - } - -} diff --git a/airbyte-tests/src/acceptanceTests/resources/postgres_init.sql b/airbyte-tests/src/acceptanceTests/resources/postgres_init.sql deleted file mode 100644 index 0c35dc457609..000000000000 --- a/airbyte-tests/src/acceptanceTests/resources/postgres_init.sql +++ /dev/null @@ -1,33 +0,0 @@ -CREATE - TABLE - id_and_name( - id INTEGER NOT NULL, - name VARCHAR(200) - ); - -INSERT - INTO - id_and_name( - id, - name - ) - VALUES( - 1, - 'sherif' - ), - ( - 2, - 'charles' - ), - ( - 3, - 'jared' - ), - ( - 4, - 'michel' - ), - ( - 5, - 'john' - ); diff --git a/airbyte-tests/src/acceptanceTests/resources/postgres_init_cdc.sql b/airbyte-tests/src/acceptanceTests/resources/postgres_init_cdc.sql deleted file mode 100644 index 
ce7c2da4a538..000000000000 --- a/airbyte-tests/src/acceptanceTests/resources/postgres_init_cdc.sql +++ /dev/null @@ -1,79 +0,0 @@ -CREATE - TABLE - id_and_name( - id INTEGER PRIMARY KEY, - name VARCHAR(200) - ); - -INSERT - INTO - id_and_name( - id, - name - ) - VALUES( - 1, - 'sherif' - ), - ( - 2, - 'charles' - ), - ( - 3, - 'jared' - ), - ( - 4, - 'michel' - ), - ( - 5, - 'john' - ); - -CREATE - TABLE - color_palette( - id INTEGER PRIMARY KEY, - color VARCHAR(200) - ); - -INSERT - INTO - color_palette( - id, - color - ) - VALUES( - 1, - 'red' - ), - ( - 2, - 'blue' - ), - ( - 3, - 'green' - ); - -CREATE - ROLE airbyte_role REPLICATION LOGIN; - -ALTER TABLE - id_and_name REPLICA IDENTITY DEFAULT; - -ALTER TABLE - color_palette REPLICA IDENTITY DEFAULT; - -CREATE - PUBLICATION airbyte_publication FOR TABLE - id_and_name, - color_palette; - -SELECT - pg_create_logical_replication_slot( - 'airbyte_slot', - 'pgoutput' - ); diff --git a/airbyte-tests/src/acceptanceTests/resources/postgres_second_schema_multiple_tables.sql b/airbyte-tests/src/acceptanceTests/resources/postgres_second_schema_multiple_tables.sql deleted file mode 100644 index 48414b255763..000000000000 --- a/airbyte-tests/src/acceptanceTests/resources/postgres_second_schema_multiple_tables.sql +++ /dev/null @@ -1,58 +0,0 @@ -CREATE - SCHEMA staging; - -CREATE - TABLE - staging.cool_employees( - id INTEGER NOT NULL, - name VARCHAR(200) - ); - -INSERT - INTO - staging.cool_employees( - id, - name - ) - VALUES( - 1, - 'sherif' - ), - ( - 2, - 'charles' - ), - ( - 3, - 'jared' - ), - ( - 4, - 'michel' - ), - ( - 5, - 'john' - ); - -CREATE - TABLE - staging.awesome_people( - id INTEGER NOT NULL, - name VARCHAR(200) - ); - -INSERT - INTO - staging.awesome_people( - id, - name - ) - VALUES( - 1, - 'davin' - ), - ( - 2, - 'chris' - ); diff --git a/airbyte-tests/src/acceptanceTests/resources/postgres_separate_schema_same_table.sql 
b/airbyte-tests/src/acceptanceTests/resources/postgres_separate_schema_same_table.sql deleted file mode 100644 index 6cf35b644a42..000000000000 --- a/airbyte-tests/src/acceptanceTests/resources/postgres_separate_schema_same_table.sql +++ /dev/null @@ -1,36 +0,0 @@ -CREATE - SCHEMA staging; - -CREATE - TABLE - staging.id_and_name( - id INTEGER NOT NULL, - name VARCHAR(200) - ); - -INSERT - INTO - staging.id_and_name( - id, - name - ) - VALUES( - 1, - 'sherif' - ), - ( - 2, - 'charles' - ), - ( - 3, - 'jared' - ), - ( - 4, - 'michel' - ), - ( - 5, - 'john' - ); diff --git a/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/ImportApi.java b/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/ImportApi.java deleted file mode 100644 index 77ffecfb81d9..000000000000 --- a/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/ImportApi.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.test.automaticMigrationAcceptance; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.invoker.generated.ApiResponse; -import io.airbyte.api.client.model.generated.ImportRead; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpRequest.Builder; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.time.Duration; -import java.util.function.Consumer; - -/** - * The reason we are using this class instead of - * {@link io.airbyte.api.client.generated.DeploymentApi is cause there is a bug in the the method - * {@link io.airbyte.api.client.generated.DeploymentApi#importArchiveRequestBuilder(File)}, The - * method specifies the content type as `localVarRequestBuilder.header("Content-Type", - * "application/json");` but its supposed to be localVarRequestBuilder.header("Content-Type", - * "application/x-gzip"); - */ -public class ImportApi { - - private final HttpClient memberVarHttpClient; - private final ObjectMapper memberVarObjectMapper; - private final String memberVarBaseUri; - private final Consumer memberVarInterceptor; - private final Duration memberVarReadTimeout; - private final Consumer> memberVarResponseInterceptor; - - public ImportApi(final ApiClient apiClient) { - memberVarHttpClient = apiClient.getHttpClient(); - memberVarObjectMapper = apiClient.getObjectMapper(); - memberVarBaseUri = apiClient.getBaseUri(); - memberVarInterceptor = apiClient.getRequestInterceptor(); - memberVarReadTimeout = apiClient.getReadTimeout(); - memberVarResponseInterceptor = apiClient.getResponseInterceptor(); - } - - public ImportRead importArchive(final File body) 
throws ApiException { - final ApiResponse localVarResponse = importArchiveWithHttpInfo(body); - return localVarResponse.getData(); - } - - public ApiResponse importArchiveWithHttpInfo(final File body) throws ApiException { - final HttpRequest.Builder localVarRequestBuilder = importArchiveRequestBuilder(body); - try { - final HttpResponse localVarResponse = memberVarHttpClient.send( - localVarRequestBuilder.build(), - HttpResponse.BodyHandlers.ofInputStream()); - if (memberVarResponseInterceptor != null) { - memberVarResponseInterceptor.accept(localVarResponse); - } - if (errorResponse(localVarResponse)) { - throw new ApiException(localVarResponse.statusCode(), - "importArchive call received non-success response", - localVarResponse.headers(), - localVarResponse.body() == null ? null - : new String(localVarResponse.body().readAllBytes(), StandardCharsets.UTF_8)); - } - return new ApiResponse( - localVarResponse.statusCode(), - localVarResponse.headers().map(), - memberVarObjectMapper.readValue(localVarResponse.body(), new TypeReference() {})); - } catch (final IOException e) { - throw new ApiException(e); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - throw new ApiException(e); - } - } - - private Boolean errorResponse(final HttpResponse localVarResponse) { - return localVarResponse.statusCode() / 100 != 2; - } - - private HttpRequest.Builder importArchiveRequestBuilder(final File body) throws ApiException { - // verify the required parameter 'body' is set - if (body == null) { - throw new ApiException(400, - "Missing the required parameter 'body' when calling importArchive"); - } - - final HttpRequest.Builder localVarRequestBuilder = HttpRequest.newBuilder(); - - final String localVarPath = "/v1/deployment/import"; - - localVarRequestBuilder.uri(URI.create(memberVarBaseUri + localVarPath)); - - localVarRequestBuilder.header("Content-Type", "application/x-gzip"); - localVarRequestBuilder.header("Accept", "application/json"); - - 
try { - localVarRequestBuilder.method("POST", HttpRequest.BodyPublishers.ofFile(body.toPath())); - } catch (final IOException e) { - throw new ApiException(e); - } - if (memberVarReadTimeout != null) { - localVarRequestBuilder.timeout(memberVarReadTimeout); - } - if (memberVarInterceptor != null) { - memberVarInterceptor.accept(localVarRequestBuilder); - } - return localVarRequestBuilder; - } - -} diff --git a/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/MigrationAcceptanceTest.java b/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/MigrationAcceptanceTest.java deleted file mode 100644 index fa5c7276c9ea..000000000000 --- a/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/MigrationAcceptanceTest.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.test.automaticMigrationAcceptance; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.api.client.generated.DestinationDefinitionApi; -import io.airbyte.api.client.generated.HealthApi; -import io.airbyte.api.client.generated.SourceDefinitionApi; -import io.airbyte.api.client.generated.WorkspaceApi; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.SourceDefinitionRead; -import io.airbyte.api.client.model.generated.WorkspaceIdRequestBody; -import io.airbyte.api.client.model.generated.WorkspaceRead; -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.util.MoreProperties; 
-import io.airbyte.test.container.AirbyteTestContainer; -import java.io.File; -import java.nio.file.Path; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; -import java.util.function.Supplier; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class contains an e2e test simulating what a user encounter when trying to upgrade Airybte. - * - upgrading from 0.32.0 to the latest version should work. - This test previously tested - * upgrading from even older versions, which has since been removed - *

    - * This test runs on the current code version and expects local images with the `dev` tag to be - * available. To do so, run SUB_BUILD=PLATFORM ./gradlew build. - *

    - * When running this test consecutively locally, it might be necessary to run `docker volume prune` - * to remove hanging volumes. - */ -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class MigrationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(MigrationAcceptanceTest.class); - - // assume env file is one directory level up from airbyte-tests. - private static final File ENV_FILE = Path.of(System.getProperty("user.dir")).getParent().resolve(".env").toFile(); - - private static final String TEST_DATA_DOCKER_MOUNT = "airbyte_data_migration_test"; - private static final String TEST_DB_DOCKER_MOUNT = "airbyte_db_migration_test"; - private static final String TEST_WORKSPACE_DOCKER_MOUNT = "airbyte_workspace_migration_test"; - private static final String TEST_LOCAL_ROOT = "/tmp/airbyte_local_migration_test"; - private static final String TEST_LOCAL_DOCKER_MOUNT = "/tmp/airbyte_local_migration_test"; - - private static WorkspaceIdRequestBody workspaceIdRequestBody = null; - - @Test - @Disabled - void testAutomaticMigration() throws Exception { - // start at "faux" major version bump version. This was the last version that required db data - // migrations. - final File version32DockerComposeFile = MoreResources.readResourceAsFile("docker-compose-migration-test-0-32-0-alpha.yaml"); - final Properties version32EnvFileProperties = MoreProperties - .envFileToProperties(MoreResources.readResourceAsFile("env-file-migration-test-0-32-0.env")); - runAirbyte(version32DockerComposeFile, version32EnvFileProperties, MigrationAcceptanceTest::assertHealthy); - - final File currentDockerComposeFile = MoreResources.readResourceAsFile("docker-compose.yaml"); - // piggybacks off of whatever the existing .env file is, so override default filesystem values in to - // point at test paths. 
- final Properties envFileProperties = overrideDirectoriesForTest(MoreProperties.envFileToProperties(ENV_FILE)); - // run from last major version bump to current version. - runAirbyte(currentDockerComposeFile, envFileProperties, MigrationAcceptanceTest::assertHealthy, false); - } - - private Properties overrideDirectoriesForTest(final Properties properties) { - final Properties propertiesWithOverrides = new Properties(properties); - propertiesWithOverrides.put("DATA_DOCKER_MOUNT", TEST_DATA_DOCKER_MOUNT); - propertiesWithOverrides.put("DB_DOCKER_MOUNT", TEST_DB_DOCKER_MOUNT); - propertiesWithOverrides.put("WORKSPACE_DOCKER_MOUNT", TEST_WORKSPACE_DOCKER_MOUNT); - propertiesWithOverrides.put("LOCAL_ROOT", TEST_LOCAL_ROOT); - propertiesWithOverrides.put("LOCAL_DOCKER_MOUNT", TEST_LOCAL_DOCKER_MOUNT); - return propertiesWithOverrides; - } - - private void runAirbyte(final File dockerComposeFile, final Properties env, final VoidCallable assertionExecutable) throws Exception { - runAirbyte(dockerComposeFile, env, assertionExecutable, true); - } - - private void runAirbyte(final File dockerComposeFile, - final Properties env, - final VoidCallable postStartupExecutable, - final boolean retainVolumesOnStop) - throws Exception { - LOGGER.info("Start up Airbyte at version {}", env.get("VERSION")); - final AirbyteTestContainer airbyteTestContainer = new AirbyteTestContainer.Builder(dockerComposeFile) - .setEnv(env) - .build(); - - airbyteTestContainer.startBlocking(); - postStartupExecutable.call(); - if (retainVolumesOnStop) { - airbyteTestContainer.stopRetainVolumes(); - } else { - airbyteTestContainer.stop(); - } - } - - /** - * Allows the test to listen for a specific log line so that the test can end as soon as that log - * line has been encountered. 
- */ - private static class WaitForLogLine { - - final AtomicBoolean hasSeenLine = new AtomicBoolean(); - - public Consumer getListener(final String stringToListenFor) { - return (logLine) -> { - if (logLine.contains(stringToListenFor)) { - hasSeenLine.set(true); - } - }; - } - - public Supplier hasSeenLine() { - return hasSeenLine::get; - } - - } - - private static void assertHealthy() throws ApiException { - final ApiClient apiClient = getApiClient(); - healthCheck(apiClient); - assertDataFromApi(apiClient); - } - - @SuppressWarnings("PMD.NonThreadSafeSingleton") - private static void assertDataFromApi(final ApiClient apiClient) throws ApiException { - if (workspaceIdRequestBody != null) { - assertEquals(assertWorkspaceInformation(apiClient).getWorkspaceId(), workspaceIdRequestBody.getWorkspaceId()); - } else { - workspaceIdRequestBody = assertWorkspaceInformation(apiClient); - } - - assertSourceDefinitionInformation(apiClient); - assertDestinationDefinitionInformation(apiClient); - } - - private static void assertSourceDefinitionInformation(final ApiClient apiClient) throws ApiException { - final SourceDefinitionApi sourceDefinitionApi = new SourceDefinitionApi(apiClient); - final List sourceDefinitions = sourceDefinitionApi.listSourceDefinitions().getSourceDefinitions(); - assertTrue(sourceDefinitions.size() >= 58); - boolean foundMysqlSourceDefinition = false; - boolean foundPostgresSourceDefinition = false; - for (final SourceDefinitionRead sourceDefinitionRead : sourceDefinitions) { - if ("435bb9a5-7887-4809-aa58-28c27df0d7ad".equals(sourceDefinitionRead.getSourceDefinitionId().toString())) { - assertEquals(sourceDefinitionRead.getName(), "MySQL"); - foundMysqlSourceDefinition = true; - } else if ("decd338e-5647-4c0b-adf4-da0e75f5a750".equals(sourceDefinitionRead.getSourceDefinitionId().toString())) { - assertEquals(sourceDefinitionRead.getName(), "Postgres"); - foundPostgresSourceDefinition = true; - } - } - - assertTrue(foundMysqlSourceDefinition); - 
assertTrue(foundPostgresSourceDefinition); - } - - private static void assertDestinationDefinitionInformation(final ApiClient apiClient) throws ApiException { - final DestinationDefinitionApi destinationDefinitionApi = new DestinationDefinitionApi(apiClient); - final List destinationDefinitions = destinationDefinitionApi.listDestinationDefinitions().getDestinationDefinitions(); - assertTrue(destinationDefinitions.size() >= 10); - boolean foundPostgresDestinationDefinition = false; - boolean foundLocalCSVDestinationDefinition = false; - boolean foundSnowflakeDestinationDefinition = false; - String destinationId; - for (final DestinationDefinitionRead destinationDefinitionRead : destinationDefinitions) { - destinationId = destinationDefinitionRead.getDestinationDefinitionId().toString(); - if ("25c5221d-dce2-4163-ade9-739ef790f503".equals(destinationId)) { - assertEquals("Postgres", destinationDefinitionRead.getName()); - foundPostgresDestinationDefinition = true; - } else if ("8be1cf83-fde1-477f-a4ad-318d23c9f3c6".equals(destinationId)) { - assertTrue(destinationDefinitionRead.getName().contains("Local CSV")); - foundLocalCSVDestinationDefinition = true; - } else if ("424892c4-daac-4491-b35d-c6688ba547ba".equals(destinationId)) { - assertTrue(destinationDefinitionRead.getName().contains("Snowflake")); - foundSnowflakeDestinationDefinition = true; - } - } - - assertTrue(foundPostgresDestinationDefinition); - assertTrue(foundLocalCSVDestinationDefinition); - assertTrue(foundSnowflakeDestinationDefinition); - } - - private static WorkspaceIdRequestBody assertWorkspaceInformation(final ApiClient apiClient) throws ApiException { - final WorkspaceApi workspaceApi = new WorkspaceApi(apiClient); - final WorkspaceRead workspace = workspaceApi.listWorkspaces().getWorkspaces().get(0); - assertNotNull(workspace.getWorkspaceId().toString()); - assertNotNull(workspace.getName()); - assertNotNull(workspace.getSlug()); - assertEquals(false, workspace.getInitialSetupComplete()); - - 
return new WorkspaceIdRequestBody().workspaceId(workspace.getWorkspaceId()); - } - - private static void healthCheck(final ApiClient apiClient) { - final HealthApi healthApi = new HealthApi(apiClient); - try { - healthApi.getHealthCheck(); - } catch (final ApiException e) { - throw new RuntimeException("Health check failed, usually due to auto migration failure. Please check the logs for details.", e); - } - } - - private static ApiClient getApiClient() { - return new ApiClient().setScheme("http") - .setHost("localhost") - .setPort(8001) - .setBasePath("/api"); - } - -} diff --git a/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/03a4c904-c91d-447f-ab59-27a43b52c2fd.gz b/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/03a4c904-c91d-447f-ab59-27a43b52c2fd.gz deleted file mode 100644 index 0e9f6b2c4069..000000000000 Binary files a/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/03a4c904-c91d-447f-ab59-27a43b52c2fd.gz and /dev/null differ diff --git a/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/docker-compose-migration-test-0-32-0-alpha.yaml b/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/docker-compose-migration-test-0-32-0-alpha.yaml deleted file mode 100644 index 10807814277d..000000000000 --- a/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/docker-compose-migration-test-0-32-0-alpha.yaml +++ /dev/null @@ -1,203 +0,0 @@ -version: "3.7" -#https://github.com/compose-spec/compose-spec/blob/master/spec.md#using-extensions-as-fragments -x-logging: &default-logging - options: - max-size: "100m" - max-file: "5" - driver: json-file -services: - # hook in case we need to add init behavior - # every root service (no depends_on) should depend on init - init: - image: airbyte/init:${VERSION} - logging: *default-logging - container_name: init - command: /bin/sh -c "./scripts/create_mount_directories.sh /local_parent ${HACK_LOCAL_ROOT_PARENT} ${LOCAL_ROOT}" - environment: - - 
LOCAL_ROOT=${LOCAL_ROOT} - - HACK_LOCAL_ROOT_PARENT=${HACK_LOCAL_ROOT_PARENT} - volumes: - - ${HACK_LOCAL_ROOT_PARENT}:/local_parent - db: - image: airbyte/db:${VERSION} - logging: *default-logging - container_name: airbyte-db - restart: unless-stopped - environment: - - POSTGRES_USER=${DATABASE_USER} - - POSTGRES_PASSWORD=${DATABASE_PASSWORD} - - DATABASE_USER=${DATABASE_USER} - - DATABASE_PASSWORD=${DATABASE_PASSWORD} - - DATABASE_URL=${DATABASE_URL} - - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-} - - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-} - - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-} - volumes: - - db:/var/lib/postgresql/data - scheduler: - image: airbyte/scheduler:${VERSION} - logging: *default-logging - container_name: airbyte-scheduler - restart: unless-stopped - environment: - - WEBAPP_URL=${WEBAPP_URL} - - DATABASE_USER=${DATABASE_USER} - - DATABASE_PASSWORD=${DATABASE_PASSWORD} - - DATABASE_URL=${DATABASE_URL} - - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-} - - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-} - - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-} - - RUN_DATABASE_MIGRATION_ON_STARTUP=${RUN_DATABASE_MIGRATION_ON_STARTUP} - - WORKSPACE_ROOT=${WORKSPACE_ROOT} - - WORKSPACE_DOCKER_MOUNT=${WORKSPACE_DOCKER_MOUNT} - - LOCAL_ROOT=${LOCAL_ROOT} - - LOCAL_DOCKER_MOUNT=${LOCAL_DOCKER_MOUNT} - - CONFIG_ROOT=${CONFIG_ROOT} - - TRACKING_STRATEGY=${TRACKING_STRATEGY} - - AIRBYTE_VERSION=${VERSION} - - AIRBYTE_ROLE=${AIRBYTE_ROLE:-} - - TEMPORAL_HOST=${TEMPORAL_HOST} - - WORKER_ENVIRONMENT=${WORKER_ENVIRONMENT} - - S3_LOG_BUCKET=${S3_LOG_BUCKET} - - S3_LOG_BUCKET_REGION=${S3_LOG_BUCKET_REGION} - - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - - GCP_STORAGE_BUCKET=${GCP_STORAGE_BUCKET} - - LOG_LEVEL=${LOG_LEVEL} - - SUBMITTER_NUM_THREADS=${SUBMITTER_NUM_THREADS} - - RESOURCE_CPU_REQUEST=${RESOURCE_CPU_REQUEST} - - RESOURCE_CPU_LIMIT=${RESOURCE_CPU_LIMIT} - - 
RESOURCE_MEMORY_REQUEST=${RESOURCE_MEMORY_REQUEST} - - RESOURCE_MEMORY_LIMIT=${RESOURCE_MEMORY_LIMIT} - - MAX_SYNC_JOB_ATTEMPTS=${MAX_SYNC_JOB_ATTEMPTS} - - MAX_SYNC_TIMEOUT_DAYS=${MAX_SYNC_TIMEOUT_DAYS} - - INTERNAL_API_HOST=${INTERNAL_API_HOST} - - SECRET_PERSISTENCE=${SECRET_PERSISTENCE} - volumes: - - workspace:${WORKSPACE_ROOT} - - ${LOCAL_ROOT}:${LOCAL_ROOT} - - data:${CONFIG_ROOT} - worker: - image: airbyte/worker:${VERSION} - logging: *default-logging - container_name: airbyte-worker - restart: unless-stopped - environment: - - WEBAPP_URL=${WEBAPP_URL} - - DATABASE_USER=${DATABASE_USER} - - DATABASE_PASSWORD=${DATABASE_PASSWORD} - - DATABASE_URL=${DATABASE_URL} - - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-} - - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-} - - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-} - - RUN_DATABASE_MIGRATION_ON_STARTUP=${RUN_DATABASE_MIGRATION_ON_STARTUP} - - WORKSPACE_ROOT=${WORKSPACE_ROOT} - - WORKSPACE_DOCKER_MOUNT=${WORKSPACE_DOCKER_MOUNT} - - LOCAL_ROOT=${LOCAL_ROOT} - - LOCAL_DOCKER_MOUNT=${LOCAL_DOCKER_MOUNT} - - CONFIG_ROOT=${CONFIG_ROOT} - - TRACKING_STRATEGY=${TRACKING_STRATEGY} - - AIRBYTE_VERSION=${VERSION} - - AIRBYTE_ROLE=${AIRBYTE_ROLE:-} - - TEMPORAL_HOST=${TEMPORAL_HOST} - - WORKER_ENVIRONMENT=${WORKER_ENVIRONMENT} - - S3_LOG_BUCKET=${S3_LOG_BUCKET} - - S3_LOG_BUCKET_REGION=${S3_LOG_BUCKET_REGION} - - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - - GCP_STORAGE_BUCKET=${GCP_STORAGE_BUCKET} - - LOG_LEVEL=${LOG_LEVEL} - - SUBMITTER_NUM_THREADS=${SUBMITTER_NUM_THREADS} - - RESOURCE_CPU_REQUEST=${RESOURCE_CPU_REQUEST} - - RESOURCE_CPU_LIMIT=${RESOURCE_CPU_LIMIT} - - RESOURCE_MEMORY_REQUEST=${RESOURCE_MEMORY_REQUEST} - - RESOURCE_MEMORY_LIMIT=${RESOURCE_MEMORY_LIMIT} - - MAX_SYNC_JOB_ATTEMPTS=${MAX_SYNC_JOB_ATTEMPTS} - - MAX_SYNC_TIMEOUT_DAYS=${MAX_SYNC_TIMEOUT_DAYS} - - INTERNAL_API_HOST=${INTERNAL_API_HOST} - - SECRET_PERSISTENCE=${SECRET_PERSISTENCE} - volumes: - 
- /var/run/docker.sock:/var/run/docker.sock - - workspace:${WORKSPACE_ROOT} - - ${LOCAL_ROOT}:${LOCAL_ROOT} - server: - image: airbyte/server:${VERSION} - logging: *default-logging - container_name: airbyte-server - restart: unless-stopped - environment: - - WEBAPP_URL=${WEBAPP_URL} - - DATABASE_USER=${DATABASE_USER} - - DATABASE_PASSWORD=${DATABASE_PASSWORD} - - DATABASE_URL=${DATABASE_URL} - - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-} - - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-} - - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-} - - RUN_DATABASE_MIGRATION_ON_STARTUP=${RUN_DATABASE_MIGRATION_ON_STARTUP} - - WORKSPACE_ROOT=${WORKSPACE_ROOT} - - CONFIG_ROOT=${CONFIG_ROOT} - - TRACKING_STRATEGY=${TRACKING_STRATEGY} - - AIRBYTE_VERSION=${VERSION} - - AIRBYTE_ROLE=${AIRBYTE_ROLE:-} - - TEMPORAL_HOST=${TEMPORAL_HOST} - - WORKER_ENVIRONMENT=${WORKER_ENVIRONMENT} - - S3_LOG_BUCKET=${S3_LOG_BUCKET} - - S3_LOG_BUCKET_REGION=${S3_LOG_BUCKET_REGION} - - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - - GCP_STORAGE_BUCKET=${GCP_STORAGE_BUCKET} - - LOG_LEVEL=${LOG_LEVEL} - - RESOURCE_CPU_REQUEST=${RESOURCE_CPU_REQUEST} - - RESOURCE_CPU_LIMIT=${RESOURCE_CPU_LIMIT} - - RESOURCE_MEMORY_REQUEST=${RESOURCE_MEMORY_REQUEST} - - RESOURCE_MEMORY_LIMIT=${RESOURCE_MEMORY_LIMIT} - - SECRET_PERSISTENCE=${SECRET_PERSISTENCE} - - VERSION_0_32_0_FORCE_UPGRADE=${VERSION_0_32_0_FORCE_UPGRADE} - ports: - - 8001:8001 - volumes: - - workspace:${WORKSPACE_ROOT} - - data:${CONFIG_ROOT} - - ${LOCAL_ROOT}:${LOCAL_ROOT} - webapp: - image: airbyte/webapp:${VERSION} - logging: *default-logging - container_name: airbyte-webapp - restart: unless-stopped - ports: - - 8000:80 - environment: - - AIRBYTE_ROLE=${AIRBYTE_ROLE:-} - - AIRBYTE_VERSION=${VERSION} - - API_URL=${API_URL:-} - - TRACKING_STRATEGY=${TRACKING_STRATEGY} - - INTERNAL_API_HOST=${INTERNAL_API_HOST} - - OPENREPLAY=${OPENREPLAY:-} - - PAPERCUPS_STORYTIME=${PAPERCUPS_STORYTIME:-} - 
airbyte-temporal: - image: airbyte/temporal:${VERSION} - logging: *default-logging - container_name: airbyte-temporal - restart: unless-stopped - ports: - - 7233:7233 - environment: - - DB=postgresql - - DB_PORT=${DATABASE_PORT} - - POSTGRES_USER=${DATABASE_USER} - - POSTGRES_PWD=${DATABASE_PASSWORD} - - POSTGRES_SEEDS=${DATABASE_HOST} - - DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development.yaml - - LOG_LEVEL=${LOG_LEVEL} - volumes: - - ./temporal/dynamicconfig:/etc/temporal/config/dynamicconfig -volumes: - workspace: - name: ${WORKSPACE_DOCKER_MOUNT} - # the data volume is only needed for backward compatibility; when users upgrade - # from an old Airbyte version that relies on file-based configs, the server needs - # to read this volume to copy their configs to the database - data: - name: ${DATA_DOCKER_MOUNT} - db: - name: ${DB_DOCKER_MOUNT} diff --git a/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/env-file-migration-test-0-32-0.env b/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/env-file-migration-test-0-32-0.env deleted file mode 100644 index 2dd0169e2146..000000000000 --- a/airbyte-tests/src/automaticMigrationAcceptanceTest/resources/env-file-migration-test-0-32-0.env +++ /dev/null @@ -1,86 +0,0 @@ -VERSION=0.32.0-alpha-patch-1 - -# Airbyte Internal Job Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db -DATABASE_USER=docker -DATABASE_PASSWORD=docker -DATABASE_HOST=db -DATABASE_PORT=5432 -DATABASE_DB=airbyte -# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT/${DATABASE_DB} (do not include the username or password here) -DATABASE_URL=jdbc:postgresql://db:5432/airbyte - -# Airbyte Internal Config Database, default to reuse the Job Database when they are empty -# Usually you do not need to set them; they are explicitly left empty to mute docker compose warnings -CONFIG_DATABASE_USER= -CONFIG_DATABASE_PASSWORD= -CONFIG_DATABASE_URL= - 
-RUN_DATABASE_MIGRATION_ON_STARTUP=true - -# When using the airbyte-db via default docker image: -CONFIG_ROOT=/data -DATA_DOCKER_MOUNT=airbyte_data_migration_test -DB_DOCKER_MOUNT=airbyte_db_migration_test - -# Temporal.io worker configuration -TEMPORAL_HOST=airbyte-temporal:7233 - -# Workspace storage for running jobs (logs, etc) -WORKSPACE_ROOT=/tmp/workspace -WORKSPACE_DOCKER_MOUNT=airbyte_workspace_migration_test - -# Local mount to access local files from filesystem -# todo (cgardens) - when we are mount raw directories instead of named volumes, *_DOCKER_MOUNT must -# be the same as *_ROOT. -# Issue: https://github.com/airbytehq/airbyte/issues/578 -LOCAL_ROOT=/tmp/airbyte_local_migration_test -LOCAL_DOCKER_MOUNT=/tmp/airbyte_local_migration_test -# todo (cgardens) - hack to handle behavior change in docker compose. *_PARENT directories MUST -# already exist on the host filesystem and MUST be parents of *_ROOT. -# Issue: https://github.com/airbytehq/airbyte/issues/577 -HACK_LOCAL_ROOT_PARENT=/tmp - -# Maximum simultaneous jobs -SUBMITTER_NUM_THREADS=10 - -# Job container images -# Usually you should not need to set these, they have defaults already set -JOB_KUBE_SOCAT_IMAGE= -JOB_KUBE_BUSYBOX_IMAGE= -JOB_KUBE_CURL_IMAGE= - -# Miscellaneous -TRACKING_STRATEGY=segment -WEBAPP_URL=http://localhost:8000/ -API_URL=/api/v1/ -INTERNAL_API_HOST=airbyte-server:8001 -LOG_LEVEL=INFO - -WORKER_ENVIRONMENT=docker - -# Cloud log backups. Don't use this unless you know what you're doing. Mainly for Airbyte devs. 
-# If you just want to capture Docker logs, you probably want to use something like this instead: -# https://docs.docker.com/config/containers/logging/configure/ -S3_LOG_BUCKET= -S3_LOG_BUCKET_REGION= -AWS_ACCESS_KEY_ID= -AWS_SECRET_ACCESS_KEY= -S3_MINIO_ENDPOINT= -S3_PATH_STYLE_ACCESS= - -GCP_STORAGE_BUCKET= - -# Docker Resource Limits -RESOURCE_CPU_REQUEST= -RESOURCE_CPU_LIMIT= -RESOURCE_MEMORY_REQUEST= -RESOURCE_MEMORY_LIMIT= - -# Max attempts per sync and max retries per attempt -MAX_SYNC_JOB_ATTEMPTS=3 - -# Time in days to reach a timeout to cancel the synchronization -MAX_SYNC_TIMEOUT_DAYS=3 - -# Set secret persistence store to use. Do not change this for existing installations! -SECRET_PERSISTENCE=NONE diff --git a/airbyte-webapp-e2e-tests/.eslintrc.js b/airbyte-webapp-e2e-tests/.eslintrc.js deleted file mode 100644 index f0abf5c00175..000000000000 --- a/airbyte-webapp-e2e-tests/.eslintrc.js +++ /dev/null @@ -1,52 +0,0 @@ -module.exports = { - env: { - browser: true, - node: true, - }, - extends: [ - "plugin:cypress/recommended", - "plugin:@typescript-eslint/recommended", - "prettier", - "plugin:prettier/recommended", - ], - plugins: ["@typescript-eslint", "prettier"], - parser: "@typescript-eslint/parser", - rules: { - "cypress/no-unnecessary-waiting": "warn", - "prettier/prettier": "warn", - - curly: "warn", - "dot-location": ["warn", "property"], - "dot-notation": "warn", - "no-else-return": "warn", - "no-lonely-if": "warn", - "no-inner-declarations": "off", - "no-unused-vars": "off", - "no-useless-computed-key": "warn", - "no-useless-return": "warn", - "no-var": "warn", - "object-shorthand": ["warn", "always"], - "prefer-arrow-callback": "warn", - "prefer-const": "warn", - "prefer-destructuring": ["warn", { AssignmentExpression: { array: true } }], - "prefer-object-spread": "warn", - "prefer-template": "warn", - "spaced-comment": ["warn", "always", { markers: ["/"] }], - yoda: "warn", - - "@typescript-eslint/array-type": ["warn", { default: 
"array-simple" }], - "@typescript-eslint/ban-ts-comment": [ - "warn", - { - "ts-expect-error": "allow-with-description", - }, - ], - "@typescript-eslint/ban-types": "warn", - "@typescript-eslint/consistent-indexed-object-style": ["warn", "record"], - "@typescript-eslint/consistent-type-definitions": ["warn", "interface"], - "@typescript-eslint/no-unused-vars": "warn", - - "@typescript-eslint/no-var-requires": "off", - "@typescript-eslint/triple-slash-reference": "off", - }, -}; diff --git a/airbyte-webapp-e2e-tests/.gitignore b/airbyte-webapp-e2e-tests/.gitignore deleted file mode 100644 index 15386a1f0f80..000000000000 --- a/airbyte-webapp-e2e-tests/.gitignore +++ /dev/null @@ -1,33 +0,0 @@ -# dependencies -/node_modules -/.pnp -.pnp.js - -# testing -/coverage - -# production -/build - -# misc -.DS_Store -.env.local -.env.development.local -.env.test.local -.env.production.local - -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -*.iml -/.idea - -.env -.env.development -.env.production - -/cypress/screenshots -/cypress/videos -/cypress/fixtures -/cypress/downloads diff --git a/airbyte-webapp-e2e-tests/.npmrc b/airbyte-webapp-e2e-tests/.npmrc deleted file mode 100644 index b6f27f135954..000000000000 --- a/airbyte-webapp-e2e-tests/.npmrc +++ /dev/null @@ -1 +0,0 @@ -engine-strict=true diff --git a/airbyte-webapp-e2e-tests/.nvmrc b/airbyte-webapp-e2e-tests/.nvmrc deleted file mode 100644 index a2d511aff36c..000000000000 --- a/airbyte-webapp-e2e-tests/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -16.18.1 \ No newline at end of file diff --git a/airbyte-webapp-e2e-tests/.prettierrc.js b/airbyte-webapp-e2e-tests/.prettierrc.js deleted file mode 120000 index d0e72529dfa1..000000000000 --- a/airbyte-webapp-e2e-tests/.prettierrc.js +++ /dev/null @@ -1 +0,0 @@ -../airbyte-webapp/.prettierrc.js \ No newline at end of file diff --git a/airbyte-webapp-e2e-tests/README.md b/airbyte-webapp-e2e-tests/README.md deleted file mode 100644 index 4774d55b19ce..000000000000 --- 
a/airbyte-webapp-e2e-tests/README.md +++ /dev/null @@ -1,38 +0,0 @@ -## Running an interactive Cypress session with `npm run cypress:open` -The most useful way to run tests locally is with the `cypress open` command. It opens a dispatcher window that lets you select which tests and browser to run; the Electron browser (whose devtools will be very familiar to chrome users) opens a child window, and having both cypress windows grouped behaves nicely when switching between applications. In an interactive session, you can use `it.skip` and `it.only` to focus on the tests you care about; any change to the source file of a running test will cause tests to be automatically rerun. At the end of a test run, the web page is left "dangling" with all state present at the end of the last test; you can click around, "inspect element", and interact with the page however you wish, which makes it easy to incrementally develop tests. - -By default, this command is configured to visit page urls from port 3000 (as used by a locally-run dev server), not port 8000 (as used by docker-compose's `webapp` service). If you want to run tests against the dockerized UI instead, leave the `webapp` docker-compose service running in step 4) and start the test runner with `CYPRESS_BASE_URL=http://localhost:8000 npm run cypress:open` in step 8). - -Except as noted, all commands are written as if run from inside the `airbyte-webapp-e2e-tests/` directory. - -Steps: -1) If you have not already done so, run `npm install` to install the e2e test dependencies. -2) Build the OSS backend for the current commit with `SUB_BUILD=PLATFORM ../gradlew clean build`. -3) Create the test database: `npm run createdbsource` and `npm run createdbdestination`. -4) When running the connector builder tests, start the dummy API server: `npm run createdummyapi` -5) Start the OSS backend: `BASIC_AUTH_USERNAME="" BASIC_AUTH_PASSWORD="" VERSION=dev docker compose --file ../docker-compose.yaml up`. 
If you want, follow this with `docker compose stop webapp` to turn off the dockerized frontend build; interactive cypress sessions don't use it. -6) The following two commands will start a separate long-running server, so open another terminal window. In it, `cd` into the `airbyte-webapp/` directory. -7) If you have not already done so, run `pnpm install` to install the frontend app's dependencies. -8) Start the frontend development server with `pnpm start`. -9) Back in the `airbyte-webapp-e2e-tests/` directory, start the cypress test runner with `npm run cypress:open`. - -## Reproducing CI test results with `npm run cypress:ci` or `npm run cypress:ci:record` -Unlike `npm run cypress:open`, `npm run cypress:ci` and `npm run cypress:ci:record` use the dockerized UI (i.e. they expect the UI at port 8000, rather than port 3000). If the OSS backend is running but you have run `docker-compose stop webapp`, you'll have to re-enable it with `docker-compose start webapp`. These trigger headless runs: you won't have a live browser to interact with, just terminal output. - -Except as noted, all commands are written as if run from inside the `airbyte-webapp-e2e-tests/` directory. - -Steps: -1) If you have not already done so, run `npm install` to install the e2e test dependencies. -2) Build the OSS backend for the current commit with `SUB_BUILD=PLATFORM ../gradlew clean build`. -3) Create the test database: `npm run createdbsource` and `npm run createdbdestination`. -4) When running the connector builder tests, start the dummy API server: `npm run createdummyapi` -5) Start the OSS backend: `BASIC_AUTH_USERNAME="" BASIC_AUTH_PASSWORD="" VERSION=dev docker compose --file ../docker-compose.yaml up`. -6) Start the cypress test run with `npm run cypress:ci` or `npm run cypress:ci:record`. - -## Test setup - -When the tests are run as described above, the platform under test is started via docker compose on the local docker host. 
To test connections from real sources and destinations, additional docker containers are started for hosting these. For basic connections, additional postgres instances are started (`createdbsource` and `createdbdestination`). - -For testing the connector builder UI, a dummy api server based on a node script is started (`createdummyapi`). It is providing a simple http API with bearer authentication returning a few records of hardcoded data. By running it in the internal airbyte network, the connector builder server can access it under its container name. - -The tests in here are instrumenting a Chrome instance to test the full functionality of Airbyte from the frontend, so other components of the platform (scheduler, worker, connector builder server) are also tested in a rudimentary way. diff --git a/airbyte-webapp-e2e-tests/build.gradle b/airbyte-webapp-e2e-tests/build.gradle deleted file mode 100644 index 9a2389aac681..000000000000 --- a/airbyte-webapp-e2e-tests/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -plugins { - id "base" - id "com.github.node-gradle.node" version "3.3.0" -} - -def nodeVersion = new File("${projectDir}/.nvmrc").text.trim(); - -node { - download = true - version = nodeVersion -} - -task e2etest(type: NpmTask) { - dependsOn npmInstall - // If the cypressWebappKey property has been set from the outside (see tools/bin/e2e_test.sh) - // we'll record the cypress session, otherwise we're not recording - def recordCypress = project.hasProperty('cypressWebappKey') && project.getProperty('cypressWebappKey') - if (recordCypress) { - environment = [CYPRESS_KEY: project.getProperty('cypressWebappKey')] - args = ['run', 'cypress:ci:record'] - } else { - args = ['run', 'cypress:ci'] - } - inputs.files fileTree('cypress') - inputs.file 'package.json' - inputs.file 'package-lock.json' -} - diff --git a/airbyte-webapp-e2e-tests/cypress.json b/airbyte-webapp-e2e-tests/cypress.json deleted file mode 100644 index 995bc36be2de..000000000000 --- 
a/airbyte-webapp-e2e-tests/cypress.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "projectId": "916nvw", - "baseUrl": "https://localhost:3000", - "testFiles": ["**/*.spec.*"], - "viewportHeight": 800, - "viewportWidth": 1280, - "retries": { - "runMode": 5, - "openMode": 0 - }, - "defaultCommandTimeout": 10000, - "db": { - "user": "postgres", - "host": "localhost", - "database": "airbyte_ci_source", - "password": "secret_password", - "port": 5433 - } -} diff --git a/airbyte-webapp-e2e-tests/cypress/commands/api/api.ts b/airbyte-webapp-e2e-tests/cypress/commands/api/api.ts deleted file mode 100644 index a7e4ea35d218..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/api/api.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { - ConnectionGetBody, - Connection, - ConnectionCreateRequestBody, - ConnectionsList, - Destination, - DestinationsList, - Source, - SourceDiscoverSchema, - SourcesList, -} from "./types"; -import { getWorkspaceId, setWorkspaceId } from "./workspace"; - -const getApiUrl = (path: string): string => `http://localhost:8001/api/v1${path}`; - -const apiRequest = ( - method: Cypress.HttpMethod, - path: string, - payload?: Cypress.RequestBody, - expectedStatus = 200 -): Cypress.Chainable => - cy.request(method, getApiUrl(path), payload).then((response) => { - expect(response.status).to.eq(expectedStatus, "response status"); - return response.body; - }); - -export const requestWorkspaceId = () => - apiRequest<{ workspaces: Array<{ workspaceId: string }> }>("POST", "/workspaces/list").then( - ({ workspaces: [{ workspaceId }] }) => { - setWorkspaceId(workspaceId); - } - ); - -export const requestConnectionsList = () => - apiRequest("POST", "/connections/list", { workspaceId: getWorkspaceId() }); - -export const requestCreateConnection = (body: ConnectionCreateRequestBody) => - apiRequest("POST", "/web_backend/connections/create", body); - -export const requestUpdateConnection = (body: Record) => - apiRequest("POST", "/web_backend/connections/update", body); - 
-export const requestGetConnection = (body: ConnectionGetBody) => - apiRequest("POST", "/web_backend/connections/get", body); - -export const requestDeleteConnection = (connectionId: string) => - apiRequest("POST", "/connections/delete", { connectionId }, 204); - -export const requestSourcesList = () => - apiRequest("POST", "/sources/list", { workspaceId: getWorkspaceId() }); - -export const requestSourceDiscoverSchema = (sourceId: string) => - apiRequest("POST", "/sources/discover_schema", { sourceId, disable_cache: true }); - -export const requestCreateSource = (body: Record) => - apiRequest("POST", "/sources/create", body); - -export const requestDeleteSource = (sourceId: string) => apiRequest("POST", "/sources/delete", { sourceId }, 204); - -export const requestDestinationsList = () => - apiRequest("POST", "/destinations/list", { workspaceId: getWorkspaceId() }); - -export const requestCreateDestination = (body: Record) => - apiRequest("POST", "/destinations/create", body); - -export const requestDeleteDestination = (destinationId: string) => - apiRequest("POST", "/destinations/delete", { destinationId }, 204); diff --git a/airbyte-webapp-e2e-tests/cypress/commands/api/index.ts b/airbyte-webapp-e2e-tests/cypress/commands/api/index.ts deleted file mode 100644 index f5d0fb3b55c2..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/api/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./api"; -export * from "./payloads"; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/api/payloads.ts b/airbyte-webapp-e2e-tests/cypress/commands/api/payloads.ts deleted file mode 100644 index 44db5c99d501..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/api/payloads.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { ConnectionCreateRequestBody } from "./types"; -import { getWorkspaceId } from "./workspace"; - -type RequiredConnectionCreateRequestProps = "name" | "sourceId" | "destinationId" | "syncCatalog" | "sourceCatalogId"; -type 
CreationConnectRequestParams = Pick & - Partial>; - -export const getConnectionCreateRequest = (params: CreationConnectRequestParams): ConnectionCreateRequestBody => ({ - geography: "auto", - namespaceDefinition: "source", - namespaceFormat: "${SOURCE_NAMESPACE}", - nonBreakingChangesPreference: "ignore", - operations: [], - prefix: "", - scheduleType: "manual", - status: "active", - ...params, -}); - -export const getPostgresCreateSourceBody = (name: string) => ({ - name, - sourceDefinitionId: "decd338e-5647-4c0b-adf4-da0e75f5a750", - workspaceId: getWorkspaceId(), - connectionConfiguration: { - ssl_mode: { mode: "disable" }, - tunnel_method: { tunnel_method: "NO_TUNNEL" }, - replication_method: { method: "Standard" }, - ssl: false, - port: 5433, - schemas: ["public"], - host: "localhost", - database: "airbyte_ci_source", - username: "postgres", - password: "secret_password", - }, -}); - -export const getE2ETestingCreateDestinationBody = (name: string) => ({ - name, - workspaceId: getWorkspaceId(), - destinationDefinitionId: "2eb65e87-983a-4fd7-b3e3-9d9dc6eb8537", - connectionConfiguration: { - type: "LOGGING", - logging_config: { - logging_type: "FirstN", - max_entry_count: 100, - }, - }, -}); - -export const getPostgresCreateDestinationBody = (name: string) => ({ - name, - workspaceId: getWorkspaceId(), - destinationDefinitionId: "25c5221d-dce2-4163-ade9-739ef790f503", - connectionConfiguration: { - ssl_mode: { mode: "disable" }, - tunnel_method: { tunnel_method: "NO_TUNNEL" }, - ssl: false, - port: 5434, - schema: "public", - host: "localhost", - database: "airbyte_ci_destination", - username: "postgres", - password: "secret_password", - }, -}); diff --git a/airbyte-webapp-e2e-tests/cypress/commands/api/types.ts b/airbyte-webapp-e2e-tests/cypress/commands/api/types.ts deleted file mode 100644 index f21ebd75b112..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/api/types.ts +++ /dev/null @@ -1,77 +0,0 @@ -export interface Connection { - connectionId: 
string; - destination: Destination; - destinationId: string; - isSyncing: boolean; - name: string; - scheduleType: string; - schemaChange: string; - source: Source; - sourceId: string; - status: "active" | "inactive" | "deprecated"; - nonBreakingChangesPreference: "ignore" | "disable"; - syncCatalog: SyncCatalog; -} - -export interface ConnectionCreateRequestBody { - destinationId: string; - geography: string; - name: string; - namespaceDefinition: string; - namespaceFormat: string; - nonBreakingChangesPreference: "ignore" | "disable"; - operations: unknown[]; - prefix: string; - scheduleType: string; - sourceCatalogId: string; - sourceId: string; - status: "active"; - syncCatalog: SyncCatalog; -} - -export interface ConnectionGetBody { - connectionId: string; - withRefreshedCatalog?: boolean; -} - -export interface ConnectionsList { - connections: Connection[]; -} - -export interface Destination { - name: string; - destinationDefinitionId: string; - destinationName: string; - destinationId: string; - connectionConfiguration: Record; -} - -export interface DestinationsList { - destinations: Destination[]; -} - -export interface Source { - name: string; - sourceDefinitionId: string; - sourceName: string; - sourceId: string; - connectionConfiguration: Record; -} - -export interface SourceDiscoverSchema { - catalog: SyncCatalog; - catalogId: string; -} - -export interface SourcesList { - sources: Source[]; -} - -export interface SyncCatalog { - streams: SyncCatalogStream[]; -} - -export interface SyncCatalogStream { - config: Record; - stream: Record; -} diff --git a/airbyte-webapp-e2e-tests/cypress/commands/api/workspace.ts b/airbyte-webapp-e2e-tests/cypress/commands/api/workspace.ts deleted file mode 100644 index 9e1bcf6c1d25..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/api/workspace.ts +++ /dev/null @@ -1,9 +0,0 @@ -let _workspaceId: string; - -export const setWorkspaceId = (workspaceId: string) => { - _workspaceId = workspaceId; -}; - -export 
const getWorkspaceId = () => { - return _workspaceId; -}; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/common.ts b/airbyte-webapp-e2e-tests/cypress/commands/common.ts deleted file mode 100644 index 39ad108232fd..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/common.ts +++ /dev/null @@ -1,51 +0,0 @@ -export const submitButtonClick = (force: boolean = false) => { - cy.get("button[type=submit]").click({ force: force }); -}; - -export const updateField = (field: string, value: string) => { - cy.get(`input[name='${field}']`).clear().type(value); -}; - -export const openSettingForm = (name: string) => { - cy.get("div").contains(name).click(); - cy.get("div[data-id='settings-step']").click(); -}; - -export const deleteEntity = () => { - cy.get("button[data-id='open-delete-modal']").click(); - cy.get("button[data-id='delete']").click(); -}; - -export const clearApp = () => { - indexedDB.deleteDatabase("firebaseLocalStorageDb"); - cy.clearLocalStorage(); - cy.clearCookies(); -}; - -export const fillEmail = (email: string) => { - cy.get("input[name=email]").type(email); -}; - -// useful for ensuring that a name is unique from one test run to the next -export const appendRandomString = (string: string) => { - const randomString = Math.random().toString(36).substring(2, 10); - return `${string} _${randomString}`; -}; - -/** - * Click on specific cell found by column name in desired table - * @param tableSelector - table selector - * @param columnName - column name - * @param connectName - cell text - */ -export const clickOnCellInTable = (tableSelector: string, columnName: string, connectName: string) => { - cy.contains(`${tableSelector} th`, columnName) - .invoke("index") - .then((value) => { - cy.log(`${value}`); - return cy.wrap(value); - }) - .then((columnIndex) => { - cy.contains("tbody tr", connectName).find("td").eq(columnIndex).click(); - }); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/connection.ts 
b/airbyte-webapp-e2e-tests/cypress/commands/connection.ts deleted file mode 100644 index 07d7e351a64c..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/connection.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { submitButtonClick } from "./common"; -import { createLocalJsonDestination, createPostgresDestination } from "./destination"; -import { createPokeApiSource, createPostgresSource } from "./source"; -import { openAddSource } from "pages/destinationPage"; -import { selectSchedule, setupDestinationNamespaceSourceFormat, enterConnectionName } from "pages/replicationPage"; - -export const createTestConnection = (sourceName: string, destinationName: string) => { - cy.intercept("/api/v1/sources/discover_schema").as("discoverSchema"); - cy.intercept("/api/v1/web_backend/connections/create").as("createConnection"); - - switch (true) { - case sourceName.includes("PokeAPI"): - createPokeApiSource(sourceName, "luxray"); - break; - - case sourceName.includes("Postgres"): - createPostgresSource(sourceName); - break; - default: - createPostgresSource(sourceName); - } - - switch (true) { - case destinationName.includes("Postgres"): - createPostgresDestination(destinationName); - break; - case destinationName.includes("JSON"): - createLocalJsonDestination(destinationName); - break; - default: - createLocalJsonDestination(destinationName); - } - - cy.wait(5000); - - openAddSource(); - cy.get("div").contains(sourceName).click(); - - cy.wait("@discoverSchema"); - - enterConnectionName("Connection name"); - selectSchedule("Manual"); - - setupDestinationNamespaceSourceFormat(); - submitButtonClick(); - - cy.wait("@createConnection", { requestTimeout: 10000 }); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/connector.ts b/airbyte-webapp-e2e-tests/cypress/commands/connector.ts deleted file mode 100644 index 9e3ab477c1c3..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/connector.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { - enterDestinationPath, - 
selectServiceType, - enterName, - enterHost, - enterPort, - enterDatabase, - enterUsername, - enterPassword, - enterPokemonName, - enterSchema, -} from "pages/createConnectorPage"; - -export const fillPostgresForm = ( - name: string, - host: string, - port: string, - database: string, - username: string, - password: string, - schema: string -) => { - cy.intercept("/api/v1/source_definition_specifications/get").as("getSourceSpecifications"); - - selectServiceType("Postgres"); - - enterName(name); - enterHost(host); - enterPort(port); - enterDatabase(database); - enterSchema(schema); - enterUsername(username); - enterPassword(password); -}; - -export const fillPokeAPIForm = (name: string, pokeName: string) => { - cy.intercept("/api/v1/source_definition_specifications/get").as("getSourceSpecifications"); - - selectServiceType("PokeAPI"); - - enterName(name); - enterPokemonName(pokeName); -}; - -export const fillLocalJsonForm = (name: string, destinationPath: string) => { - cy.intercept("/api/v1/destination_definition_specifications/get").as("getDestinationSpecifications"); - - selectServiceType("Local JSON"); - - cy.wait("@getDestinationSpecifications"); - - enterName(name); - enterDestinationPath(destinationPath); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/connectorBuilder.ts b/airbyte-webapp-e2e-tests/cypress/commands/connectorBuilder.ts deleted file mode 100644 index e6c2e8ee17b4..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/connectorBuilder.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { - addStream, - configureOffsetPagination, - enterName, - enterRecordSelector, - enterStreamName, - enterTestInputs, - enterUrlBase, - enterUrlPath, - goToTestPage, - goToView, - openTestInputs, - selectAuthMethod, - submitForm, - togglePagination -} from "pages/connectorBuilderPage"; - -export const configureGlobals = () => { - goToView("global"); - enterName("Dummy API"); - enterUrlBase("http://dummy_api:6767/"); -} - -export const configureStream = () 
=> { - addStream(); - enterStreamName("Items"); - enterUrlPath("items/"); - submitForm(); - enterRecordSelector("items"); -} - -export const configureAuth = () => { - goToView("global"); - selectAuthMethod("Bearer"); - openTestInputs(); - enterTestInputs({ apiKey: "theauthkey" }) - submitForm(); -} - -export const configurePagination = () => { - goToView("0"); - togglePagination(); - configureOffsetPagination("2", "header", "offset"); -} - -const testPanelContains = (str: string) => { - cy.get("pre").contains(str).should("exist"); -} - -export const assertTestReadAuthFailure = () => { - testPanelContains('"error": "Bad credentials"'); -}; - -export const assertTestReadItems = () => { - testPanelContains('"name": "abc"'); - testPanelContains('"name": "def"'); -}; - -export const assertMultiPageReadItems = () => { - goToTestPage(1); - assertTestReadItems(); - - goToTestPage(2); - testPanelContains('"name": "xxx"'); - testPanelContains('"name": "yyy"'); - - goToTestPage(3); - testPanelContains('[]'); -}; \ No newline at end of file diff --git a/airbyte-webapp-e2e-tests/cypress/commands/db/db.ts b/airbyte-webapp-e2e-tests/cypress/commands/db/db.ts deleted file mode 100644 index fe5524ac60af..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/db/db.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { - alterCitiesTableQuery, - createCarsTableQuery, - createCitiesTableQuery, - createUsersTableQuery, - dropCarsTableQuery, - dropCitiesTableQuery, - dropUsersTableQuery, - insertCitiesTableQuery, - insertUsersTableQuery, -} from "./queries"; - -/** - * Wrapper for DB Query Cypress task - * @param queryString - */ -export const runDbQuery = (queryString: string) => cy.task("dbQuery", { query: queryString }); - -interface TableExistsResponse { - exists: boolean; -} -/** - * Function for composing the query for checking the existence of a table - * @param tableName - * @return string - */ -const composeIsTableExistQuery = (tableName: string) => - `SELECT EXISTS (SELECT FROM 
pg_tables - WHERE - schemaname = 'public' AND - tablename = '${tableName}' - )`; - -export const populateDBSource = () => { - runDbQuery(createUsersTableQuery); - runDbQuery(insertUsersTableQuery); - runDbQuery(createCitiesTableQuery); - runDbQuery(insertCitiesTableQuery); -}; - -export const makeChangesInDBSource = () => { - runDbQuery(dropUsersTableQuery); - runDbQuery(alterCitiesTableQuery); - runDbQuery(createCarsTableQuery); -}; - -export const cleanDBSource = () => { - runDbQuery(dropUsersTableQuery); - runDbQuery(dropCitiesTableQuery); - runDbQuery(dropCarsTableQuery); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/db/index.ts b/airbyte-webapp-e2e-tests/cypress/commands/db/index.ts deleted file mode 100644 index 9071a7574a6a..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/db/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { populateDBSource, makeChangesInDBSource, cleanDBSource } from "./db"; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/db/queries.ts b/airbyte-webapp-e2e-tests/cypress/commands/db/queries.ts deleted file mode 100644 index a05c742fad39..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/db/queries.ts +++ /dev/null @@ -1,88 +0,0 @@ -export const createTable = (tableName: string, columns: string[]): string => - `CREATE TABLE ${tableName}(${columns.join(", ")});`; - -export const dropTable = (tableName: string) => `DROP TABLE IF EXISTS ${tableName}`; - -export const alterTable = (tableName: string, params: { add?: string[]; drop?: string[] }): string => { - const adds = params.add ? params.add.map((add) => `ADD COLUMN ${add}`) : []; - const drops = params.drop ? 
params.drop.map((columnName) => `DROP COLUMN ${columnName}`) : []; - const alterations = [...adds, ...drops]; - - return `ALTER TABLE ${tableName} ${alterations.join(", ")};`; -}; - -export const insertIntoTable = (tableName: string, valuesByColumn: Record): string => { - const keys = Object.keys(valuesByColumn); - const values = keys - .map((key) => valuesByColumn[key]) - .map((value) => (typeof value === "string" ? `'${value}'` : value)); - - return `INSERT INTO ${tableName}(${keys.join(", ")}) VALUES(${values.join(", ")});`; -}; - -export const insertMultipleIntoTable = (tableName: string, valuesByColumns: Array>): string => - valuesByColumns.map((valuesByColumn) => insertIntoTable(tableName, valuesByColumn)).join("\n"); - -// Users table -export const createUsersTableQuery = createTable("public.users", [ - "id SERIAL", - "name VARCHAR(200) NULL", - "email VARCHAR(200) NULL", - "updated_at TIMESTAMP", - "CONSTRAINT users_pkey PRIMARY KEY (id)", -]); -export const insertUsersTableQuery = insertMultipleIntoTable("public.users", [ - { name: "Abigail", email: "abigail@example.com", updated_at: "2022-12-19 00:00:00" }, - { name: "Andrew", email: "andrew@example.com", updated_at: "2022-12-19 00:00:00" }, - { name: "Kat", email: "kat@example.com", updated_at: "2022-12-19 00:00:00" }, -]); - -export const dropUsersTableQuery = dropTable("public.users"); - -// Cities table -export const createCitiesTableQuery = createTable("public.cities", ["city_code VARCHAR(8)", "city VARCHAR(200)"]); - -export const insertCitiesTableQuery = insertMultipleIntoTable("public.cities", [ - { - city_code: "BCN", - city: "Barcelona", - }, - { city_code: "MAD", city: "Madrid" }, - { city_code: "VAL", city: "Valencia" }, -]); - -export const alterCitiesTableQuery = alterTable("public.cities", { - add: ["state TEXT", "country TEXT"], - drop: ["city_code"], -}); -export const dropCitiesTableQuery = dropTable("public.cities"); - -// Cars table -export const createCarsTableQuery = 
createTable("public.cars", [ - "id SERIAL PRIMARY KEY", - "mark VARCHAR(200)", - "model VARCHAR(200)", - "color VARCHAR(200)", -]); - -export const dropCarsTableQuery = dropTable("public.cars"); - -// Dummy tables - used only for populating stream table with a lot of streams(tables) -// NOTE: Not for testing stream functionality! -export const createDummyTablesQuery = (amountOfTables: number) => - Array.from({ length: amountOfTables }, (_, index) => { - const tableName = `public.dummy_table_${index + 1}`; - const columns = [ - "id serial PRIMARY KEY", - "column_1 INTEGER NOT NULL", - "column_2 VARCHAR(100) NOT NULL", - "column_3 DECIMAL(10, 2) NOT NULL", - ]; - return createTable(tableName, columns); - }).join("\n"); - -export const dropDummyTablesQuery = (amountOfTables: number) => { - // postgres doesn't allow to drop multiple tables using wildcard, so need to compose the list of table names - const tables = Array.from({ length: amountOfTables }, (_, index) => `public.dummy_table_${index + 1}`).join(", "); - return dropTable(tables); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/destination.ts b/airbyte-webapp-e2e-tests/cypress/commands/destination.ts deleted file mode 100644 index f52bd8e14081..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/destination.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { deleteEntity, openSettingForm, submitButtonClick, updateField } from "./common"; -import { fillLocalJsonForm, fillPostgresForm } from "./connector"; -import { goToDestinationPage, openNewDestinationForm } from "pages/destinationPage"; - -export const createLocalJsonDestination = (name: string, destinationPath = "/local") => { - cy.intercept("/api/v1/scheduler/destinations/check_connection").as("checkDestinationConnection"); - cy.intercept("/api/v1/destinations/create").as("createDestination"); - - goToDestinationPage(); - openNewDestinationForm(); - fillLocalJsonForm(name, destinationPath); - submitButtonClick(); - - 
cy.wait("@checkDestinationConnection", { requestTimeout: 8000 }); - cy.wait("@createDestination"); -}; - -export const createPostgresDestination = ( - name: string, - host = "localhost", - port = "5434", - database = "airbyte_ci_destination", - username = "postgres", - password = "secret_password", - schema = "" -) => { - cy.intercept("/api/v1/scheduler/destinations/check_connection").as("checkDestinationConnection"); - cy.intercept("/api/v1/destinations/create").as("createDestination"); - - goToDestinationPage(); - openNewDestinationForm(); - fillPostgresForm(name, host, port, database, username, password, schema); - submitButtonClick(); - - cy.wait("@checkDestinationConnection", { requestTimeout: 8000 }); - cy.wait("@createDestination"); -}; - -export const updateDestination = (name: string, field: string, value: string) => { - cy.intercept("/api/v1/destinations/check_connection_for_update").as("checkDestinationUpdateConnection"); - cy.intercept("/api/v1/destinations/update").as("updateDestination"); - - goToDestinationPage(); - openSettingForm(name); - updateField(field, value); - submitButtonClick(); - - cy.wait("@checkDestinationUpdateConnection"); - cy.wait("@updateDestination"); -}; - -export const deleteDestination = (name: string) => { - cy.intercept("/api/v1/destinations/delete").as("deleteDestination"); - goToDestinationPage(); - openSettingForm(name); - deleteEntity(); - cy.wait("@deleteDestination"); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/interceptors.ts b/airbyte-webapp-e2e-tests/cypress/commands/interceptors.ts deleted file mode 100644 index 0cd74091122a..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/interceptors.ts +++ /dev/null @@ -1,22 +0,0 @@ -export const interceptGetConnectionRequest = () => - cy.intercept("/api/v1/web_backend/connections/get").as("getConnection"); -export const waitForGetConnectionRequest = () => cy.wait("@getConnection"); - -export const interceptUpdateConnectionRequest = () => - 
cy.intercept("/api/v1/web_backend/connections/update").as("updateConnection"); -export const waitForUpdateConnectionRequest = () => cy.wait("@updateConnection", { timeout: 10000 }); - -export const interceptDiscoverSchemaRequest = () => - cy.intercept("/api/v1/sources/discover_schema").as("discoverSchema"); -export const waitForDiscoverSchemaRequest = () => cy.wait("@discoverSchema"); - -export const interceptCreateConnectionRequest = () => - cy.intercept("/api/v1/web_backend/connections/create").as("createConnection"); -export const waitForCreateConnectionRequest = () => cy.wait("@createConnection"); - -export const interceptGetSourcesListRequest = () => cy.intercept("/api/v1/sources/list").as("getSourcesList"); -export const waitForGetSourcesListRequest = () => cy.wait("@getSourcesList"); - -export const interceptGetSourceDefinitionsRequest = () => - cy.intercept("/api/v1/source_definitions/list_for_workspace").as("getSourceDefinitions"); -export const waitForGetSourceDefinitionsRequest = () => cy.wait("@getSourceDefinitions"); diff --git a/airbyte-webapp-e2e-tests/cypress/commands/source.ts b/airbyte-webapp-e2e-tests/cypress/commands/source.ts deleted file mode 100644 index 083ec4e16a57..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/source.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { deleteEntity, openSettingForm, submitButtonClick, updateField } from "./common"; -import { goToSourcePage, openNewSourceForm } from "pages/sourcePage"; -import { fillPostgresForm, fillPokeAPIForm } from "./connector"; - -export const createPostgresSource = ( - name: string, - host = "localhost", - port = "5433", - database = "airbyte_ci_source", - username = "postgres", - password = "secret_password", - schema = "" -) => { - cy.intercept("/api/v1/scheduler/sources/check_connection").as("checkSourceUpdateConnection"); - cy.intercept("/api/v1/sources/create").as("createSource"); - - goToSourcePage(); - openNewSourceForm(); - fillPostgresForm(name, host, port, database, 
username, password, schema); - submitButtonClick(); - - cy.wait("@checkSourceUpdateConnection", { requestTimeout: 10000 }); - cy.wait("@createSource"); -}; - -export const createPokeApiSource = (name: string, pokeName: string) => { - cy.intercept("/api/v1/scheduler/sources/check_connection").as("checkSourceUpdateConnection"); - cy.intercept("/api/v1/sources/create").as("createSource"); - - goToSourcePage(); - openNewSourceForm(); - fillPokeAPIForm(name, pokeName); - submitButtonClick(); - - cy.wait("@checkSourceUpdateConnection"); - cy.wait("@createSource"); -}; - -export const updateSource = (name: string, field: string, value: string) => { - cy.intercept("/api/v1/sources/check_connection_for_update").as("checkSourceConnection"); - cy.intercept("/api/v1/sources/update").as("updateSource"); - - goToSourcePage(); - openSettingForm(name); - updateField(field, value); - submitButtonClick(); - - cy.wait("@checkSourceConnection"); - cy.wait("@updateSource"); -}; - -export const deleteSource = (name: string) => { - cy.intercept("/api/v1/sources/delete").as("deleteSource"); - goToSourcePage(); - openSettingForm(name); - deleteEntity(); - cy.wait("@deleteSource"); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/commands/workspaces.ts b/airbyte-webapp-e2e-tests/cypress/commands/workspaces.ts deleted file mode 100644 index 4019d489b2aa..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/commands/workspaces.ts +++ /dev/null @@ -1,14 +0,0 @@ -export const initialSetupCompleted = (completed = true) => { - // Modify the workspaces/list response to mark every workspace as "initialSetupComplete" to ensure we're not showing - // the setup/preference page for any workspace if this method got called. 
- cy.intercept("POST", "/api/v1/workspaces/get", (req) => { - req.continue((res) => { - res.body.initialSetupComplete = completed; - res.send(res.body); - }); - }); - - cy.on("uncaught:exception", (err, runnable) => { - return false; - }); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/global.d.ts b/airbyte-webapp-e2e-tests/cypress/global.d.ts deleted file mode 100644 index 3f1ee73dee12..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/global.d.ts +++ /dev/null @@ -1 +0,0 @@ -declare global {} diff --git a/airbyte-webapp-e2e-tests/cypress/integration/autoDetectSchema.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/autoDetectSchema.spec.ts deleted file mode 100644 index 32724670b8e0..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/integration/autoDetectSchema.spec.ts +++ /dev/null @@ -1,174 +0,0 @@ -import { - getConnectionCreateRequest, - getPostgresCreateDestinationBody, - getPostgresCreateSourceBody, - requestCreateConnection, - requestCreateDestination, - requestCreateSource, - requestDeleteConnection, - requestDeleteDestination, - requestDeleteSource, - requestGetConnection, - requestSourceDiscoverSchema, - requestWorkspaceId, -} from "commands/api"; -import { Connection, Destination, Source } from "commands/api/types"; -import { appendRandomString } from "commands/common"; -import { runDbQuery } from "commands/db/db"; -import { alterTable, createUsersTableQuery, dropUsersTableQuery } from "commands/db/queries"; -import { initialSetupCompleted } from "commands/workspaces"; -import { getSyncEnabledSwitch, visitConnectionPage } from "pages/connectionPage"; -import { getManualSyncButton, getSchemaChangeIcon, visitConnectionsListPage } from "pages/connnectionsListPage"; -import { checkCatalogDiffModal, clickCatalogDiffCloseButton } from "pages/modals/catalogDiffModal"; -import { - checkSchemaChangesDetected, - checkSchemaChangesDetectedCleared, - clickSaveReplication, - clickSchemaChangesReviewButton, - searchStream, - selectCursorField, - 
selectNonBreakingChangesPreference, - selectSyncMode, -} from "pages/replicationPage"; - -describe("Connection - Auto-detect schema changes", () => { - let source: Source; - let destination: Destination; - let connection: Connection; - - beforeEach(() => { - initialSetupCompleted(); - runDbQuery(dropUsersTableQuery); - runDbQuery(createUsersTableQuery); - - requestWorkspaceId().then(() => { - const sourceRequestBody = getPostgresCreateSourceBody(appendRandomString("Auto-detect schema Source")); - const destinationRequestBody = getPostgresCreateDestinationBody( - appendRandomString("Auto-detect schema Destination") - ); - - requestCreateSource(sourceRequestBody).then((sourceResponse) => { - source = sourceResponse; - requestCreateDestination(destinationRequestBody).then((destinationResponse) => { - destination = destinationResponse; - }); - - requestSourceDiscoverSchema(source.sourceId).then(({ catalog, catalogId }) => { - const connectionRequestBody = getConnectionCreateRequest({ - name: appendRandomString("Auto-detect schema test connection"), - sourceId: source.sourceId, - destinationId: destination.destinationId, - syncCatalog: catalog, - sourceCatalogId: catalogId, - }); - requestCreateConnection(connectionRequestBody).then((connectionResponse) => { - connection = connectionResponse; - }); - }); - }); - }); - }); - - afterEach(() => { - if (connection) { - requestDeleteConnection(connection.connectionId); - } - if (source) { - requestDeleteSource(source.sourceId); - } - if (destination) { - requestDeleteDestination(destination.destinationId); - } - - runDbQuery(dropUsersTableQuery); - }); - - describe("non-breaking changes", () => { - beforeEach(() => { - runDbQuery(alterTable("public.users", { drop: ["updated_at"] })); - requestGetConnection({ connectionId: connection.connectionId, withRefreshedCatalog: true }); - }); - - it("shows non-breaking change on list page", () => { - visitConnectionsListPage(); - getSchemaChangeIcon(connection, 
"non_breaking").should("exist"); - getManualSyncButton(connection).should("be.enabled"); - }); - - it("shows non-breaking change that can be saved after refresh", () => { - // Need to continue running but async breaks everything - visitConnectionPage(connection, "replication"); - - checkSchemaChangesDetected({ breaking: false }); - clickSchemaChangesReviewButton(); - getSyncEnabledSwitch().should("be.enabled"); - - checkCatalogDiffModal(); - clickCatalogDiffCloseButton(); - - checkSchemaChangesDetectedCleared(); - - clickSaveReplication(); - getSyncEnabledSwitch().should("be.enabled"); - }); - }); - - describe("breaking changes", () => { - beforeEach(() => { - const streamName = "users"; - visitConnectionPage(connection, "replication"); - - // Change users sync mode - searchStream(streamName); - selectSyncMode("Incremental", "Deduped + history"); - selectCursorField(streamName, "updated_at"); - clickSaveReplication(); - - // Remove cursor from db and refreshs schema to force breaking change detection - runDbQuery(alterTable("public.users", { drop: ["updated_at"] })); - requestGetConnection({ connectionId: connection.connectionId, withRefreshedCatalog: true }); - cy.reload(); - }); - - it("shows breaking change on list page", () => { - visitConnectionsListPage(); - getSchemaChangeIcon(connection, "breaking").should("exist"); - getManualSyncButton(connection).should("be.disabled"); - }); - - it("shows breaking change that can be saved after refresh and fix", () => { - visitConnectionPage(connection, "replication"); - - // Confirm that breaking changes are there - checkSchemaChangesDetected({ breaking: true }); - clickSchemaChangesReviewButton(); - getSyncEnabledSwitch().should("be.disabled"); - - checkCatalogDiffModal(); - clickCatalogDiffCloseButton(); - checkSchemaChangesDetectedCleared(); - - // Fix the conflict - searchStream("users"); - selectSyncMode("Full refresh", "Append"); - - clickSaveReplication(); - getSyncEnabledSwitch().should("be.enabled"); - }); - 
}); - - describe("non-breaking schema update preference", () => { - it("saves non-breaking schema update preference change", () => { - visitConnectionPage(connection, "replication"); - selectNonBreakingChangesPreference("disable"); - - cy.intercept("/api/v1/web_backend/connections/update").as("updatesNonBreakingPreference"); - - clickSaveReplication({ confirm: false }); - - cy.wait("@updatesNonBreakingPreference").then((interception) => { - assert.equal((interception.response?.body as Connection).nonBreakingChangesPreference, "disable"); - }); - }); - }); -}); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/base.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/base.spec.ts deleted file mode 100644 index b4f59dd559d2..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/integration/base.spec.ts +++ /dev/null @@ -1,30 +0,0 @@ -describe("Error handling view", () => { - it("Shows Version Mismatch page", () => { - cy.intercept("/api/v1/**", { - statusCode: 500, - body: { - error: - "Version mismatch between 0.0.1-ci and 0.0.2-ci.\nPlease upgrade or reset your Airbyte Database, see more at https://docs.airbyte.io/operator-guides/upgrading-airbyte", - }, - }); - - cy.on("uncaught:exception", () => false); - - cy.visit("/"); - - cy.get("div").contains("Version mismatch between 0.0.1-ci and 0.0.2-ci.").should("exist"); - }); - - it("Shows Server Unavailable page", () => { - cy.intercept("/api/v1/**", { - statusCode: 502, - body: "Failed to fetch", - }); - - cy.on("uncaught:exception", () => false); - - cy.visit("/"); - - cy.get("div").contains("Cannot reach server. 
The server may still be starting up.").should("exist"); - }); -}); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/connection.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/connection.spec.ts deleted file mode 100644 index 0f9ece580688..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/integration/connection.spec.ts +++ /dev/null @@ -1,581 +0,0 @@ -import { appendRandomString, deleteEntity, submitButtonClick } from "commands/common"; -import { createTestConnection } from "commands/connection"; -import { deleteDestination } from "commands/destination"; -import { deleteSource } from "commands/source"; -import { initialSetupCompleted } from "commands/workspaces"; -import { - confirmStreamConfigurationChangedPopup, - selectSchedule, - fillOutDestinationPrefix, - goToReplicationTab, - setupDestinationNamespaceCustomFormat, - checkSuccessResult, - refreshSourceSchemaBtnClick, - resetModalSaveBtnClick, - toggleStreamEnabledState, - searchStream, - selectCursorField, - checkCursorField, - selectSyncMode, - setupDestinationNamespaceDefaultFormat, - checkPrimaryKey, - isPrimaryKeyNonExist, - selectPrimaryKeyField, - checkPreFilledPrimaryKeyField, - checkStreamFields, - expandStreamDetailsByName, -} from "pages/replicationPage"; -import { goToSourcePage, openSourceOverview } from "pages/sourcePage"; -import { goToSettingsPage, openConnectionOverviewByDestinationName } from "pages/settingsConnectionPage"; -import { cleanDBSource, makeChangesInDBSource, populateDBSource } from "commands/db"; -import { - checkCatalogDiffModal, - clickCatalogDiffCloseButton, - newFieldsTable, - newStreamsTable, - removedFieldsTable, - removedStreamsTable, - toggleStreamWithChangesAccordion, -} from "pages/modals/catalogDiffModal"; -import { - interceptGetConnectionRequest, - interceptUpdateConnectionRequest, - waitForGetConnectionRequest, - waitForUpdateConnectionRequest, -} from "commands/interceptors"; - -describe("Connection - creation, updating connection replication 
settings, deletion", () => { - beforeEach(() => { - initialSetupCompleted(); - - interceptGetConnectionRequest(); - interceptUpdateConnectionRequest(); - }); - - it("Create Postgres <> LocalJSON connection, check it's creation", () => { - const sourceName = appendRandomString("Test connection source cypress"); - const destName = appendRandomString("Test connection destination cypress"); - - createTestConnection(sourceName, destName); - cy.get("div").contains(sourceName).should("exist"); - cy.get("div").contains(destName).should("exist"); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create Postgres <> LocalJSON connection, update connection replication settings - select schedule and add destination prefix", () => { - const sourceName = appendRandomString("Test update connection source cypress"); - const destName = appendRandomString("Test update connection destination cypress"); - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - selectSchedule("Every hour"); - fillOutDestinationPrefix("auto_test"); - - submitButtonClick(); - - waitForUpdateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - }); - - checkSuccessResult(); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it(`Creates PokeAPI <> Local JSON connection, update connection replication settings - - select schedule, add destination prefix, set destination namespace custom format, change prefix and make sure that it's applied to all streams`, () => { - const sourceName = appendRandomString("Test update connection PokeAPI source cypress"); - const destName = appendRandomString("Test update connection Local JSON destination cypress"); - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - 
openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - selectSchedule("Every hour"); - fillOutDestinationPrefix("auto_test"); - setupDestinationNamespaceCustomFormat("_test"); - selectSyncMode("Full refresh", "Append"); - - const prefix = "auto_test"; - fillOutDestinationPrefix(prefix); - - // Ensures the prefix is applied to the streams - assert(cy.get(`[title*="${prefix}"]`)); - - submitButtonClick(); - confirmStreamConfigurationChangedPopup(); - - waitForUpdateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - expect(interception.request.method).to.eq("POST"); - expect(interception.request) - .property("body") - .to.contain({ - name: `${sourceName} <> ${destName}Connection name`, - prefix: "auto_test", - namespaceDefinition: "customformat", - namespaceFormat: "${SOURCE_NAMESPACE}_test", - status: "active", - }); - expect(interception.request.body.scheduleData.basicSchedule).to.contain({ - units: 1, - timeUnit: "hours", - }); - - const streamToUpdate = interception.request.body.syncCatalog.streams[0]; - - expect(streamToUpdate.config).to.contain({ - aliasName: "pokemon", - destinationSyncMode: "append", - selected: true, - }); - - expect(streamToUpdate.stream).to.contain({ - name: "pokemon", - }); - expect(streamToUpdate.stream.supportedSyncModes).to.contain("full_refresh"); - }); - checkSuccessResult(); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create PokeAPI <> Local JSON connection, update connection replication settings - edit the schedule type one by one - cron, manual, every hour", () => { - const sourceName = appendRandomString("Test connection source cypress PokeAPI"); - const destName = appendRandomString("Test connection destination cypress"); - - createTestConnection(sourceName, destName); - - goToReplicationTab(); - - selectSchedule("Cron"); - submitButtonClick(); - checkSuccessResult(); - - selectSchedule("Manual"); - 
submitButtonClick(); - checkSuccessResult(); - - selectSchedule("Every hour"); - submitButtonClick(); - checkSuccessResult(); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create PokeAPI <> Local JSON connection, update connection replication settings - make sure that saving a connection's schedule type only changes expected values", () => { - const sourceName = appendRandomString("Test update connection PokeAPI source cypress"); - const destName = appendRandomString("Test update connection Local JSON destination cypress"); - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - let loadedConnection: any = null; // Should be a WebBackendConnectionRead - waitForGetConnectionRequest().then((interception) => { - const { - scheduleType: readScheduleType, - scheduleData: readScheduleData, - ...connectionRead - } = interception.response?.body; - loadedConnection = connectionRead; - - expect(loadedConnection).not.to.eq(null); - expect(readScheduleType).to.eq("manual"); - expect(readScheduleData).to.eq(undefined); - }); - - goToReplicationTab(); - - selectSchedule("Every hour"); - submitButtonClick(); - - waitForUpdateConnectionRequest().then((interception) => { - // Schedule is pulled out here, but we don't do anything with is as it's legacy - const { scheduleType, scheduleData, schedule, ...connectionUpdate } = interception.response?.body; - expect(scheduleType).to.eq("basic"); - expect(scheduleData.basicSchedule).to.deep.eq({ - timeUnit: "hours", - units: 1, - }); - - expect(loadedConnection).to.deep.eq(connectionUpdate); - }); - checkSuccessResult(); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create PokeAPI <> Local JSON connection, and delete connection", () => { - const sourceName = "Test delete connection source cypress"; - const destName = "Test delete connection destination cypress"; - 
createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToSettingsPage(); - - deleteEntity(); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create PokeAPI <> Local JSON connection, update connection replication settings - set destination namespace with 'Custom format' option", () => { - const sourceName = appendRandomString("Test update connection PokeAPI source cypress"); - const destName = appendRandomString("Test update connection Local JSON destination cypress"); - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - const namespace = "_DestinationNamespaceCustomFormat"; - setupDestinationNamespaceCustomFormat(namespace); - - // Ensures the DestinationNamespace is applied to the streams - assert(cy.get(`[title*="${namespace}"]`)); - - submitButtonClick(); - - waitForUpdateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - expect(interception.request.method).to.eq("POST"); - expect(interception.request) - .property("body") - .to.contain({ - name: `${sourceName} <> ${destName}Connection name`, - namespaceDefinition: "customformat", - namespaceFormat: "${SOURCE_NAMESPACE}_DestinationNamespaceCustomFormat", - status: "active", - }); - - const streamToUpdate = interception.request.body.syncCatalog.streams[0]; - - expect(streamToUpdate.stream).to.contain({ - name: "pokemon", - }); - }); - checkSuccessResult(); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create PokeAPI <> Local JSON connection, update connection replication settings - set destination namespace with 'Mirror source structure' option", () => { - const sourceName = appendRandomString("Test update connection PokeAPI source cypress"); - const destName = 
appendRandomString("Test update connection Local JSON destination cypress"); - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - const namespace = ""; - - // Ensures the DestinationNamespace is applied to the streams - assert(cy.get(`[title*="${namespace}"]`)); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create PokeAPI <> Local JSON connection, update connection replication settings - set destination namespace with 'Destination default' option", () => { - const sourceName = appendRandomString("Test update connection PokeAPI source cypress"); - const destName = appendRandomString("Test update connection Local JSON destination cypress"); - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - setupDestinationNamespaceDefaultFormat(); - - const namespace = ""; - - // Ensures the DestinationNamespace is applied to the streams - assert(cy.get(`[title*="${namespace}"]`)); - - submitButtonClick(); - - waitForUpdateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - expect(interception.request.method).to.eq("POST"); - expect(interception.request) - .property("body") - .to.contain({ - name: `${sourceName} <> ${destName}Connection name`, - namespaceDefinition: "destination", - namespaceFormat: "${SOURCE_NAMESPACE}", - status: "active", - }); - - const streamToUpdate = interception.request.body.syncCatalog.streams[0]; - - expect(streamToUpdate.stream).to.contain({ - name: "pokemon", - }); - }); - checkSuccessResult(); - - deleteSource(sourceName); - deleteDestination(destName); - }); -}); - -describe("Connection - stream details", () => { - beforeEach(() => { - initialSetupCompleted(); - populateDBSource(); - }); - - 
afterEach(() => { - cleanDBSource(); - }); - - it("Create Postgres <> Postgres connection, connection replication settings, expand stream details", () => { - const sourceName = appendRandomString("Test connection Postgres source cypress"); - const destName = appendRandomString("Test connection Postgres destination cypress"); - const streamName = "users"; - - const collectionNames = ["email", "id", "name", "updated_at"]; - const collectionTypes = ["String", "Integer", "String", "Datetime"]; - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - searchStream(streamName); - expandStreamDetailsByName(streamName); - checkStreamFields(collectionNames, collectionTypes); - - deleteSource(sourceName); - deleteDestination(destName); - }); -}); - -describe("Connection sync modes", () => { - beforeEach(() => { - initialSetupCompleted(); - populateDBSource(); - - interceptUpdateConnectionRequest(); - }); - - afterEach(() => { - cleanDBSource(); - }); - - it("Create Postgres <> Postgres connection, update connection replication settings - select 'Incremental Append' sync mode, select required Cursor field, verify changes", () => { - const sourceName = appendRandomString("Test connection Postgres source cypress"); - const destName = appendRandomString("Test connection Postgres destination cypress"); - const streamName = "users"; - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - searchStream(streamName); - selectSyncMode("Incremental", "Append"); - selectCursorField(streamName, "updated_at"); - - submitButtonClick(); - confirmStreamConfigurationChangedPopup(); - - waitForUpdateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - }); - - checkSuccessResult(); - - 
goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - searchStream("users"); - checkCursorField(streamName, "updated_at"); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create Postgres <> Postgres connection, update connection replication settings - select 'Incremental Deduped History'(PK is defined), select Cursor field, verify changes", () => { - const sourceName = appendRandomString("Test connection Postgres source cypress"); - const destName = appendRandomString("Test connection Postgres destination cypress"); - const streamName = "users"; - - createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - searchStream(streamName); - selectSyncMode("Incremental", "Deduped + history"); - selectCursorField(streamName, "updated_at"); - checkPreFilledPrimaryKeyField(streamName, "id"); - - submitButtonClick(); - confirmStreamConfigurationChangedPopup(); - - waitForUpdateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - }); - - checkSuccessResult(); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - searchStream(streamName); - - checkCursorField(streamName, "updated_at"); - checkPreFilledPrimaryKeyField(streamName, "id"); - - deleteSource(sourceName); - deleteDestination(destName); - }); - - it("Create Postgres <> Postgres connection, update connection replication settings - select 'Incremental Deduped History'(PK is NOT defined), select Cursor field, select PK, verify changes", () => { - const sourceName = appendRandomString("Test connection Postgres source cypress"); - const destName = appendRandomString("Test connection Postgres destination cypress"); - const streamName = "cities"; - - 
createTestConnection(sourceName, destName); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - searchStream(streamName); - selectSyncMode("Incremental", "Deduped + history"); - selectCursorField(streamName, "city"); - isPrimaryKeyNonExist(streamName); - selectPrimaryKeyField(streamName, ["city_code"]); - - submitButtonClick(); - confirmStreamConfigurationChangedPopup(); - - waitForUpdateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - }); - - checkSuccessResult(); - - goToSourcePage(); - openSourceOverview(sourceName); - openConnectionOverviewByDestinationName(destName); - - goToReplicationTab(); - - searchStream(streamName); - - checkCursorField(streamName, "city"); - checkPrimaryKey(streamName, ["city_code"]); - - deleteSource(sourceName); - deleteDestination(destName); - }); -}); - -describe("Connection - detect source schema changes in source", () => { - beforeEach(() => { - initialSetupCompleted(); - populateDBSource(); - - interceptUpdateConnectionRequest(); - }); - - afterEach(() => { - cleanDBSource(); - }); - - it("Create Postgres <> Local JSON connection, update data in source (async), refresh source schema, check diff modal, reset streams", () => { - const sourceName = appendRandomString( - "Test refresh source schema with changed data - connection Postgres source cypress" - ); - const destName = appendRandomString( - "Test refresh source schema with changed data - connection Local JSON destination cypress" - ); - - createTestConnection(sourceName, destName); - cy.get("div").contains(sourceName).should("exist"); - cy.get("div").contains(destName).should("exist"); - - makeChangesInDBSource(); - goToReplicationTab(); - refreshSourceSchemaBtnClick(); - - checkCatalogDiffModal(); - - cy.get(removedStreamsTable).should("contain", "users"); - - cy.get(newStreamsTable).should("contain", "cars"); - - 
toggleStreamWithChangesAccordion("cities"); - cy.get(removedFieldsTable).should("contain", "city_code"); - cy.get(newFieldsTable).children().should("contain", "country").and("contain", "state"); - - clickCatalogDiffCloseButton(); - - toggleStreamEnabledState("cars"); - - submitButtonClick(); - resetModalSaveBtnClick(); - - waitForUpdateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - }); - - checkSuccessResult(); - - deleteSource(sourceName); - deleteDestination(destName); - }); -}); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/connection/streamTable.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/connection/streamTable.spec.ts deleted file mode 100644 index 8b88d911469e..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/integration/connection/streamTable.spec.ts +++ /dev/null @@ -1,175 +0,0 @@ -import { initialSetupCompleted } from "commands/workspaces"; -import { - getPostgresCreateDestinationBody, - getPostgresCreateSourceBody, - requestCreateDestination, - requestCreateSource, - requestDeleteConnection, - requestDeleteDestination, - requestDeleteSource, - requestWorkspaceId, -} from "commands/api"; -import { appendRandomString, submitButtonClick } from "commands/common"; -import { clickNewConnectionButton, visitConnectionsListPage } from "pages/connnectionsListPage"; -import { - checkAmountOfStreamTableRows, - checkColumnNames, - checkConnectorIconAndTitle, - clickUseExistingConnectorButton, - isAtConnectionOverviewPage, - isAtNewConnectionPage, - isNewConnectionPageHeaderVisible, - isStreamTableRowVisible, - scrollTableToStream, - selectExistingConnectorFromDropdown, -} from "pages/newConnectionPage"; -import { - interceptCreateConnectionRequest, - interceptDiscoverSchemaRequest, - interceptGetSourceDefinitionsRequest, - interceptGetSourcesListRequest, - waitForCreateConnectionRequest, - waitForDiscoverSchemaRequest, - waitForGetSourceDefinitionsRequest, - 
waitForGetSourcesListRequest, -} from "commands/interceptors"; -import { Connection, Destination, Source } from "commands/api/types"; -import { clearStreamSearch, searchStream, selectSchedule } from "pages/replicationPage"; -import { runDbQuery } from "commands/db/db"; -import { - createUsersTableQuery, - dropUsersTableQuery, - createDummyTablesQuery, - dropDummyTablesQuery, -} from "commands/db/queries"; - -// TODO: Enable this test when the new stream table will be turned on -describe.skip("New stream table - new connection set up ", () => { - let source: Source; - let destination: Destination; - let connectionId: string; - - before(() => { - initialSetupCompleted(); - runDbQuery(dropUsersTableQuery); - runDbQuery(dropDummyTablesQuery(20)); - - runDbQuery(createUsersTableQuery); - runDbQuery(createDummyTablesQuery(20)); - - requestWorkspaceId().then(() => { - const sourceRequestBody = getPostgresCreateSourceBody(appendRandomString("Stream table Source")); - const destinationRequestBody = getPostgresCreateDestinationBody(appendRandomString("Stream table Destination")); - - requestCreateSource(sourceRequestBody).then((sourceResponse) => { - source = sourceResponse; - requestCreateDestination(destinationRequestBody).then((destinationResponse) => { - destination = destinationResponse; - }); - }); - }); - }); - - after(() => { - if (connectionId) { - requestDeleteConnection(connectionId); - } - if (source) { - requestDeleteSource(source.sourceId); - } - if (destination) { - requestDeleteDestination(destination.destinationId); - } - }); - - it("should open 'New connection' page", () => { - visitConnectionsListPage(); - interceptGetSourcesListRequest(); - interceptGetSourceDefinitionsRequest(); - - clickNewConnectionButton(); - waitForGetSourcesListRequest(); - waitForGetSourceDefinitionsRequest(); - }); - - it("should select existing Source from dropdown and click button", () => { - selectExistingConnectorFromDropdown(source.name); - 
clickUseExistingConnectorButton("source"); - }); - - it("should select existing Destination from dropdown and click button", () => { - interceptDiscoverSchemaRequest(); - selectExistingConnectorFromDropdown(destination.name); - clickUseExistingConnectorButton("destination"); - waitForDiscoverSchemaRequest(); - }); - - it("should redirect to 'New connection' settings page with stream table'", () => { - isAtNewConnectionPage(); - }); - - it("should show 'New connection' page header", () => { - isNewConnectionPageHeaderVisible(); - }); - - it("should set 'Replication frequency' to 'Manual'", () => { - selectSchedule("Manual"); - }); - - it("should check check connector icons and titles in table", () => { - checkConnectorIconAndTitle("source"); - checkConnectorIconAndTitle("destination"); - }); - - it("should check columns names in table", () => { - checkColumnNames(); - }); - - it("should check total amount of table streams", () => { - // dummy tables amount + users table - checkAmountOfStreamTableRows(21); - }); - - it("should allow to scroll table to desired stream table row and it should be visible", () => { - const desiredStreamTableRow = "dummy_table_18"; - - scrollTableToStream(desiredStreamTableRow); - isStreamTableRowVisible(desiredStreamTableRow); - }); - - it("should filter table by stream name", () => { - searchStream("dummy_table_10"); - checkAmountOfStreamTableRows(1); - }); - - it("should clear stream search input field and show all available streams", () => { - clearStreamSearch(); - checkAmountOfStreamTableRows(21); - }); - - /* - here will be added more tests to extend the test flow - */ - - it("should set up a connection", () => { - interceptCreateConnectionRequest(); - submitButtonClick(true); - waitForCreateConnectionRequest().then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - expect(interception.request.method).to.eq("POST"); - - const connection: Partial = { - name: `${source.name} <> ${destination.name}`, - 
scheduleType: "manual", - }; - expect(interception.request.body).to.contain(connection); - expect(interception.response?.body).to.contain(connection); - - connectionId = interception.response?.body?.connectionId; - }); - }); - - it("should redirect to connection overview page after connection set up", () => { - isAtConnectionOverviewPage(connectionId); - }); -}); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/connectorBuilder.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/connectorBuilder.spec.ts deleted file mode 100644 index c719ed6df81d..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/integration/connectorBuilder.spec.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { goToConnectorBuilderPage, startFromScratch, testStream } from "pages/connectorBuilderPage"; -import { - assertTestReadItems, - assertTestReadAuthFailure, - configureAuth, - configureGlobals, - configureStream, - configurePagination, - assertMultiPageReadItems, -} from "commands/connectorBuilder"; -import { initialSetupCompleted } from "commands/workspaces"; - -describe("Connector builder", () => { - before(() => { - initialSetupCompleted(); - goToConnectorBuilderPage(); - startFromScratch(); - }); - - it("Configure basic connector", () => { - configureGlobals(); - configureStream(); - }); - - it("Fail on missing auth", () => { - testStream(); - assertTestReadAuthFailure(); - }); - - it("Succeed on provided auth", () => { - configureAuth(); - testStream(); - assertTestReadItems(); - }); - - it("Pagination", () => { - configurePagination(); - testStream(); - assertMultiPageReadItems(); - }); -}); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/destination.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/destination.spec.ts deleted file mode 100644 index 26fd9109a07b..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/integration/destination.spec.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { appendRandomString } from "commands/common"; -import { 
createLocalJsonDestination, deleteDestination, updateDestination } from "commands/destination"; -import { initialSetupCompleted } from "commands/workspaces"; - -describe("Destination main actions", () => { - beforeEach(() => { - initialSetupCompleted(); - }); - - it("Create new destination", () => { - createLocalJsonDestination("Test destination cypress", "/local"); - - cy.url().should("include", `/destination/`); - }); - - it("Update destination", () => { - const destName = appendRandomString("Test destination cypress for update"); - createLocalJsonDestination(destName, "/local"); - updateDestination(destName, "connectionConfiguration.destination_path", "/local/my-json"); - - cy.get("div[data-id='success-result']").should("exist"); - cy.get("input[value='/local/my-json']").should("exist"); - }); - - it("Delete destination", () => { - const destName = appendRandomString("Test destination cypress for delete"); - createLocalJsonDestination(destName, "/local"); - deleteDestination(destName); - - cy.visit("/destination"); - cy.get("div").contains(destName).should("not.exist"); - }); -}); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/onboarding.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/onboarding.spec.ts deleted file mode 100644 index 2ea56269f443..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/integration/onboarding.spec.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { submitButtonClick, fillEmail } from "commands/common"; -import { initialSetupCompleted } from "commands/workspaces"; - -describe("Preferences actions", () => { - beforeEach(() => { - initialSetupCompleted(false); - }); - - it("Should redirect to connections page after email is entered", () => { - cy.visit("/preferences"); - cy.url().should("include", `/preferences`); - - fillEmail("test-email-onboarding@test-onboarding-domain.com"); - cy.get("input[name=securityUpdates]").parent().click(); - - submitButtonClick(); - - cy.url().should("match", /.*\/connections/); - }); -}); diff 
--git a/airbyte-webapp-e2e-tests/cypress/integration/source.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/source.spec.ts deleted file mode 100644 index 995039304486..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/integration/source.spec.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { appendRandomString, submitButtonClick } from "commands/common"; -import { createPostgresSource, deleteSource, updateSource } from "commands/source"; -import { initialSetupCompleted } from "commands/workspaces"; -import { goToSourcePage, openNewSourceForm } from "pages/sourcePage"; -import { openHomepage } from "pages/sidebar"; -import { selectServiceType } from "pages/createConnectorPage"; -import { fillPokeAPIForm } from "commands/connector"; - -describe("Source main actions", () => { - beforeEach(() => initialSetupCompleted()); - - it("Create new source", () => { - cy.intercept("/api/v1/sources/create").as("createSource"); - createPostgresSource("Test source cypress"); - - cy.wait("@createSource", { timeout: 30000 }).then((interception) => { - assert("include", `/source/${interception.response?.body.Id}`); - }); - }); - - // TODO: add update source on some other connector or create 1 more user for pg - it.skip("Update source", () => { - const sourceName = appendRandomString("Test source cypress for update"); - createPostgresSource(sourceName); - updateSource(sourceName, "connectionConfiguration.start_date", "2020-11-11"); - - cy.get("div[data-id='success-result']").should("exist"); - cy.get("input[value='2020-11-11']").should("exist"); - }); - - it("Delete source", () => { - const sourceName = appendRandomString("Test source cypress for delete"); - createPostgresSource(sourceName); - deleteSource(sourceName); - - cy.visit("/"); - cy.get("div").contains(sourceName).should("not.exist"); - }); -}); - -describe("Unsaved changes modal", () => { - beforeEach(() => initialSetupCompleted()); - - it("Check leaving Source page without any changes", () => { - goToSourcePage(); - 
openNewSourceForm(); - - openHomepage(); - - cy.url().should("include", "/connections"); - cy.get("[data-testid='confirmationModal']").should("not.exist"); - }); - - it("Check leaving Source page without any changes after selection type", () => { - goToSourcePage(); - openNewSourceForm(); - selectServiceType("PokeAPI"); - - openHomepage(); - - cy.url().should("include", "/connections"); - cy.get("[data-testid='confirmationModal']").should("not.exist"); - }); - - it("Check leaving Source page without any changes", () => { - goToSourcePage(); - openNewSourceForm(); - fillPokeAPIForm("testName", "ditto"); - - openHomepage(); - - cy.get("[data-testid='confirmationModal']").should("exist"); - cy.get("[data-testid='confirmationModal']").contains("Discard changes"); - cy.get("[data-testid='confirmationModal']").contains( - "There are unsaved changes. Are you sure you want to discard your changes?" - ); - }); - - it("Check leaving Source page after failing testing", () => { - cy.intercept("/api/v1/scheduler/sources/check_connection").as("checkSourceUpdateConnection"); - - goToSourcePage(); - openNewSourceForm(); - fillPokeAPIForm("testName", "name"); - submitButtonClick(); - - cy.wait("@checkSourceUpdateConnection", { timeout: 5000 }); - - openHomepage(); - - cy.get("[data-testid='confirmationModal']").should("exist"); - cy.get("[data-testid='confirmationModal']").contains("Discard changes"); - cy.get("[data-testid='confirmationModal']").contains( - "There are unsaved changes. Are you sure you want to discard your changes?" 
- ); - }); -}); diff --git a/airbyte-webapp-e2e-tests/cypress/pages/connectionPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/connectionPage.ts deleted file mode 100644 index f901c448d8da..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/connectionPage.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Connection } from "commands/api/types"; -import { getWorkspaceId } from "commands/api/workspace"; - -const syncEnabledSwitch = "[data-testid='enabledControl-switch']"; - -export const visitConnectionPage = (connection: Connection, tab = "") => { - cy.intercept("**/web_backend/connections/get").as("getConnection"); - cy.visit(`/workspaces/${getWorkspaceId()}/connections/${connection.connectionId}/${tab}`); - cy.wait("@getConnection", { timeout: 20000 }); -}; - -export const getSyncEnabledSwitch = () => cy.get(syncEnabledSwitch); diff --git a/airbyte-webapp-e2e-tests/cypress/pages/connectorBuilderPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/connectorBuilderPage.ts deleted file mode 100644 index ec8e025523af..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/connectorBuilderPage.ts +++ /dev/null @@ -1,96 +0,0 @@ -const startFromScratchButton = "button[data-testid='start-from-scratch']"; -const nameInput = "input[name='global.connectorName']"; -const urlBaseInput = "input[name='global.urlBase']"; -const addStreamButton = "button[data-testid='add-stream']"; -const apiKeyInput = "input[name='connectionConfiguration.api_key']"; -const toggleInput = "input[data-testid='toggle']"; -const streamNameInput = "input[name='streamName']"; -const streamUrlPath = "input[name='urlPath']"; -const recordSelectorInput = "[data-testid='tag-input'] input"; -const authType = "[data-testid='global.authenticator.type']"; -const testInputsButton = "[data-testid='test-inputs']"; -const limitInput = "[name='streams[0].paginator.strategy.page_size']"; -const injectOffsetInto = "[data-testid$='paginator.pageTokenOption.inject_into']"; -const injectOffsetFieldName = 
"[name='streams[0].paginator.pageTokenOption.field_name']"; -const testPageItem = "[data-testid='test-pages'] li"; -const submit = "button[type='submit']"; -const testStreamButton = "button[data-testid='read-stream']"; - -export const goToConnectorBuilderPage = () => { - cy.visit("/connector-builder"); - cy.wait(3000); -}; - -export const startFromScratch = () => { - cy.get(startFromScratchButton).click(); -}; - -export const enterName = (name: string) => { - cy.get(nameInput).clear().type(name); -}; - -export const enterUrlBase = (urlBase: string) => { - cy.get(urlBaseInput).type(urlBase); -}; - -export const enterRecordSelector = (recordSelector: string) => { - cy.get(recordSelectorInput).first().type(recordSelector, { force: true }).type("{enter}", { force: true }); -}; - -const selectFromDropdown = (selector: string, value: string) => { - cy.get(`${selector} .react-select__dropdown-indicator`).last().click({ force: true }); - - cy.get(`.react-select__option`).contains(value).click(); -}; - -export const selectAuthMethod = (value: string) => { - selectFromDropdown(authType, value); -}; - -export const goToView = (view: string) => { - cy.get(`button[data-testid=navbutton-${view}]`).click(); -}; - -export const openTestInputs = () => { - cy.get(testInputsButton).click(); -}; - -export const enterTestInputs = ({ apiKey }: { apiKey: string }) => { - cy.get(apiKeyInput).type(apiKey); -}; - -export const goToTestPage = (page: number) => { - cy.get(testPageItem).contains(page).click(); -}; - -export const togglePagination = () => { - cy.get(toggleInput).first().click({ force: true }); -}; - -export const configureOffsetPagination = (limit: string, into: string, fieldName: string) => { - cy.get(limitInput).type(limit); - selectFromDropdown(injectOffsetInto, into); - cy.get(injectOffsetFieldName).type(fieldName); -}; - -export const addStream = () => { - cy.get(addStreamButton).click(); -}; - -export const enterStreamName = (streamName: string) => { - 
cy.get(streamNameInput).type(streamName); -}; - -export const enterUrlPath = (urlPath: string) => { - cy.get(streamUrlPath).type(urlPath); -}; - -export const submitForm = () => { - cy.get(submit).click(); -}; - -export const testStream = () => { - // wait for debounced form - cy.wait(500); - cy.get(testStreamButton).click(); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/pages/connnectionsListPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/connnectionsListPage.ts deleted file mode 100644 index bb468f1b2673..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/connnectionsListPage.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { Connection } from "commands/api/types"; -import { getWorkspaceId } from "commands/api/workspace"; - -const statusCell = (connectionId: string) => `[data-testId='statusCell-${connectionId}']`; -const changesStatusIcon = (type: string) => `[data-testId='changesStatusIcon-${type}']`; -const manualSyncButton = "button[data-testId='manual-sync-button']"; -const newConnectionButton = "button[data-testId='new-connection-button']"; - -export const visitConnectionsListPage = () => { - cy.intercept("**/web_backend/connections/list").as("listConnections"); - cy.visit(`/workspaces/${getWorkspaceId()}/connections`); - cy.wait("@listConnections", { timeout: 20000 }); -}; - -export const getSchemaChangeIcon = (connection: Connection, type: "breaking" | "non_breaking") => - cy.get(`${statusCell(connection.connectionId)} ${changesStatusIcon(type)}`); - -export const getManualSyncButton = (connection: Connection) => - cy.get(`${statusCell(connection.connectionId)} ${manualSyncButton}`); - -export const clickNewConnectionButton = () => { - cy.get(newConnectionButton).click(); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/pages/createConnectorPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/createConnectorPage.ts deleted file mode 100644 index 44bee2632352..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/createConnectorPage.ts +++ /dev/null 
@@ -1,63 +0,0 @@ -const selectTypeDropdown = "div[data-testid='serviceType']"; -const getServiceTypeDropdownOption = (serviceName: string) => `div[data-testid='${serviceName}']`; -const nameInput = "input[name=name]"; -const hostInput = "input[name='connectionConfiguration.host']"; -const portInput = "input[name='connectionConfiguration.port']"; -const databaseInput = "input[name='connectionConfiguration.database']"; -const usernameInput = "input[name='connectionConfiguration.username']"; -const passwordInput = "input[name='connectionConfiguration.password']"; -const pokemonNameInput = "input[name='connectionConfiguration.pokemon_name']"; -const schemaInput = "[data-testid='tag-input'] input"; -const destinationPathInput = "input[name='connectionConfiguration.destination_path']"; - -export const selectServiceType = (type: string) => - cy - .get(selectTypeDropdown) - .click() - .within(() => cy.get(getServiceTypeDropdownOption(type)).click()); - -export const enterName = (name: string) => { - cy.get(nameInput).clear().type(name); -}; - -export const enterHost = (host: string) => { - cy.get(hostInput).type(host); -}; - -export const enterPort = (port: string) => { - cy.get(portInput).type("{selectAll}{del}").type(port); -}; - -export const enterDatabase = (database: string) => { - cy.get(databaseInput).type(database); -}; - -export const enterUsername = (username: string) => { - cy.get(usernameInput).type(username); -}; - -export const enterPassword = (password: string) => { - cy.get(passwordInput).type(password); -}; - -export const enterPokemonName = (pokeName: string) => { - cy.get(pokemonNameInput).type(pokeName); -}; - -export const enterDestinationPath = (destinationPath: string) => { - cy.get(destinationPathInput).type(destinationPath); -}; - -export const enterSchema = (value: string) => { - if (!value) { - return; - } - cy.get(schemaInput).first().type(value, { force: true }).type("{enter}", { force: true }); -}; - -export const removeSchema = (value = 
"Remove public") => { - if (!value) { - return; - } - cy.get(`[aria-label*="${value}"]`).click(); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/pages/destinationPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/destinationPage.ts deleted file mode 100644 index b217d99dad8e..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/destinationPage.ts +++ /dev/null @@ -1,21 +0,0 @@ -const newDestination = "button[data-id='new-destination']"; -const addSourceButton = "button[data-testid='select-source']"; - -export const goToDestinationPage = () => { - cy.intercept("/api/v1/destinations/list").as("getDestinationsList"); - cy.visit("/destination"); - cy.wait(3000); -}; - -export const openNewDestinationForm = () => { - cy.wait("@getDestinationsList").then(({ response }) => { - if (response?.body.destinations.length) { - cy.get(newDestination).click(); - } - }); - cy.url().should("include", `/destination/new-destination`); -}; - -export const openAddSource = () => { - cy.get(addSourceButton).click(); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/pages/modals/catalogDiffModal.ts b/airbyte-webapp-e2e-tests/cypress/pages/modals/catalogDiffModal.ts deleted file mode 100644 index 4abcf8891b3d..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/modals/catalogDiffModal.ts +++ /dev/null @@ -1,20 +0,0 @@ -export const catalogDiffModal = "[data-testid='catalog-diff-modal']"; -export const removedStreamsTable = "table[aria-label='removed streams table']"; -export const newStreamsTable = "table[aria-label='new streams table']"; -const streamWithChangesToggleBtn = (streamName: string) => - `button[data-testid='toggle-accordion-${streamName}-stream']`; -export const removedFieldsTable = "table[aria-label='removed fields']"; -export const newFieldsTable = "table[aria-label='new fields']"; -export const closeButton = "[data-testid='update-schema-confirm-btn']"; - -export const checkCatalogDiffModal = () => { - cy.get(catalogDiffModal).should("exist"); -}; - -export const 
toggleStreamWithChangesAccordion = (streamName: string) => { - cy.get(streamWithChangesToggleBtn(streamName)).click(); -}; - -export const clickCatalogDiffCloseButton = () => { - cy.get(closeButton).click(); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/pages/newConnectionPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/newConnectionPage.ts deleted file mode 100644 index 34c3209f7a41..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/newConnectionPage.ts +++ /dev/null @@ -1,70 +0,0 @@ -type ConnectorType = "source" | "destination"; -const existingConnectorDropdown = `div[data-testid='entityId']`; -const getExistingConnectorDropdownOption = (connectorName: string) => `div[data-testid='${connectorName}']`; -const useExistingConnectorButton = (connectorType: ConnectorType) => - `button[data-testid='use-existing-${connectorType}-button']`; - -const pageHeaderContainer = `div[data-testid='page-header-container']`; -const newConnectionPageTitle = "New connection"; - -const connectorHeaderGroupIcon = (connectorType: ConnectorType) => - `span[data-testid='connector-header-group-icon-container-${connectorType}']`; -const catalogTreeTableHeader = `div[data-testid='catalog-tree-table-header']`; -const catalogTreeTableBody = `div[data-testid='catalog-tree-table-body']`; - -export const selectExistingConnectorFromDropdown = (connectorName: string) => - cy - .get(existingConnectorDropdown) - .click() - .within(() => cy.get(getExistingConnectorDropdownOption(connectorName)).click()); - -export const clickUseExistingConnectorButton = (connectorType: ConnectorType) => - cy.get(useExistingConnectorButton(connectorType)).click(); - -export const isNewConnectionPageHeaderVisible = () => - cy.get(pageHeaderContainer).contains(newConnectionPageTitle).should("be.visible"); - -/* - Route checking - */ -export const isAtNewConnectionPage = () => cy.url().should("include", `/connections/new-connection`); -export const isAtConnectionOverviewPage = (connectionId: string) => - 
cy.url().should("include", `connections/${connectionId}/status`); - -/* - Stream table - */ -export const checkConnectorIconAndTitle = (connectorType: ConnectorType) => { - const connectorIcon = connectorHeaderGroupIcon(connectorType); - cy.get(connectorIcon) - .contains(connectorType, { matchCase: false }) - .within(() => { - cy.get("img").should("have.attr", "src").should("not.be.empty"); - }); -}; - -export const checkColumnNames = () => { - const columnNames = ["Sync", "Namespace", "Stream name", "Sync mode", "Cursor field", "Primary key"]; - cy.get(catalogTreeTableHeader).within(($header) => { - columnNames.forEach((columnName) => { - cy.contains(columnName); - }); - // we have two Namespace columns - cy.get(`div:contains(${columnNames[1]})`).should("have.length", 2); - // we have two Stream Name columns - cy.get(`div:contains(${columnNames[2]})`).should("have.length", 2); - }); -}; - -export const checkAmountOfStreamTableRows = (expectedAmountOfRows: number) => - cy - .get(catalogTreeTableBody) - .find("[data-testid^='catalog-tree-table-row-']") - .should("have.length", expectedAmountOfRows); - -export const scrollTableToStream = (streamName: string) => { - cy.get(catalogTreeTableBody).contains(streamName).scrollIntoView(); -}; - -export const isStreamTableRowVisible = (streamName: string) => - cy.get(catalogTreeTableBody).contains(streamName).should("be.visible"); diff --git a/airbyte-webapp-e2e-tests/cypress/pages/replicationPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/replicationPage.ts deleted file mode 100644 index 3c55b5bf853f..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/replicationPage.ts +++ /dev/null @@ -1,237 +0,0 @@ -import { submitButtonClick } from "commands/common"; - -const scheduleDropdown = "div[data-testid='scheduleData']"; -const scheduleValue = (value: string) => `div[data-testid='${value}']`; -const destinationPrefix = "input[data-testid='prefixInput']"; -const replicationTab = "div[data-id='replication-step']"; -const 
destinationNamespace = "div[data-testid='namespaceDefinition']"; -const destinationNamespaceCustom = "div[data-testid='namespaceDefinition-customformat']"; -const destinationNamespaceDefault = "div[data-testid='namespaceDefinition-destination']"; -const destinationNamespaceSource = "div[data-testid='namespaceDefinition-source']"; -const destinationNamespaceCustomInput = "input[data-testid='input']"; -const syncModeDropdown = "div[data-testid='syncSettingsDropdown'] input"; -const getFieldDropdownContainer = (streamName: string, type: Dropdown) => `div[id='${streamName}_${type}_pathPopout']`; -const getFieldDropdownButton = (streamName: string, type: Dropdown) => - `button[data-testid='${streamName}_${type}_pathPopout']`; -const getFieldDropdownOption = (value: string) => `div[data-testid='${value}']`; -const dropDownOverlayContainer = "div[data-testid='overlayContainer']"; -const streamNameCell = "[data-testid='nameCell']"; -const streamDataTypeCell = "[data-testid='dataTypeCell']"; -const getExpandStreamArrowBtn = (streamName: string) => `[data-testid='${streamName}_expandStreamDetails']`; -const getPreFilledPrimaryKeyText = (streamName: string) => `[data-testid='${streamName}_primaryKey_pathPopout_text']`; -const successResult = "div[data-id='success-result']"; -const resetModalResetCheckbox = "[data-testid='resetModal-reset-checkbox']"; -const saveStreamChangesButton = "button[data-testid='resetModal-save']"; -const connectionNameInput = "input[data-testid='connectionName']"; -const refreshSourceSchemaButton = "button[data-testid='refresh-source-schema-btn']"; -const streamSyncEnabledSwitch = (streamName: string) => `[data-testid='${streamName}-stream-sync-switch']`; -const streamNameInput = "input[data-testid='input']"; -const resetModalSaveButton = "[data-testid='resetModal-save']"; -const schemaChangesDetectedBanner = "[data-testid='schemaChangesDetected']"; -const schemaChangesReviewButton = "[data-testid='schemaChangesReviewButton']"; -const 
schemaChangesBackdrop = "[data-testid='schemaChangesBackdrop']"; -const nonBreakingChangesPreference = "[data-testid='nonBreakingChangesPreference']"; -const nonBreakingChangesPreferenceValue = (value: string) => `div[data-testid='nonBreakingChangesPreference-${value}']`; - -export const goToReplicationTab = () => { - cy.get(replicationTab).click(); -}; - -export const enterConnectionName = (name: string) => { - cy.get(connectionNameInput).type(name); -}; - -export const expandStreamDetailsByName = (streamName: string) => cy.get(getExpandStreamArrowBtn(streamName)).click(); - -export const selectSchedule = (value: string) => { - cy.get(scheduleDropdown).click(); - cy.get(scheduleValue(value)).click(); -}; - -export const fillOutDestinationPrefix = (value: string) => { - cy.get(destinationPrefix).clear().type(value).should("have.value", value); -}; - -export const setupDestinationNamespaceCustomFormat = (value: string) => { - cy.get(destinationNamespace).click(); - cy.get(destinationNamespaceCustom).click(); - cy.get(destinationNamespaceCustomInput).first().type(value).should("have.value", `\${SOURCE_NAMESPACE}${value}`); -}; - -export const setupDestinationNamespaceSourceFormat = () => { - cy.get(destinationNamespace).click(); - cy.get(destinationNamespaceSource).click(); -}; - -export const refreshSourceSchemaBtnClick = () => cy.get(refreshSourceSchemaButton).click(); - -export const resetModalSaveBtnClick = () => cy.get(resetModalSaveButton).click(); - -export const setupDestinationNamespaceDefaultFormat = () => { - cy.get(destinationNamespace).click(); - cy.get(destinationNamespaceDefault).click(); -}; - -export const selectSyncMode = (source: string, dest: string) => { - cy.get(syncModeDropdown).first().click({ force: true }); - - cy.get(`.react-select__option`).contains(`Source:${source}|Dest:${dest}`).click(); -}; - -type Dropdown = "cursor" | "primaryKey"; -/** - * General function - select dropdown option(s) - * @param streamName - * @param dropdownType - * 
@param value - */ -const selectFieldDropdownOption = (streamName: string, dropdownType: Dropdown, value: string | string[]) => { - const container = getFieldDropdownContainer(streamName, dropdownType); - const button = getFieldDropdownButton(streamName, dropdownType); - - cy.get(container).within(() => { - cy.get(button).click(); - - if (Array.isArray(value)) { - // in case if multiple options need to be selected - value.forEach((v) => cy.get(getFieldDropdownOption(v)).click()); - } else { - // in case if one option need to be selected - cy.get(getFieldDropdownOption(value)).click(); - } - }); - // close dropdown - // (dropdown need to be closed manually by clicking on overlay in case if multiple option selection is available) - cy.get("body").then(($body) => { - if ($body.find(dropDownOverlayContainer).length > 0) { - cy.get(dropDownOverlayContainer).click(); - } - }); -}; - -/** - * Select cursor value from cursor dropdown(pathPopout) in desired stream - * @param streamName - * @param cursorValue - */ -export const selectCursorField = (streamName: string, cursorValue: string) => - selectFieldDropdownOption(streamName, "cursor", cursorValue); - -/** - * Select primary key value(s) from primary key dropdown(pathPopout) in desired stream - * @param streamName - * @param primaryKeyValues - */ -export const selectPrimaryKeyField = (streamName: string, primaryKeyValues: string[]) => - selectFieldDropdownOption(streamName, "primaryKey", primaryKeyValues); - -export const checkStreamFields = (listNames: string[], listTypes: string[]) => { - cy.get(streamNameCell).each(($span, i) => { - expect($span.text()).to.equal(listNames[i]); - }); - - cy.get(streamDataTypeCell).each(($span, i) => { - expect($span.text()).to.equal(listTypes[i]); - }); -}; - -/** - * General function - check selected field dropdown option or options - * @param streamName - * @param dropdownType - * @param expectedValue - */ -const checkDropdownField = (streamName: string, dropdownType: Dropdown, 
expectedValue: string | string[]) => { - const button = getFieldDropdownButton(streamName, dropdownType); - const isButtonContainsExactValue = (value: string) => cy.get(button).contains(new RegExp(`^${value}$`)); - - return Array.isArray(expectedValue) - ? expectedValue.every((value) => isButtonContainsExactValue(value)) - : isButtonContainsExactValue(expectedValue); -}; - -/** - * Check selected value in cursor dropdown - * @param streamName - * @param expectedValue - */ -export const checkCursorField = (streamName: string, expectedValue: string) => - checkDropdownField(streamName, "cursor", expectedValue); - -/** - * Check selected value(s) in primary key dropdown - * @param streamName - * @param expectedValues - */ -export const checkPrimaryKey = (streamName: string, expectedValues: string[]) => - checkDropdownField(streamName, "primaryKey", expectedValues); - -export const checkPreFilledPrimaryKeyField = (streamName: string, expectedValue: string) => { - cy.get(getPreFilledPrimaryKeyText(streamName)).contains(expectedValue); -}; - -export const isPrimaryKeyNonExist = (streamName: string) => { - cy.get(getPreFilledPrimaryKeyText(streamName)).should("not.exist"); -}; - -export const searchStream = (value: string) => { - cy.get(streamNameInput).type(value); -}; - -export const clearStreamSearch = () => { - cy.get(streamNameInput).clear(); -}; - -export const clickSaveReplication = ({ reset = false, confirm = true } = {}) => { - cy.intercept("/api/v1/web_backend/connections/update").as("updateConnection"); - - submitButtonClick(); - - if (confirm) { - confirmStreamConfigurationChangedPopup({ reset }); - } - - cy.wait("@updateConnection").then((interception) => { - assert.isNotNull(interception.response?.statusCode, "200"); - }); - - checkSuccessResult(); -}; - -export const checkSuccessResult = () => { - cy.get(successResult).should("exist"); -}; - -export const confirmStreamConfigurationChangedPopup = ({ reset = false } = {}) => { - if (!reset) { - 
cy.get(resetModalResetCheckbox).click({ force: true }); - } - cy.get(saveStreamChangesButton).click(); -}; - -export const toggleStreamEnabledState = (streamName: string) => { - cy.get(streamSyncEnabledSwitch(streamName)).check({ force: true }); -}; - -export const checkSchemaChangesDetected = ({ breaking }: { breaking: boolean }) => { - cy.get(schemaChangesDetectedBanner).should("exist"); - cy.get(schemaChangesDetectedBanner) - .invoke("attr", "class") - .should("match", breaking ? /\_breaking/ : /nonBreaking/); - cy.get(schemaChangesBackdrop).should(breaking ? "exist" : "not.exist"); -}; - -export const checkSchemaChangesDetectedCleared = () => { - cy.get(schemaChangesDetectedBanner).should("not.exist"); - cy.get(schemaChangesBackdrop).should("not.exist"); -}; - -export const clickSchemaChangesReviewButton = () => { - cy.get(schemaChangesReviewButton).click(); - cy.get(schemaChangesReviewButton).should("be.disabled"); -}; - -export const selectNonBreakingChangesPreference = (preference: "ignore" | "disable") => { - cy.get(nonBreakingChangesPreference).click(); - cy.get(nonBreakingChangesPreferenceValue(preference)).click(); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/pages/settingsConnectionPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/settingsConnectionPage.ts deleted file mode 100644 index 5c4b1c00d568..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/settingsConnectionPage.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { clickOnCellInTable } from "commands/common"; - -const settingsTab = "div[data-id='settings-step']"; -const sourceColumnName = "Source name"; -const destinationColumnName = "Destination name"; -const connectionsTable = "table[data-testid='connectionsTable']"; - -export const openConnectionOverviewBySourceName = (sourceName: string) => { - clickOnCellInTable(connectionsTable, sourceColumnName, sourceName); -}; - -export const openConnectionOverviewByDestinationName = (destinationName: string) => { - 
clickOnCellInTable(connectionsTable, destinationColumnName, destinationName); -}; - -export const goToSettingsPage = () => { - cy.get(settingsTab).click(); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/pages/sidebar.ts b/airbyte-webapp-e2e-tests/cypress/pages/sidebar.ts deleted file mode 100644 index c19b45ef907d..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/sidebar.ts +++ /dev/null @@ -1,10 +0,0 @@ -const setting = "nav a[href*='settings']"; -const homepage = "[aria-label='Homepage']"; - -export const openSettings = () => { - cy.get(setting).click(); -}; - -export const openHomepage = () => { - cy.get(homepage).click(); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/pages/sourcePage.ts b/airbyte-webapp-e2e-tests/cypress/pages/sourcePage.ts deleted file mode 100644 index ed80ecebb866..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/pages/sourcePage.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { clickOnCellInTable } from "commands/common"; - -const newSource = "button[data-id='new-source']"; -const sourcesTable = "table[data-testid='sourcesTable']"; -const sourceNameColumn = "Name"; - -export const goToSourcePage = () => { - cy.intercept("/api/v1/sources/list").as("getSourcesList"); - cy.visit("/source"); - cy.wait(3000); -}; - -export const openSourceDestinationFromGrid = (value: string) => { - cy.get("div").contains(value).click(); -}; - -export const openSourceOverview = (sourceName: string) => { - clickOnCellInTable(sourcesTable, sourceNameColumn, sourceName); -}; - -export const openNewSourceForm = () => { - cy.wait("@getSourcesList").then(({ response }) => { - if (response?.body.sources.length) { - cy.get(newSource).click(); - } - }); - cy.url().should("include", `/source/new-source`); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/plugins/index.ts b/airbyte-webapp-e2e-tests/cypress/plugins/index.ts deleted file mode 100644 index e13e5aa5f1c5..000000000000 --- a/airbyte-webapp-e2e-tests/cypress/plugins/index.ts +++ /dev/null @@ -1,46 
+0,0 @@ -/// -// *********************************************************** -// This example plugins/index.js can be used to load plugins -// -// You can change the location of this file or turn off loading -// the plugins file with the 'pluginsFile' configuration option. -// -// You can read more here: -// https://on.cypress.io/plugins-guide -// *********************************************************** - -// This function is called when a project is opened or re-opened (e.g. due to -// the project's config changing) - -import Cypress from "cypress"; - -const pgp = require("pg-promise")(); -const cypressConfig = require(require("path").resolve("cypress.json")); - -interface dbConfig { - user: string; - host: string; - database: string; - password: string; - port: number; -} - -function dbConnection(query: any, userDefineConnection: dbConfig) { - let connection = cypressConfig.db; - if (userDefineConnection !== undefined) { - connection = userDefineConnection; - } - const db = pgp(connection); - return db.any(query).finally(db.$pool.end); -} - -/** - * @type {Cypress.PluginConfig} - */ -module.exports = (on: Cypress.PluginEvents, config: Cypress.PluginConfigOptions) => { - // `on` is used to hook into various events Cypress emits - // `config` is the resolved Cypress config - on("task", { - dbQuery: (query) => dbConnection(query.query, query.connection), - }); -}; diff --git a/airbyte-webapp-e2e-tests/cypress/support/index.js b/airbyte-webapp-e2e-tests/cypress/support/index.js deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-webapp-e2e-tests/dummy_api.js b/airbyte-webapp-e2e-tests/dummy_api.js deleted file mode 100644 index 83a878b2f75f..000000000000 --- a/airbyte-webapp-e2e-tests/dummy_api.js +++ /dev/null @@ -1,28 +0,0 @@ -// Script starting a basic webserver returning mocked data over an authenticated API to test the connector builder UI and connector builder server in an -// end to end fashion. 
- -// Start with `npm run createdummyapi` - -const http = require('http'); - -const items = [{ name: "abc" }, { name: "def" }, { name: "xxx" }, { name: "yyy" }]; - -const requestListener = function (req, res) { - if (req.headers["authorization"] !== "Bearer theauthkey") { - res.writeHead(403); res.end(JSON.stringify({ error: "Bad credentials" })); return; - } - if (req.url !== "/items") { - res.writeHead(404); res.end(JSON.stringify({ error: "Not found" })); return; - } - // Add more dummy logic in here - res.setHeader("Content-Type", "application/json"); - res.writeHead(200); - res.end(JSON.stringify({ items: [...items].splice(req.headers["offset"] ? Number(req.headers["offset"]) : 0, 2) })); -} - -const server = http.createServer(requestListener); -server.listen(6767); - -process.on('SIGINT', function () { - process.exit() -}) diff --git a/airbyte-webapp-e2e-tests/package-lock.json b/airbyte-webapp-e2e-tests/package-lock.json deleted file mode 100644 index 63caf3c6dc48..000000000000 --- a/airbyte-webapp-e2e-tests/package-lock.json +++ /dev/null @@ -1,5732 +0,0 @@ -{ - "name": "airbyte-webapp-e2e-tests", - "version": "0.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "airbyte-webapp-e2e-tests", - "version": "0.0.0", - "devDependencies": { - "@types/node": "^18.11.9", - "@typescript-eslint/eslint-plugin": "^5.27.1", - "@typescript-eslint/parser": "^5.27.1", - "cypress": "^9.2.0", - "eslint-config-prettier": "^8.5.0", - "eslint-plugin-cypress": "^2.12.1", - "eslint-plugin-prettier": "^4.0.0", - "pg-promise": "^10.15.4", - "prettier": "^2.6.2", - "typescript": "^4.5.4" - }, - "engines": { - "node": "16.18.1" - } - }, - "node_modules/@cypress/request": { - "version": "2.88.10", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.10.tgz", - "integrity": "sha512-Zp7F+R93N0yZyG34GutyTNr+okam7s/Fzc1+i3kcqOP8vk6OuajuE9qZJ6Rs+10/1JFtXFYMdyarnU1rZuJesg==", - "dev": true, - "dependencies": { - "aws-sign2": "~0.7.0", - 
"aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "http-signature": "~1.3.6", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.5.0", - "tunnel-agent": "^0.6.0", - "uuid": "^8.3.2" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/@cypress/xvfb": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", - "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", - "dev": true, - "dependencies": { - "debug": "^3.1.0", - "lodash.once": "^4.1.1" - } - }, - "node_modules/@cypress/xvfb/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/@eslint/eslintrc": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.0.5.tgz", - "integrity": "sha512-BLxsnmK3KyPunz5wmCCpqy0YelEoxxGmH73Is+Z74oOTMtExcjkr3dDR6quwrjh1YspA8DH9gnX1o069KiS9AQ==", - "dev": true, - "peer": true, - "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.2.0", - "globals": "^13.9.0", - "ignore": "^4.0.6", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.0.4", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/@eslint/eslintrc/node_modules/globals": { - "version": "13.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", - "integrity": 
"sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", - "dev": true, - "peer": true, - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@eslint/eslintrc/node_modules/type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@humanwhocodes/config-array": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.2.tgz", - "integrity": "sha512-UXOuFCGcwciWckOpmfKDq/GyhlTf9pN/BzG//x8p8zTOFEcGuA68ANXheFS0AGvy3qgZqLBUkMs7hqzqCKOVwA==", - "dev": true, - "peer": true, - "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=10.10.0" - } - }, - "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true, - "peer": true - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": 
"https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@types/json-schema": { - "version": "7.0.11", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", - "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", - "dev": true - }, - "node_modules/@types/node": { - "version": "18.11.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz", - "integrity": "sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg==", - "dev": true - }, - "node_modules/@types/semver": { - "version": "7.3.13", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz", - "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", - "dev": true - }, - "node_modules/@types/sinonjs__fake-timers": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.2.tgz", - "integrity": "sha512-dIPoZ3g5gcx9zZEszaxLSVTvMReD3xxyyDnQUjA6IYDG9Ba2AV0otMPs+77sG9ojB4Qr2N2Vk5RnKeuA0X/0bg==", - "dev": true - }, - "node_modules/@types/sizzle": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.2.tgz", - "integrity": "sha512-7EJYyKTL7tFR8+gDbB6Wwz/arpGa0Mywk1TJbNzKzHtzbwVmY4HR9WqS5VV7dsBUKQmPNr192jHr/VpBluj/hg==", - 
"dev": true - }, - "node_modules/@types/yauzl": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz", - "integrity": "sha512-8uALY5LTvSuHgloDVUvWP3pIauILm+8/0pDMokuDYIoNsOkSwd5AiHBTSEJjKTDcZr5z8UpgOWZkxBF4iJftoA==", - "dev": true, - "optional": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@typescript-eslint/eslint-plugin": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.45.1.tgz", - "integrity": "sha512-cOizjPlKEh0bXdFrBLTrI/J6B/QMlhwE9auOov53tgB+qMukH6/h8YAK/qw+QJGct/PTbdh2lytGyipxCcEtAw==", - "dev": true, - "dependencies": { - "@typescript-eslint/scope-manager": "5.45.1", - "@typescript-eslint/type-utils": "5.45.1", - "@typescript-eslint/utils": "5.45.1", - "debug": "^4.3.4", - "ignore": "^5.2.0", - "natural-compare-lite": "^1.4.0", - "regexpp": "^3.2.0", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "@typescript-eslint/parser": "^5.0.0", - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.1.tgz", - "integrity": "sha512-d2qQLzTJ9WxQftPAuEQpSPmKqzxePjzVbpAVv62AQ64NTL+wR4JkrVqR/LqFsFEUsHDAiId52mJteHDFuDkElA==", - "dev": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/@typescript-eslint/parser": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.45.1.tgz", - "integrity": "sha512-JQ3Ep8bEOXu16q0ztsatp/iQfDCtvap7sp/DKo7DWltUquj5AfCOpX2zSzJ8YkAVnrQNqQ5R62PBz2UtrfmCkA==", - "dev": true, - "dependencies": { - "@typescript-eslint/scope-manager": 
"5.45.1", - "@typescript-eslint/types": "5.45.1", - "@typescript-eslint/typescript-estree": "5.45.1", - "debug": "^4.3.4" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.45.1.tgz", - "integrity": "sha512-D6fCileR6Iai7E35Eb4Kp+k0iW7F1wxXYrOhX/3dywsOJpJAQ20Fwgcf+P/TDtvQ7zcsWsrJaglaQWDhOMsspQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.45.1", - "@typescript-eslint/visitor-keys": "5.45.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.45.1.tgz", - "integrity": "sha512-aosxFa+0CoYgYEl3aptLe1svP910DJq68nwEJzyQcrtRhC4BN0tJAvZGAe+D0tzjJmFXe+h4leSsiZhwBa2vrA==", - "dev": true, - "dependencies": { - "@typescript-eslint/typescript-estree": "5.45.1", - "@typescript-eslint/utils": "5.45.1", - "debug": "^4.3.4", - "tsutils": "^3.21.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "*" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/types": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.45.1.tgz", - "integrity": 
"sha512-HEW3U0E5dLjUT+nk7b4lLbOherS1U4ap+b9pfu2oGsW3oPu7genRaY9dDv3nMczC1rbnRY2W/D7SN05wYoGImg==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.45.1.tgz", - "integrity": "sha512-76NZpmpCzWVrrb0XmYEpbwOz/FENBi+5W7ipVXAsG3OoFrQKJMiaqsBMbvGRyLtPotGqUfcY7Ur8j0dksDJDng==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.45.1", - "@typescript-eslint/visitor-keys": "5.45.1", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/utils": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.45.1.tgz", - "integrity": "sha512-rlbC5VZz68+yjAzQBc4I7KDYVzWG2X/OrqoZrMahYq3u8FFtmQYc+9rovo/7wlJH5kugJ+jQXV5pJMnofGmPRw==", - "dev": true, - "dependencies": { - "@types/json-schema": "^7.0.9", - "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.45.1", - "@typescript-eslint/types": "5.45.1", - "@typescript-eslint/typescript-estree": "5.45.1", - "eslint-scope": "^5.1.1", - "eslint-utils": "^3.0.0", - "semver": "^7.3.7" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/eslint-scope": { - "version": 
"5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.45.1.tgz", - "integrity": "sha512-cy9ln+6rmthYWjH9fmx+5FU/JDpjQb586++x2FZlveq7GdGuLLW9a2Jcst2TGekH82bXpfmRNSwP9tyEs6RjvQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.45.1", - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/acorn": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.6.0.tgz", - "integrity": "sha512-U1riIR+lBSNi3IbxtaHOIKdH8sLFv3NYfNv8sg7ZsNhcfl4HF2++BfqqrNAxoCLQW1iiylOj76ecnaUxz+z9yw==", - "dev": true, - "peer": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "peer": true, - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/aggregate-error": { - "version": 
"3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dev": true, - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ansi-colors": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "dev": true, - "dependencies": { - "type-fest": "^0.21.3" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/arch": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", - "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true, - "peer": true - }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/asn1": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", - "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", - "dev": true, - "dependencies": { - "safer-buffer": "~2.1.0" - } - }, - "node_modules/assert-options": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/assert-options/-/assert-options-0.8.0.tgz", - "integrity": "sha512-qSELrEaEz4sGwTs4Qh+swQkjiHAysC4rot21+jzXU86dJzNG+FDqBzyS3ohSoTRf4ZLA3FSwxQdiuNl5NXUtvA==", - "dev": true, - 
"engines": { - "node": ">=10.0.0" - } - }, - "node_modules/assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", - "dev": true, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/astral-regex": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/async": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", - "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==", - "dev": true - }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true - }, - "node_modules/at-least-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", - "dev": true, - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/aws4": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", - "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", - "dev": true - }, - "node_modules/balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": 
"sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "node_modules/bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", - "dev": true, - "dependencies": { - "tweetnacl": "^0.14.3" - } - }, - "node_modules/blob-util": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", - "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", - "dev": true - }, - "node_modules/bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/buffer-writer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", - "integrity": 
"sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/cachedir": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", - "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "dev": true - }, - "node_modules/chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/check-more-types": { - "version": "2.24.0", - "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", - "integrity": "sha1-FCD/sQ/URNz8ebQ4kbv//TKoRgA=", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/ci-info": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.3.0.tgz", - "integrity": "sha512-riT/3vI5YpVH6/qomlDnJow6TBee2PBKSEpx3O32EGPYbWGIRsIlGRms3Sm74wYE1JMo8RnO04Hb12+v1J5ICw==", - "dev": true - }, - "node_modules/clean-stack": { - "version": "2.2.0", - "resolved": 
"https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "dependencies": { - "restore-cursor": "^3.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-table3": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.0.tgz", - "integrity": "sha512-gnB85c3MGC7Nm9I/FkiasNBOKjOiO1RNuXXarQms37q4QMpWdlbBgD/VnOStA2faG1dpXMv31RFApjX1/QdgWQ==", - "dev": true, - "dependencies": { - "object-assign": "^4.1.0", - "string-width": "^4.2.0" - }, - "engines": { - "node": "10.* || >= 12.*" - }, - "optionalDependencies": { - "colors": "^1.1.2" - } - }, - "node_modules/cli-truncate": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", - "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", - "dev": true, - "dependencies": { - "slice-ansi": "^3.0.0", - "string-width": "^4.2.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - 
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/colorette": { - "version": "2.0.16", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", - "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", - "dev": true - }, - "node_modules/colors": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", - "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", - "dev": true, - "optional": true, - "engines": { - "node": ">=0.1.90" - } - }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/commander": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", - "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", - "dev": true, - "engines": { - "node": ">= 6" - } - }, - "node_modules/common-tags": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", - "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", - "dev": true, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "node_modules/core-util-is": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "dev": true - }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/cypress": { - "version": "9.2.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-9.2.0.tgz", - "integrity": "sha512-Jn26Tprhfzh/a66Sdj9SoaYlnNX6Mjfmj5PHu2a7l3YHXhrgmavM368wjCmgrxC6KHTOv9SpMQGhAJn+upDViA==", - "dev": true, - "hasInstallScript": true, - "dependencies": { - "@cypress/request": "^2.88.10", - "@cypress/xvfb": "^1.2.4", - "@types/node": "^14.14.31", - "@types/sinonjs__fake-timers": "^6.0.2", - "@types/sizzle": "^2.3.2", - "arch": "^2.2.0", - "blob-util": "^2.0.2", - "bluebird": "3.7.2", - "cachedir": "^2.3.0", - "chalk": "^4.1.0", - "check-more-types": "^2.24.0", - "cli-cursor": "^3.1.0", - "cli-table3": "~0.6.0", - "commander": "^5.1.0", - "common-tags": "^1.8.0", - "dayjs": "^1.10.4", - "debug": "^4.3.2", - "enquirer": "^2.3.6", - "eventemitter2": "^6.4.3", - "execa": "4.1.0", - "executable": "^4.1.1", - "extract-zip": "2.0.1", - "figures": "^3.2.0", - "fs-extra": "^9.1.0", - "getos": "^3.2.1", - "is-ci": "^3.0.0", - "is-installed-globally": "~0.4.0", - "lazy-ass": "^1.6.0", - "listr2": "^3.8.3", - "lodash": "^4.17.21", - "log-symbols": "^4.0.0", - "minimist": "^1.2.5", - "ospath": "^1.2.2", - "pretty-bytes": "^5.6.0", - "proxy-from-env": "1.0.0", - "request-progress": "^3.0.0", - "supports-color": "^8.1.1", - "tmp": "~0.2.1", - "untildify": "^4.0.0", - "url": "^0.11.0", - "yauzl": "^2.10.0" - }, - "bin": { - "cypress": "bin/cypress" - }, - "engines": { - "node": ">=12.0.0" - } - }, - 
"node_modules/cypress/node_modules/@types/node": { - "version": "14.18.33", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.33.tgz", - "integrity": "sha512-qelS/Ra6sacc4loe/3MSjXNL1dNQ/GjxNHVzuChwMfmk7HuycRLVQN2qNY3XahK+fZc5E2szqQSKUyAF0E+2bg==", - "dev": true - }, - "node_modules/cypress/node_modules/supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "dev": true, - "dependencies": { - "assert-plus": "^1.0.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/dayjs": { - "version": "1.10.4", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.10.4.tgz", - "integrity": "sha512-RI/Hh4kqRc1UKLOAf/T5zdMMX5DQIlDxwUe3wSyMMnEbGunnpENCdbUgM+dW7kXidZqCttBrmw7BhN4TMddkCw==", - "dev": true - }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/debug/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "node_modules/deep-is": { - "version": 
"0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true, - "peer": true - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "peer": true, - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", - "dev": true, - "dependencies": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" - } - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - 
"dependencies": { - "once": "^1.4.0" - } - }, - "node_modules/enquirer": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", - "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", - "dev": true, - "dependencies": { - "ansi-colors": "^4.1.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.5.0.tgz", - "integrity": "sha512-tVGSkgNbOfiHyVte8bCM8OmX+xG9PzVG/B4UCF60zx7j61WIVY/AqJECDgpLD4DbbESD0e174gOg3ZlrX15GDg==", - "dev": true, - "peer": true, - "dependencies": { - "@eslint/eslintrc": "^1.0.5", - "@humanwhocodes/config-array": "^0.9.2", - "ajv": "^6.10.0", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "enquirer": "^2.3.5", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.1.0", - "eslint-utils": "^3.0.0", - "eslint-visitor-keys": "^3.1.0", - "espree": "^9.2.0", - "esquery": "^1.4.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "functional-red-black-tree": "^1.0.1", - "glob-parent": "^6.0.1", - "globals": "^13.6.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", - "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "progress": "^2.0.0", - 
"regexpp": "^3.2.0", - "semver": "^7.2.1", - "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-config-prettier": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", - "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", - "dev": true, - "bin": { - "eslint-config-prettier": "bin/cli.js" - }, - "peerDependencies": { - "eslint": ">=7.0.0" - } - }, - "node_modules/eslint-plugin-cypress": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-cypress/-/eslint-plugin-cypress-2.12.1.tgz", - "integrity": "sha512-c2W/uPADl5kospNDihgiLc7n87t5XhUbFDoTl6CfVkmG+kDAb5Ux10V9PoLPu9N+r7znpc+iQlcmAqT1A/89HA==", - "dev": true, - "dependencies": { - "globals": "^11.12.0" - }, - "peerDependencies": { - "eslint": ">= 3.2.1" - } - }, - "node_modules/eslint-plugin-prettier": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", - "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", - "dev": true, - "dependencies": { - "prettier-linter-helpers": "^1.0.0" - }, - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "eslint": ">=7.28.0", - "prettier": ">=2.0.0" - }, - "peerDependenciesMeta": { - "eslint-config-prettier": { - "optional": true - } - } - }, - "node_modules/eslint-scope": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.0.tgz", - "integrity": "sha512-aWwkhnS0qAXqNOgKOK0dJ2nvzEbhEvpy8OlJ9kZ0FeZnA6zpjv1/Vei+puGFFX7zkPCkHHXb7IDX3A+7yPrRWg==", - "dev": true, - "peer": 
true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^2.0.0" - }, - "engines": { - "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { - "eslint": ">=5" - } - }, - "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/eslint/node_modules/globals": { - "version": "13.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", - "integrity": "sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", - "dev": true, - "peer": true, - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint/node_modules/type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": 
"sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/espree": { - "version": "9.2.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.2.0.tgz", - "integrity": "sha512-oP3utRkynpZWF/F2x/HZJ+AGtnIclaR7z1pYPxy7NYM2fSO6LgK/Rkny8anRSPK/VwEA1eqm2squui0T7ZMOBg==", - "dev": true, - "peer": true, - "dependencies": { - "acorn": "^8.6.0", - "acorn-jsx": "^5.3.1", - "eslint-visitor-keys": "^3.1.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", - "dev": true, - "peer": true, - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } 
- }, - "node_modules/eventemitter2": { - "version": "6.4.3", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.3.tgz", - "integrity": "sha512-t0A2msp6BzOf+QAcI6z9XMktLj52OjGQg+8SJH6v5+3uxNpWYRR3wQmfA+6xtMU9kOC59qk9licus5dYcrYkMQ==", - "dev": true - }, - "node_modules/execa": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", - "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.0", - "get-stream": "^5.0.0", - "human-signals": "^1.1.1", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.0", - "onetime": "^5.1.0", - "signal-exit": "^3.0.2", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/executable": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", - "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", - "dev": true, - "dependencies": { - "pify": "^2.2.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true - }, - "node_modules/extract-zip": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", - "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", - "dev": true, - "dependencies": { - "debug": "^4.1.1", - "get-stream": "^5.1.0", - "yauzl": "^2.10.0" - }, - "bin": { - "extract-zip": "cli.js" - }, - "engines": { - "node": ">= 10.17.0" - }, - "optionalDependencies": { - "@types/yauzl": 
"^2.9.1" - } - }, - "node_modules/extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", - "dev": true, - "engines": [ - "node >=0.6.0" - ] - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, - "peer": true - }, - "node_modules/fast-diff": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", - "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", - "dev": true - }, - "node_modules/fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "peer": true - }, - 
"node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true, - "peer": true - }, - "node_modules/fastq": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.14.0.tgz", - "integrity": "sha512-eR2D+V9/ExcbF9ls441yIuN6TI2ED1Y2ZcA5BmMtJsOkWOFRJQ0Jt0g1UwqXJJVAb+V+umH5Dfr8oh4EVP7VVg==", - "dev": true, - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", - "dev": true, - "dependencies": { - "pend": "~1.2.0" - } - }, - "node_modules/figures": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", - "dev": true, - "dependencies": { - "escape-string-regexp": "^1.0.5" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/figures/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, - "peer": true, - "dependencies": { - "flat-cache": "^3.0.4" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/fill-range": { - "version": "7.0.1", - "resolved": 
"https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "peer": true, - "dependencies": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/flatted": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.4.tgz", - "integrity": "sha512-8/sOawo8tJ4QOBX8YlQBMxL8+RLZfxMQOif9o0KUKTNTjMYElWPE0r/m5VNFxTRd0NSw8qSy8dajrwX4RYI1Hw==", - "dev": true, - "peer": true - }, - "node_modules/forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 0.12" - } - }, - "node_modules/fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", - "dev": true, - "dependencies": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - 
"engines": { - "node": ">=10" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "node_modules/functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", - "dev": true, - "peer": true - }, - "node_modules/get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "dev": true, - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/getos": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", - "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", - "dev": true, - "dependencies": { - "async": "^3.2.0" - } - }, - "node_modules/getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "dev": true, - "dependencies": { - "assert-plus": "^1.0.0" - } - }, - "node_modules/glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - 
"node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "peer": true, - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/global-dirs": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", - "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", - "dev": true, - "dependencies": { - "ini": "2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globby/node_modules/ignore": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.1.tgz", - "integrity": "sha512-d2qQLzTJ9WxQftPAuEQpSPmKqzxePjzVbpAVv62AQ64NTL+wR4JkrVqR/LqFsFEUsHDAiId52mJteHDFuDkElA==", - "dev": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.6", - "resolved": 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", - "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==", - "dev": true - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/http-signature": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", - "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", - "dev": true, - "dependencies": { - "assert-plus": "^1.0.0", - "jsprim": "^2.0.2", - "sshpk": "^1.14.1" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/human-signals": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", - "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", - "dev": true, - "engines": { - "node": ">=8.12.0" - } - }, - "node_modules/ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dev": true, - "peer": true, - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - 
"node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "node_modules/ini": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", - "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/is-ci": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", - "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", - "dev": true, - "dependencies": { - "ci-info": "^3.2.0" - }, - "bin": { - "is-ci": "bin.js" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": 
"3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-installed-globally": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", - "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", - "dev": true, - "dependencies": { - "global-dirs": "^3.0.0", - "is-path-inside": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", - "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", - "dev": true, - "engines": 
{ - "node": ">=8" - } - }, - "node_modules/is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "dev": true - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "node_modules/isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "dev": true - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "peer": true, - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "dev": true - }, - "node_modules/json-schema": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", - "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", - "dev": true - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "peer": true - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": 
"sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", - "dev": true, - "peer": true - }, - "node_modules/json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", - "dev": true - }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dev": true, - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/jsprim": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", - "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", - "dev": true, - "engines": [ - "node >=0.6.0" - ], - "dependencies": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.4.0", - "verror": "1.10.0" - } - }, - "node_modules/lazy-ass": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", - "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", - "dev": true, - "engines": { - "node": "> 0.8" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "peer": true, - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/listr2": { - "version": "3.13.5", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.13.5.tgz", - "integrity": "sha512-3n8heFQDSk+NcwBn3CgxEibZGaRzx+pC64n3YjpMD1qguV4nWus3Al+Oo3KooqFKTQEJ1v7MmnbnyyNspgx3NA==", - "dev": true, - "dependencies": { - "cli-truncate": "^2.1.0", - 
"colorette": "^2.0.16", - "log-update": "^4.0.0", - "p-map": "^4.0.0", - "rfdc": "^1.3.0", - "rxjs": "^7.4.0", - "through": "^2.3.8", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "enquirer": ">= 2.3.0 < 3" - }, - "peerDependenciesMeta": { - "enquirer": { - "optional": true - } - } - }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true, - "peer": true - }, - "node_modules/lodash.once": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=", - "dev": true - }, - "node_modules/log-symbols": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", - "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", - "dev": true, - "dependencies": { - "chalk": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/log-update": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", - "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", - "dev": true, - "dependencies": { - "ansi-escapes": "^4.3.0", - "cli-cursor": "^3.1.0", - "slice-ansi": "^4.0.0", - "wrap-ansi": "^6.2.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-update/node_modules/slice-ansi": { - 
"version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, - "node_modules/log-update/node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": 
"sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - "dependencies": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mime-db": { - "version": "1.51.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", - "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", - "dev": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.34", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", - "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", - "dev": true, - "dependencies": { - "mime-db": "1.51.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "dev": true - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - 
"dev": true - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true, - "peer": true - }, - "node_modules/natural-compare-lite": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", - "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", - "dev": true - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": 
"sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", - "dev": true, - "peer": true, - "dependencies": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/ospath": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", - "integrity": "sha1-EnZjl3Sj+O8lcvf+QoDg6kVQwHs=", - "dev": true - }, - "node_modules/p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "dev": true, - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/packet-reader": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", - "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==", - "dev": true - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "peer": true, - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - 
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", - "dev": true - }, - "node_modules/performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "dev": true - }, - "node_modules/pg": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/pg/-/pg-8.8.0.tgz", - "integrity": "sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==", - "dev": true, - "dependencies": { - "buffer-writer": "2.0.0", - "packet-reader": "1.0.0", - "pg-connection-string": "^2.5.0", - "pg-pool": "^3.5.2", - "pg-protocol": "^1.5.0", - "pg-types": "^2.1.0", - "pgpass": "1.x" - }, - "engines": { - "node": ">= 8.0.0" - }, - "peerDependencies": { - "pg-native": ">=3.0.1" - }, - "peerDependenciesMeta": { - "pg-native": { - "optional": true - } - } - }, - "node_modules/pg-connection-string": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", - "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==", - "dev": true - }, - "node_modules/pg-int8": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", - "integrity": 
"sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", - "dev": true, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/pg-minify": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/pg-minify/-/pg-minify-1.6.2.tgz", - "integrity": "sha512-1KdmFGGTP6jplJoI8MfvRlfvMiyBivMRP7/ffh4a11RUFJ7kC2J0ZHlipoKiH/1hz+DVgceon9U2qbaHpPeyPg==", - "dev": true, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/pg-pool": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", - "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", - "dev": true, - "peerDependencies": { - "pg": ">=8.0" - } - }, - "node_modules/pg-promise": { - "version": "10.15.4", - "resolved": "https://registry.npmjs.org/pg-promise/-/pg-promise-10.15.4.tgz", - "integrity": "sha512-BKlHCMCdNUmF6gagVbehRWSEiVcZzPVltEx14OJExR9Iz9/1R6KETDWLLGv2l6yRqYFnEZZy1VDjRhArzeIGrw==", - "dev": true, - "dependencies": { - "assert-options": "0.8.0", - "pg": "8.8.0", - "pg-minify": "1.6.2", - "spex": "3.2.0" - }, - "engines": { - "node": ">=12.0" - } - }, - "node_modules/pg-protocol": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", - "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==", - "dev": true - }, - "node_modules/pg-types": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", - "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", - "dev": true, - "dependencies": { - "pg-int8": "1.0.1", - "postgres-array": "~2.0.0", - "postgres-bytea": "~1.0.0", - "postgres-date": "~1.0.4", - "postgres-interval": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/pgpass": { - "version": "1.0.5", - "resolved": 
"https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", - "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", - "dev": true, - "dependencies": { - "split2": "^4.1.0" - } - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/postgres-array": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", - "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/postgres-bytea": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", - "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/postgres-date": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", - "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/postgres-interval": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", - "integrity": 
"sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", - "dev": true, - "dependencies": { - "xtend": "^4.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/prettier": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", - "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", - "dev": true, - "bin": { - "prettier": "bin-prettier.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, - "node_modules/prettier-linter-helpers": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", - "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", - "dev": true, - "dependencies": { - "fast-diff": "^1.1.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/pretty-bytes": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", - "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", - "dev": true, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true, - "peer": true, - 
"engines": { - "node": ">=0.4.0" - } - }, - "node_modules/proxy-from-env": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", - "integrity": "sha1-M8UDmPcOp+uW0h97gXYwpVeRx+4=", - "dev": true - }, - "node_modules/psl": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", - "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", - "dev": true - }, - "node_modules/pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "node_modules/punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", - "dev": true, - "engines": { - "node": ">=0.6" - } - }, - "node_modules/querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", - "deprecated": "The querystring API is considered Legacy. 
new code should use the URLSearchParams API instead.", - "dev": true, - "engines": { - "node": ">=0.4.x" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/request-progress": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", - "integrity": "sha1-TKdUCBx/7GP1BeT6qCWqBs1mnb4=", - "dev": true, - "dependencies": { - "throttleit": "^1.0.0" - } - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", - "dev": true, - "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/reusify": { - 
"version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rfdc": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", - "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", - "dev": true - }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/rxjs": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.4.0.tgz", - "integrity": "sha512-7SQDi7xeTMCJpqViXh8gL/lebcwlp3d831F05+9B44A4B0WfsEwUQHR64gsH1kvJ+Ep/J9K2+n1hVl1CsGN23w==", - "dev": true, - "dependencies": { - "tslib": "~2.1.0" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true - }, - "node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", - "dev": true - }, - "node_modules/slash": { - "version": "3.0.0", 
- "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/slice-ansi": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", - "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/spex": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spex/-/spex-3.2.0.tgz", - "integrity": "sha512-9srjJM7NaymrpwMHvSmpDeIK5GoRMX/Tq0E8aOlDPS54dDnDUIp30DrP9SphMPEETDLzEM9+4qo+KipmbtPecg==", - "dev": true, - "engines": { - "node": ">=4.5" - } - }, - "node_modules/split2": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", - "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==", - "dev": true, - "engines": { - "node": ">= 10.x" - } - }, - "node_modules/sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", - "dev": true, - "dependencies": { - "asn1": "~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "safer-buffer": "^2.0.2", - "tweetnacl": "~0.14.0" - }, - "bin": { - "sshpk-conv": "bin/sshpk-conv", - "sshpk-sign": "bin/sshpk-sign", - "sshpk-verify": "bin/sshpk-verify" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/string-width": { - "version": "4.2.0", - "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", - "dev": 
true, - "peer": true - }, - "node_modules/throttleit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", - "integrity": "sha1-nnhYNtr0Z0MUWlmEtiaNgoUorGw=", - "dev": true - }, - "node_modules/through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", - "dev": true - }, - "node_modules/tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dev": true, - "dependencies": { - "rimraf": "^3.0.0" - }, - "engines": { - "node": ">=8.17.0" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, - "dependencies": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/tslib": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.1.0.tgz", - "integrity": "sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==", - "dev": true - }, - "node_modules/tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "dev": true, - 
"dependencies": { - "tslib": "^1.8.1" - }, - "engines": { - "node": ">= 6" - }, - "peerDependencies": { - "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" - } - }, - "node_modules/tsutils/node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - }, - "node_modules/tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "dev": true, - "dependencies": { - "safe-buffer": "^5.0.1" - }, - "engines": { - "node": "*" - } - }, - "node_modules/tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "dev": true - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "peer": true, - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typescript": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.4.tgz", - "integrity": 
"sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", - "dev": true, - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/untildify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "peer": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/url": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", - "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", - "dev": true, - "dependencies": { - "punycode": "1.3.2", - "querystring": "0.2.0" - } - }, - "node_modules/url/node_modules/punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", - "dev": true - }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/v8-compile-cache": { - "version": "2.3.0", - "resolved": 
"https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", - "dev": true, - "peer": true - }, - "node_modules/verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "dev": true, - "engines": [ - "node >=0.6.0" - ], - "dependencies": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": 
"https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true, - "engines": { - "node": ">=0.4" - } - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, - "node_modules/yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", - "dev": true, - "dependencies": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - } - }, - "dependencies": { - "@cypress/request": { - "version": "2.88.10", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.10.tgz", - "integrity": "sha512-Zp7F+R93N0yZyG34GutyTNr+okam7s/Fzc1+i3kcqOP8vk6OuajuE9qZJ6Rs+10/1JFtXFYMdyarnU1rZuJesg==", - "dev": true, - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "http-signature": "~1.3.6", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.5.0", - "tunnel-agent": "^0.6.0", - "uuid": "^8.3.2" - } - }, - "@cypress/xvfb": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", - "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", - "dev": true, - "requires": { - "debug": "^3.1.0", - "lodash.once": "^4.1.1" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": 
"sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "@eslint/eslintrc": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.0.5.tgz", - "integrity": "sha512-BLxsnmK3KyPunz5wmCCpqy0YelEoxxGmH73Is+Z74oOTMtExcjkr3dDR6quwrjh1YspA8DH9gnX1o069KiS9AQ==", - "dev": true, - "peer": true, - "requires": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.2.0", - "globals": "^13.9.0", - "ignore": "^4.0.6", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.0.4", - "strip-json-comments": "^3.1.1" - }, - "dependencies": { - "globals": { - "version": "13.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", - "integrity": "sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", - "dev": true, - "peer": true, - "requires": { - "type-fest": "^0.20.2" - } - }, - "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true, - "peer": true - } - } - }, - "@humanwhocodes/config-array": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.2.tgz", - "integrity": "sha512-UXOuFCGcwciWckOpmfKDq/GyhlTf9pN/BzG//x8p8zTOFEcGuA68ANXheFS0AGvy3qgZqLBUkMs7hqzqCKOVwA==", - "dev": true, - "peer": true, - "requires": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.4" - } - }, - "@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true, - "peer": true - }, - 
"@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - } - }, - "@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true - }, - "@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "requires": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - } - }, - "@types/json-schema": { - "version": "7.0.11", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", - "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", - "dev": true - }, - "@types/node": { - "version": "18.11.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz", - "integrity": "sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg==", - "dev": true - }, - "@types/semver": { - "version": "7.3.13", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz", - "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", - "dev": true - }, - "@types/sinonjs__fake-timers": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.2.tgz", - "integrity": "sha512-dIPoZ3g5gcx9zZEszaxLSVTvMReD3xxyyDnQUjA6IYDG9Ba2AV0otMPs+77sG9ojB4Qr2N2Vk5RnKeuA0X/0bg==", - 
"dev": true - }, - "@types/sizzle": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.2.tgz", - "integrity": "sha512-7EJYyKTL7tFR8+gDbB6Wwz/arpGa0Mywk1TJbNzKzHtzbwVmY4HR9WqS5VV7dsBUKQmPNr192jHr/VpBluj/hg==", - "dev": true - }, - "@types/yauzl": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz", - "integrity": "sha512-8uALY5LTvSuHgloDVUvWP3pIauILm+8/0pDMokuDYIoNsOkSwd5AiHBTSEJjKTDcZr5z8UpgOWZkxBF4iJftoA==", - "dev": true, - "optional": true, - "requires": { - "@types/node": "*" - } - }, - "@typescript-eslint/eslint-plugin": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.45.1.tgz", - "integrity": "sha512-cOizjPlKEh0bXdFrBLTrI/J6B/QMlhwE9auOov53tgB+qMukH6/h8YAK/qw+QJGct/PTbdh2lytGyipxCcEtAw==", - "dev": true, - "requires": { - "@typescript-eslint/scope-manager": "5.45.1", - "@typescript-eslint/type-utils": "5.45.1", - "@typescript-eslint/utils": "5.45.1", - "debug": "^4.3.4", - "ignore": "^5.2.0", - "natural-compare-lite": "^1.4.0", - "regexpp": "^3.2.0", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "dependencies": { - "ignore": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.1.tgz", - "integrity": "sha512-d2qQLzTJ9WxQftPAuEQpSPmKqzxePjzVbpAVv62AQ64NTL+wR4JkrVqR/LqFsFEUsHDAiId52mJteHDFuDkElA==", - "dev": true - } - } - }, - "@typescript-eslint/parser": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.45.1.tgz", - "integrity": "sha512-JQ3Ep8bEOXu16q0ztsatp/iQfDCtvap7sp/DKo7DWltUquj5AfCOpX2zSzJ8YkAVnrQNqQ5R62PBz2UtrfmCkA==", - "dev": true, - "requires": { - "@typescript-eslint/scope-manager": "5.45.1", - "@typescript-eslint/types": "5.45.1", - "@typescript-eslint/typescript-estree": "5.45.1", - "debug": "^4.3.4" - } - }, - "@typescript-eslint/scope-manager": { - "version": "5.45.1", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.45.1.tgz", - "integrity": "sha512-D6fCileR6Iai7E35Eb4Kp+k0iW7F1wxXYrOhX/3dywsOJpJAQ20Fwgcf+P/TDtvQ7zcsWsrJaglaQWDhOMsspQ==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.45.1", - "@typescript-eslint/visitor-keys": "5.45.1" - } - }, - "@typescript-eslint/type-utils": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.45.1.tgz", - "integrity": "sha512-aosxFa+0CoYgYEl3aptLe1svP910DJq68nwEJzyQcrtRhC4BN0tJAvZGAe+D0tzjJmFXe+h4leSsiZhwBa2vrA==", - "dev": true, - "requires": { - "@typescript-eslint/typescript-estree": "5.45.1", - "@typescript-eslint/utils": "5.45.1", - "debug": "^4.3.4", - "tsutils": "^3.21.0" - } - }, - "@typescript-eslint/types": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.45.1.tgz", - "integrity": "sha512-HEW3U0E5dLjUT+nk7b4lLbOherS1U4ap+b9pfu2oGsW3oPu7genRaY9dDv3nMczC1rbnRY2W/D7SN05wYoGImg==", - "dev": true - }, - "@typescript-eslint/typescript-estree": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.45.1.tgz", - "integrity": "sha512-76NZpmpCzWVrrb0XmYEpbwOz/FENBi+5W7ipVXAsG3OoFrQKJMiaqsBMbvGRyLtPotGqUfcY7Ur8j0dksDJDng==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.45.1", - "@typescript-eslint/visitor-keys": "5.45.1", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - } - }, - "@typescript-eslint/utils": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.45.1.tgz", - "integrity": "sha512-rlbC5VZz68+yjAzQBc4I7KDYVzWG2X/OrqoZrMahYq3u8FFtmQYc+9rovo/7wlJH5kugJ+jQXV5pJMnofGmPRw==", - "dev": true, - "requires": { - "@types/json-schema": "^7.0.9", - "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.45.1", - 
"@typescript-eslint/types": "5.45.1", - "@typescript-eslint/typescript-estree": "5.45.1", - "eslint-scope": "^5.1.1", - "eslint-utils": "^3.0.0", - "semver": "^7.3.7" - }, - "dependencies": { - "eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "requires": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - } - }, - "estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true - } - } - }, - "@typescript-eslint/visitor-keys": { - "version": "5.45.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.45.1.tgz", - "integrity": "sha512-cy9ln+6rmthYWjH9fmx+5FU/JDpjQb586++x2FZlveq7GdGuLLW9a2Jcst2TGekH82bXpfmRNSwP9tyEs6RjvQ==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.45.1", - "eslint-visitor-keys": "^3.3.0" - } - }, - "acorn": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.6.0.tgz", - "integrity": "sha512-U1riIR+lBSNi3IbxtaHOIKdH8sLFv3NYfNv8sg7ZsNhcfl4HF2++BfqqrNAxoCLQW1iiylOj76ecnaUxz+z9yw==", - "dev": true, - "peer": true - }, - "acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "peer": true, - "requires": {} - }, - "aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dev": true, - "requires": { - "clean-stack": 
"^2.0.0", - "indent-string": "^4.0.0" - } - }, - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "peer": true, - "requires": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ansi-colors": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", - "dev": true - }, - "ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "dev": true, - "requires": { - "type-fest": "^0.21.3" - } - }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - }, - "arch": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", - "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", - "dev": true - }, - "argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": 
"sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true, - "peer": true - }, - "array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true - }, - "asn1": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", - "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", - "dev": true, - "requires": { - "safer-buffer": "~2.1.0" - } - }, - "assert-options": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/assert-options/-/assert-options-0.8.0.tgz", - "integrity": "sha512-qSELrEaEz4sGwTs4Qh+swQkjiHAysC4rot21+jzXU86dJzNG+FDqBzyS3ohSoTRf4ZLA3FSwxQdiuNl5NXUtvA==", - "dev": true - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", - "dev": true - }, - "astral-regex": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", - "dev": true - }, - "async": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", - "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==", - "dev": true - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true - }, - "at-least-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - "integrity": 
"sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", - "dev": true - }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "dev": true - }, - "aws4": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", - "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", - "dev": true - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", - "dev": true, - "requires": { - "tweetnacl": "^0.14.3" - } - }, - "blob-util": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", - "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", - "dev": true - }, - "bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": 
"sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, - "buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=", - "dev": true - }, - "buffer-writer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", - "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", - "dev": true - }, - "cachedir": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", - "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", - "dev": true - }, - "callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true, - "peer": true - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "dev": true - }, - "chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "check-more-types": { - "version": "2.24.0", - "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", - "integrity": "sha1-FCD/sQ/URNz8ebQ4kbv//TKoRgA=", - "dev": true - }, - "ci-info": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.3.0.tgz", - "integrity": 
"sha512-riT/3vI5YpVH6/qomlDnJow6TBee2PBKSEpx3O32EGPYbWGIRsIlGRms3Sm74wYE1JMo8RnO04Hb12+v1J5ICw==", - "dev": true - }, - "clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "dev": true - }, - "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "requires": { - "restore-cursor": "^3.1.0" - } - }, - "cli-table3": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.0.tgz", - "integrity": "sha512-gnB85c3MGC7Nm9I/FkiasNBOKjOiO1RNuXXarQms37q4QMpWdlbBgD/VnOStA2faG1dpXMv31RFApjX1/QdgWQ==", - "dev": true, - "requires": { - "colors": "^1.1.2", - "object-assign": "^4.1.0", - "string-width": "^4.2.0" - } - }, - "cli-truncate": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", - "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", - "dev": true, - "requires": { - "slice-ansi": "^3.0.0", - "string-width": "^4.2.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "colorette": { - "version": "2.0.16", - "resolved": 
"https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", - "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", - "dev": true - }, - "colors": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", - "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", - "dev": true, - "optional": true - }, - "combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "commander": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", - "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", - "dev": true - }, - "common-tags": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", - "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "dev": true - }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - 
} - }, - "cypress": { - "version": "9.2.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-9.2.0.tgz", - "integrity": "sha512-Jn26Tprhfzh/a66Sdj9SoaYlnNX6Mjfmj5PHu2a7l3YHXhrgmavM368wjCmgrxC6KHTOv9SpMQGhAJn+upDViA==", - "dev": true, - "requires": { - "@cypress/request": "^2.88.10", - "@cypress/xvfb": "^1.2.4", - "@types/node": "^14.14.31", - "@types/sinonjs__fake-timers": "^6.0.2", - "@types/sizzle": "^2.3.2", - "arch": "^2.2.0", - "blob-util": "^2.0.2", - "bluebird": "3.7.2", - "cachedir": "^2.3.0", - "chalk": "^4.1.0", - "check-more-types": "^2.24.0", - "cli-cursor": "^3.1.0", - "cli-table3": "~0.6.0", - "commander": "^5.1.0", - "common-tags": "^1.8.0", - "dayjs": "^1.10.4", - "debug": "^4.3.2", - "enquirer": "^2.3.6", - "eventemitter2": "^6.4.3", - "execa": "4.1.0", - "executable": "^4.1.1", - "extract-zip": "2.0.1", - "figures": "^3.2.0", - "fs-extra": "^9.1.0", - "getos": "^3.2.1", - "is-ci": "^3.0.0", - "is-installed-globally": "~0.4.0", - "lazy-ass": "^1.6.0", - "listr2": "^3.8.3", - "lodash": "^4.17.21", - "log-symbols": "^4.0.0", - "minimist": "^1.2.5", - "ospath": "^1.2.2", - "pretty-bytes": "^5.6.0", - "proxy-from-env": "1.0.0", - "request-progress": "^3.0.0", - "supports-color": "^8.1.1", - "tmp": "~0.2.1", - "untildify": "^4.0.0", - "url": "^0.11.0", - "yauzl": "^2.10.0" - }, - "dependencies": { - "@types/node": { - "version": "14.18.33", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.33.tgz", - "integrity": "sha512-qelS/Ra6sacc4loe/3MSjXNL1dNQ/GjxNHVzuChwMfmk7HuycRLVQN2qNY3XahK+fZc5E2szqQSKUyAF0E+2bg==", - "dev": true - }, - "supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "dashdash": { - "version": "1.14.1", - "resolved": 
"https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "dayjs": { - "version": "1.10.4", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.10.4.tgz", - "integrity": "sha512-RI/Hh4kqRc1UKLOAf/T5zdMMX5DQIlDxwUe3wSyMMnEbGunnpENCdbUgM+dW7kXidZqCttBrmw7BhN4TMddkCw==", - "dev": true - }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "requires": { - "ms": "2.1.2" - }, - "dependencies": { - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - } - } - }, - "deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true, - "peer": true - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true - }, - "dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "requires": { - "path-type": "^4.0.0" - } - }, - "doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "peer": true, - "requires": { - "esutils": "^2.0.2" - } - }, - 
"ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", - "dev": true, - "requires": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" - } - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "requires": { - "once": "^1.4.0" - } - }, - "enquirer": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", - "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", - "dev": true, - "requires": { - "ansi-colors": "^4.1.1" - } - }, - "escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "peer": true - }, - "eslint": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.5.0.tgz", - "integrity": "sha512-tVGSkgNbOfiHyVte8bCM8OmX+xG9PzVG/B4UCF60zx7j61WIVY/AqJECDgpLD4DbbESD0e174gOg3ZlrX15GDg==", - "dev": true, - "peer": true, - "requires": { - "@eslint/eslintrc": "^1.0.5", - "@humanwhocodes/config-array": "^0.9.2", - "ajv": "^6.10.0", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "enquirer": "^2.3.5", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.1.0", - "eslint-utils": "^3.0.0", - "eslint-visitor-keys": "^3.1.0", - 
"espree": "^9.2.0", - "esquery": "^1.4.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "functional-red-black-tree": "^1.0.1", - "glob-parent": "^6.0.1", - "globals": "^13.6.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", - "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "progress": "^2.0.0", - "regexpp": "^3.2.0", - "semver": "^7.2.1", - "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" - }, - "dependencies": { - "globals": { - "version": "13.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", - "integrity": "sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", - "dev": true, - "peer": true, - "requires": { - "type-fest": "^0.20.2" - } - }, - "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true, - "peer": true - } - } - }, - "eslint-config-prettier": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", - "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", - "dev": true, - "requires": {} - }, - "eslint-plugin-cypress": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-cypress/-/eslint-plugin-cypress-2.12.1.tgz", - "integrity": "sha512-c2W/uPADl5kospNDihgiLc7n87t5XhUbFDoTl6CfVkmG+kDAb5Ux10V9PoLPu9N+r7znpc+iQlcmAqT1A/89HA==", - "dev": true, - "requires": { - "globals": "^11.12.0" - } - }, - "eslint-plugin-prettier": { - "version": 
"4.2.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", - "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", - "dev": true, - "requires": { - "prettier-linter-helpers": "^1.0.0" - } - }, - "eslint-scope": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.0.tgz", - "integrity": "sha512-aWwkhnS0qAXqNOgKOK0dJ2nvzEbhEvpy8OlJ9kZ0FeZnA6zpjv1/Vei+puGFFX7zkPCkHHXb7IDX3A+7yPrRWg==", - "dev": true, - "peer": true, - "requires": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - } - }, - "eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^2.0.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true - } - } - }, - "eslint-visitor-keys": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", - "dev": true - }, - "espree": { - "version": "9.2.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.2.0.tgz", - "integrity": "sha512-oP3utRkynpZWF/F2x/HZJ+AGtnIclaR7z1pYPxy7NYM2fSO6LgK/Rkny8anRSPK/VwEA1eqm2squui0T7ZMOBg==", - "dev": true, - "peer": true, - "requires": { - "acorn": "^8.6.0", - "acorn-jsx": "^5.3.1", - "eslint-visitor-keys": "^3.1.0" - } - }, - "esquery": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", - "dev": true, - "peer": true, - "requires": { - "estraverse": "^5.1.0" - } - }, - "esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "requires": { - "estraverse": "^5.2.0" - } - }, - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true - }, - "esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, - "peer": true - }, - "eventemitter2": { - "version": "6.4.3", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.3.tgz", - "integrity": "sha512-t0A2msp6BzOf+QAcI6z9XMktLj52OjGQg+8SJH6v5+3uxNpWYRR3wQmfA+6xtMU9kOC59qk9licus5dYcrYkMQ==", - "dev": true - }, - "execa": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", - "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", - "dev": true, - "requires": { - "cross-spawn": "^7.0.0", - "get-stream": "^5.0.0", - "human-signals": "^1.1.1", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.0", - "onetime": "^5.1.0", - "signal-exit": "^3.0.2", - "strip-final-newline": "^2.0.0" - } - }, - "executable": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", - "integrity": 
"sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", - "dev": true, - "requires": { - "pify": "^2.2.0" - } - }, - "extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true - }, - "extract-zip": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", - "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", - "dev": true, - "requires": { - "@types/yauzl": "^2.9.1", - "debug": "^4.1.1", - "get-stream": "^5.1.0", - "yauzl": "^2.10.0" - } - }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", - "dev": true - }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, - "peer": true - }, - "fast-diff": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", - "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", - "dev": true - }, - "fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "dependencies": { - "glob-parent": { - "version": "5.1.2", - "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - } - } - }, - "fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "peer": true - }, - "fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true, - "peer": true - }, - "fastq": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.14.0.tgz", - "integrity": "sha512-eR2D+V9/ExcbF9ls441yIuN6TI2ED1Y2ZcA5BmMtJsOkWOFRJQ0Jt0g1UwqXJJVAb+V+umH5Dfr8oh4EVP7VVg==", - "dev": true, - "requires": { - "reusify": "^1.0.4" - } - }, - "fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", - "dev": true, - "requires": { - "pend": "~1.2.0" - } - }, - "figures": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5" - }, - "dependencies": { - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true - } - } - }, - "file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": 
"sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, - "peer": true, - "requires": { - "flat-cache": "^3.0.4" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "peer": true, - "requires": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - } - }, - "flatted": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.4.tgz", - "integrity": "sha512-8/sOawo8tJ4QOBX8YlQBMxL8+RLZfxMQOif9o0KUKTNTjMYElWPE0r/m5VNFxTRd0NSw8qSy8dajrwX4RYI1Hw==", - "dev": true, - "peer": true - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "dev": true - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", - "dev": true, - "requires": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - } - }, - 
"fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", - "dev": true, - "peer": true - }, - "get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "dev": true, - "requires": { - "pump": "^3.0.0" - } - }, - "getos": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", - "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", - "dev": true, - "requires": { - "async": "^3.2.0" - } - }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "peer": true, - "requires": { - "is-glob": "^4.0.3" - } - }, - "global-dirs": { - "version": "3.0.0", - 
"resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", - "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", - "dev": true, - "requires": { - "ini": "2.0.0" - } - }, - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - }, - "globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "requires": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "dependencies": { - "ignore": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.1.tgz", - "integrity": "sha512-d2qQLzTJ9WxQftPAuEQpSPmKqzxePjzVbpAVv62AQ64NTL+wR4JkrVqR/LqFsFEUsHDAiId52mJteHDFuDkElA==", - "dev": true - } - } - }, - "graceful-fs": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", - "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==", - "dev": true - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "http-signature": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", - "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^2.0.2", - "sshpk": 
"^1.14.1" - } - }, - "human-signals": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", - "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", - "dev": true - }, - "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true, - "peer": true - }, - "import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dev": true, - "peer": true, - "requires": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - } - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true, - "peer": true - }, - "indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "dev": true - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "ini": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", - "integrity": 
"sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", - "dev": true - }, - "is-ci": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", - "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", - "dev": true, - "requires": { - "ci-info": "^3.2.0" - } - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true - }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-installed-globally": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", - "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", - "dev": true, - "requires": { - "global-dirs": "^3.0.0", - "is-path-inside": "^3.0.2" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": 
"sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true - }, - "is-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", - "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", - "dev": true - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "dev": true - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "dev": true - }, - "js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "peer": true, - "requires": { - "argparse": "^2.0.1" - } - }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "dev": true - }, - "json-schema": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", - "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", - "dev": true - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "peer": true - }, - "json-stable-stringify-without-jsonify": { - 
"version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", - "dev": true, - "peer": true - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", - "dev": true - }, - "jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.6", - "universalify": "^2.0.0" - } - }, - "jsprim": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", - "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", - "dev": true, - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.4.0", - "verror": "1.10.0" - } - }, - "lazy-ass": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", - "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", - "dev": true - }, - "levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "peer": true, - "requires": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - } - }, - "listr2": { - "version": "3.13.5", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.13.5.tgz", - "integrity": "sha512-3n8heFQDSk+NcwBn3CgxEibZGaRzx+pC64n3YjpMD1qguV4nWus3Al+Oo3KooqFKTQEJ1v7MmnbnyyNspgx3NA==", - "dev": true, - "requires": { - "cli-truncate": "^2.1.0", - "colorette": "^2.0.16", - "log-update": "^4.0.0", - "p-map": "^4.0.0", - 
"rfdc": "^1.3.0", - "rxjs": "^7.4.0", - "through": "^2.3.8", - "wrap-ansi": "^7.0.0" - } - }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, - "lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true, - "peer": true - }, - "lodash.once": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=", - "dev": true - }, - "log-symbols": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", - "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", - "dev": true, - "requires": { - "chalk": "^4.0.0" - } - }, - "log-update": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", - "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", - "dev": true, - "requires": { - "ansi-escapes": "^4.3.0", - "cli-cursor": "^3.1.0", - "slice-ansi": "^4.0.0", - "wrap-ansi": "^6.2.0" - }, - "dependencies": { - "slice-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - } - }, - "wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": 
"sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - } - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true - }, - "merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true - }, - "micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - "requires": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - } - }, - "mime-db": { - "version": "1.51.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", - "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", - "dev": true - }, - "mime-types": { - "version": "2.1.34", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", - "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", - "dev": true, - "requires": { - "mime-db": "1.51.0" - } - }, - "mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": 
"sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "dev": true - }, - "ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true - }, - "natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true, - "peer": true - }, - "natural-compare-lite": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", - "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", - "dev": true - }, - "npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "requires": { - "path-key": "^3.0.0" - } - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - 
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "requires": { - "wrappy": "1" - } - }, - "onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "requires": { - "mimic-fn": "^2.1.0" - } - }, - "optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", - "dev": true, - "peer": true, - "requires": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" - } - }, - "ospath": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", - "integrity": "sha1-EnZjl3Sj+O8lcvf+QoDg6kVQwHs=", - "dev": true - }, - "p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "dev": true, - "requires": { - "aggregate-error": "^3.0.0" - } - }, - "packet-reader": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", - "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==", - "dev": true - }, - "parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "peer": true, - "requires": { - "callsites": "^3.0.0" - } - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true - }, - "pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", - "dev": true - }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "dev": true - }, - "pg": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/pg/-/pg-8.8.0.tgz", - "integrity": "sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==", - "dev": true, - "requires": { - "buffer-writer": "2.0.0", - "packet-reader": "1.0.0", - "pg-connection-string": "^2.5.0", - "pg-pool": "^3.5.2", - "pg-protocol": "^1.5.0", - "pg-types": "^2.1.0", - "pgpass": "1.x" - } - }, - "pg-connection-string": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", - "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==", - "dev": true - }, - "pg-int8": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", - "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", - "dev": true - }, - "pg-minify": { - "version": 
"1.6.2", - "resolved": "https://registry.npmjs.org/pg-minify/-/pg-minify-1.6.2.tgz", - "integrity": "sha512-1KdmFGGTP6jplJoI8MfvRlfvMiyBivMRP7/ffh4a11RUFJ7kC2J0ZHlipoKiH/1hz+DVgceon9U2qbaHpPeyPg==", - "dev": true - }, - "pg-pool": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", - "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", - "dev": true, - "requires": {} - }, - "pg-promise": { - "version": "10.15.4", - "resolved": "https://registry.npmjs.org/pg-promise/-/pg-promise-10.15.4.tgz", - "integrity": "sha512-BKlHCMCdNUmF6gagVbehRWSEiVcZzPVltEx14OJExR9Iz9/1R6KETDWLLGv2l6yRqYFnEZZy1VDjRhArzeIGrw==", - "dev": true, - "requires": { - "assert-options": "0.8.0", - "pg": "8.8.0", - "pg-minify": "1.6.2", - "spex": "3.2.0" - } - }, - "pg-protocol": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", - "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==", - "dev": true - }, - "pg-types": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", - "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", - "dev": true, - "requires": { - "pg-int8": "1.0.1", - "postgres-array": "~2.0.0", - "postgres-bytea": "~1.0.0", - "postgres-date": "~1.0.4", - "postgres-interval": "^1.1.0" - } - }, - "pgpass": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", - "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", - "dev": true, - "requires": { - "split2": "^4.1.0" - } - }, - "picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": 
"sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true - }, - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true - }, - "postgres-array": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", - "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", - "dev": true - }, - "postgres-bytea": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", - "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", - "dev": true - }, - "postgres-date": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", - "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", - "dev": true - }, - "postgres-interval": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", - "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", - "dev": true, - "requires": { - "xtend": "^4.0.0" - } - }, - "prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true, - "peer": true - }, - "prettier": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", - "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", - "dev": true - }, - "prettier-linter-helpers": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", - "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", - "dev": true, - "requires": { - "fast-diff": "^1.1.2" - } - }, - "pretty-bytes": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", - "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", - "dev": true - }, - "progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true, - "peer": true - }, - "proxy-from-env": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", - "integrity": "sha1-M8UDmPcOp+uW0h97gXYwpVeRx+4=", - "dev": true - }, - "psl": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", - "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", - "dev": true - }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true - }, - "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", - "dev": true - }, - 
"querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", - "dev": true - }, - "queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true - }, - "regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true - }, - "request-progress": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", - "integrity": "sha1-TKdUCBx/7GP1BeT6qCWqBs1mnb4=", - "dev": true, - "requires": { - "throttleit": "^1.0.0" - } - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true, - "peer": true - }, - "restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", - "dev": true, - "requires": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - } - }, - "reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true - }, - "rfdc": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", - "integrity": 
"sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", - "dev": true - }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "requires": { - "queue-microtask": "^1.2.2" - } - }, - "rxjs": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.4.0.tgz", - "integrity": "sha512-7SQDi7xeTMCJpqViXh8gL/lebcwlp3d831F05+9B44A4B0WfsEwUQHR64gsH1kvJ+Ep/J9K2+n1hVl1CsGN23w==", - "dev": true, - "requires": { - "tslib": "~2.1.0" - } - }, - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true - }, - "semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": 
"sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", - "dev": true - }, - "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true - }, - "slice-ansi": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", - "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - } - }, - "spex": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spex/-/spex-3.2.0.tgz", - "integrity": "sha512-9srjJM7NaymrpwMHvSmpDeIK5GoRMX/Tq0E8aOlDPS54dDnDUIp30DrP9SphMPEETDLzEM9+4qo+KipmbtPecg==", - "dev": true - }, - "split2": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", - "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==", - "dev": true - }, - "sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", - "dev": true, - 
"requires": { - "asn1": "~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "safer-buffer": "^2.0.2", - "tweetnacl": "~0.14.0" - } - }, - "string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, - "strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true - }, - "strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "peer": true - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - }, - "text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", - "dev": true, - "peer": true - }, - 
"throttleit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", - "integrity": "sha1-nnhYNtr0Z0MUWlmEtiaNgoUorGw=", - "dev": true - }, - "through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", - "dev": true - }, - "tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dev": true, - "requires": { - "rimraf": "^3.0.0" - } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } - }, - "tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, - "requires": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - } - }, - "tslib": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.1.0.tgz", - "integrity": "sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==", - "dev": true - }, - "tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "dev": true, - "requires": { - "tslib": "^1.8.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": 
"sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - } - } - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "dev": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "dev": true - }, - "type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "peer": true, - "requires": { - "prelude-ls": "^1.2.1" - } - }, - "type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", - "dev": true - }, - "typescript": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.4.tgz", - "integrity": "sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==", - "dev": true - }, - "universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", - "dev": true - }, - "untildify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", - "dev": true - }, - "uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": 
"sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "peer": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "url": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", - "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", - "dev": true, - "requires": { - "punycode": "1.3.2", - "querystring": "0.2.0" - }, - "dependencies": { - "punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", - "dev": true - } - } - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true - }, - "v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", - "dev": true, - "peer": true - }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true, - "peer": true - }, - "wrap-ansi": { - "version": 
"7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - "xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, - "yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", - "dev": true, - "requires": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - } - } -} diff --git a/airbyte-webapp-e2e-tests/package.json b/airbyte-webapp-e2e-tests/package.json deleted file mode 100644 index c7d07699e859..000000000000 --- a/airbyte-webapp-e2e-tests/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "airbyte-webapp-e2e-tests", - "version": "0.0.0", - "description": "Airbyte e2e testing", - "engines": { - "node": "16.18.1" - }, - "scripts": { - "cypress:open": "cypress open", - "cypress:ci": "CYPRESS_BASE_URL=http://localhost:8000 cypress run", - "cypress:ci:record": "CYPRESS_BASE_URL=http://localhost:8000 cypress run --record --key $CYPRESS_KEY", - "createdbsource": "docker run --rm -d -p 5433:5432 -e POSTGRES_PASSWORD=secret_password -e POSTGRES_DB=airbyte_ci_source --name airbyte_ci_pg_source postgres", - 
"createdbdestination": "docker run --rm -d -p 5434:5432 -e POSTGRES_PASSWORD=secret_password -e POSTGRES_DB=airbyte_ci_destination --name airbyte_ci_pg_destination postgres", - "createdummyapi": "docker run --rm -d -p 6767:6767 --network=airbyte_airbyte_internal --mount type=bind,source=\"$(pwd)\"/dummy_api.js,target=/index.js --name=dummy_api node:16-alpine \"index.js\"", - "lint": "eslint --ext js,ts,tsx cypress" - }, - "devDependencies": { - "@types/node": "^18.11.9", - "@typescript-eslint/eslint-plugin": "^5.27.1", - "@typescript-eslint/parser": "^5.27.1", - "cypress": "^9.2.0", - "eslint-config-prettier": "^8.5.0", - "eslint-plugin-cypress": "^2.12.1", - "eslint-plugin-prettier": "^4.0.0", - "pg-promise": "^10.15.4", - "prettier": "^2.6.2", - "typescript": "^4.5.4" - } -} diff --git a/airbyte-webapp-e2e-tests/tsconfig.json b/airbyte-webapp-e2e-tests/tsconfig.json deleted file mode 100644 index 43def3592a3f..000000000000 --- a/airbyte-webapp-e2e-tests/tsconfig.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "include": ["./**/*.ts"], - "exclude": [], - "compilerOptions": { - "baseUrl": "cypress", - "target": "es5", - "skipLibCheck": true, - "esModuleInterop": true, - "allowSyntheticDefaultImports": true, - "strict": true, - "forceConsistentCasingInFileNames": true, - "module": "esnext", - "moduleResolution": "node", - "resolveJsonModule": true, - "jsx": "react-jsx", - "noFallthroughCasesInSwitch": true, - "types": ["cypress", "node"], - "lib": ["es2015", "dom"], - "isolatedModules": false, - "allowJs": true, - "noEmit": true, - "paths": { - "commands/*": ["commands/*"], - "fixtures/*": ["fixtures/*"], - "integration": ["integration/*"], - "pages/*": ["pages/*"], - "plugins/*": ["plugins/*"] - } - } -} diff --git a/airbyte-webapp/.env b/airbyte-webapp/.env deleted file mode 100644 index df37c64423e8..000000000000 --- a/airbyte-webapp/.env +++ /dev/null @@ -1,6 +0,0 @@ -REACT_APP_SEGMENT_TOKEN=6cxNSmQyGSKcATLdJ2pL6WsawkzEMDAN -REACT_APP_FULL_STORY_ORG=13AXQ4 
-REACT_APP_SENTRY_DSN= -REACT_APP_INTERCOM_APP_ID=nj1oam7s -REACT_APP_OSANO=16A0CTTE7vE8m1Qif/67beec9b-e563-4736-bdb4-4fe4adc39d48 -REACT_APP_CLOUD_PUBLIC_API_URL=/cloud_api diff --git a/airbyte-webapp/.eslintrc.js b/airbyte-webapp/.eslintrc.js deleted file mode 100644 index 28aba8e623a2..000000000000 --- a/airbyte-webapp/.eslintrc.js +++ /dev/null @@ -1,129 +0,0 @@ -module.exports = { - extends: [ - "react-app", - "plugin:@typescript-eslint/recommended", - "plugin:jest/recommended", - "prettier", - "plugin:prettier/recommended", - "plugin:css-modules/recommended", - "plugin:jsx-a11y/recommended", - "plugin:@airbyte/recommended", - ], - plugins: ["@typescript-eslint", "prettier", "unused-imports", "css-modules", "jsx-a11y", "@airbyte"], - parserOptions: { - ecmaVersion: 2020, - sourceType: "module", - ecmaFeatures: { - jsx: true, - }, - }, - rules: { - "jsx-a11y/label-has-associated-control": "error", - curly: "warn", - "css-modules/no-undef-class": "off", - "css-modules/no-unused-class": ["error", { camelCase: true }], - "dot-location": "warn", - "dot-notation": "warn", - eqeqeq: "error", - "prettier/prettier": "warn", - "unused-imports/no-unused-imports": "warn", - "no-else-return": "warn", - "no-lonely-if": "warn", - "no-inner-declarations": "off", - "no-unused-vars": "off", - "no-useless-computed-key": "warn", - "no-useless-return": "warn", - "no-var": "warn", - "object-shorthand": ["warn", "always"], - "prefer-arrow-callback": "warn", - "prefer-const": "warn", - "prefer-destructuring": ["warn", { AssignmentExpression: { array: true } }], - "prefer-object-spread": "warn", - "prefer-template": "warn", - "spaced-comment": ["warn", "always", { markers: ["/"] }], - yoda: "warn", - "import/order": [ - "warn", - { - "newlines-between": "always", - groups: ["type", "builtin", "external", "internal", ["parent", "sibling"], "index"], - pathGroupsExcludedImportTypes: ["builtin"], - pathGroups: [ - { - pattern: "components{/**,}", - group: "internal", - }, - { - pattern: 
"+(config|core|hooks|locales|packages|pages|services|types|utils|views){/**,}", - group: "internal", - position: "after", - }, - ], - alphabetize: { - order: "asc" /* sort in ascending order. Options: ['ignore', 'asc', 'desc'] */, - caseInsensitive: true /* ignore case. Options: [true, false] */, - }, - }, - ], - "@typescript-eslint/array-type": ["warn", { default: "array-simple" }], - "@typescript-eslint/ban-ts-comment": [ - "warn", - { - "ts-expect-error": "allow-with-description", - }, - ], - "@typescript-eslint/ban-types": "warn", - "@typescript-eslint/consistent-indexed-object-style": ["warn", "record"], - "@typescript-eslint/consistent-type-definitions": ["warn", "interface"], - "@typescript-eslint/no-unused-vars": "warn", - "react/function-component-definition": [ - "warn", - { - namedComponents: "arrow-function", - unnamedComponents: "arrow-function", - }, - ], - "jest/consistent-test-it": ["warn", { fn: "it", withinDescribe: "it" }], - "react/no-danger": "error", - "react/jsx-boolean-value": "warn", - "react/jsx-curly-brace-presence": "warn", - "react/jsx-fragments": "warn", - "react/jsx-no-useless-fragment": ["warn", { allowExpressions: true }], - "react/self-closing-comp": "warn", - "react/style-prop-object": ["warn", { allow: ["FormattedNumber"] }], - "no-restricted-imports": [ - "error", - { - paths: [ - { - name: "lodash", - message: 'Please use `import [function] from "lodash/[function]";` instead.', - }, - ], - patterns: ["!lodash/*"], - }, - ], - }, - parser: "@typescript-eslint/parser", - overrides: [ - { - files: ["scripts/**/*", "packages/**/*"], - rules: { - "@typescript-eslint/no-var-requires": "off", - }, - }, - { - // Only applies to files in src. Rules should be in here that are requiring type information - // and thus require the below parserOptions. 
- files: ["src/**/*"], - parserOptions: { - tsconfigRootDir: __dirname, - project: "./tsconfig.json", - }, - rules: { - "@typescript-eslint/await-thenable": "warn", - "@typescript-eslint/no-unnecessary-type-assertion": "warn", - }, - }, - ], -}; diff --git a/airbyte-webapp/.gitattributes b/airbyte-webapp/.gitattributes deleted file mode 100644 index f00342d0d17e..000000000000 --- a/airbyte-webapp/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -/public/fonts/**/*.svg binary diff --git a/airbyte-webapp/.gitignore b/airbyte-webapp/.gitignore deleted file mode 100644 index 10ee29f524a0..000000000000 --- a/airbyte-webapp/.gitignore +++ /dev/null @@ -1,42 +0,0 @@ -# dependencies -/node_modules -/.pnp -.pnp.js - -# testing -/coverage - -# production -/build - -# misc -.DS_Store - -# Log files -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -*.iml -/.idea - -# Environment overwrites -.env.development -.env.production -.env.local -.env.development.local -.env.test.local -.env.production.local - -# Local overwrites -.experiments.json - -storybook-static/ - -# Generated by our build-info plugin -/public/buildInfo.json - -# Ignore generated API clients, since they're automatically generated -/src/core/request/AirbyteClient.ts -/src/core/request/ConnectorBuilderClient.ts -/src/core/request/ConnectorManifest.ts diff --git a/airbyte-webapp/.husky/pre-commit b/airbyte-webapp/.husky/pre-commit deleted file mode 100755 index 4e96aec869c2..000000000000 --- a/airbyte-webapp/.husky/pre-commit +++ /dev/null @@ -1,10 +0,0 @@ -# Only run this pre-commit hook when npx is actually in the path. -# Since CI will fail on misformated frontend code we consider this hook optional -# and don't want to fail if the system doesn't have the requirements to run it. -if command -v npx &> /dev/null; then - # Only run if `npx` is at least version 8, since earlier versions didn't support the --no flag - npxMajorVersion=$(npx --version | cut -d. 
-f1) - if [ "$npxMajorVersion" -ge "8" ]; then - cd airbyte-webapp && npx --no lint-staged - fi -fi \ No newline at end of file diff --git a/airbyte-webapp/.npmrc b/airbyte-webapp/.npmrc deleted file mode 100644 index 618365b27463..000000000000 --- a/airbyte-webapp/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -engine-strict=true -enable-pre-post-scripts=true diff --git a/airbyte-webapp/.nvmrc b/airbyte-webapp/.nvmrc deleted file mode 100644 index a2d511aff36c..000000000000 --- a/airbyte-webapp/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -16.18.1 \ No newline at end of file diff --git a/airbyte-webapp/.prettierrc.js b/airbyte-webapp/.prettierrc.js deleted file mode 100644 index c59c9621a425..000000000000 --- a/airbyte-webapp/.prettierrc.js +++ /dev/null @@ -1,4 +0,0 @@ -module.exports = { - printWidth: 120, - endOfLine: "lf", -}; diff --git a/airbyte-webapp/.storybook/logo.png b/airbyte-webapp/.storybook/logo.png deleted file mode 100644 index 8b38e50dec15..000000000000 Binary files a/airbyte-webapp/.storybook/logo.png and /dev/null differ diff --git a/airbyte-webapp/.storybook/main.ts b/airbyte-webapp/.storybook/main.ts deleted file mode 100644 index 3712b33bf91c..000000000000 --- a/airbyte-webapp/.storybook/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { StorybookConfig } from "@storybook/react-vite"; - -const config: StorybookConfig = { - framework: "@storybook/react-vite", - stories: ["../src/**/*.stories.@(ts|tsx)"], - addons: [ - "@storybook/addon-links", - "@storybook/addon-essentials", - ], -}; - -export default config; diff --git a/airbyte-webapp/.storybook/manager.ts b/airbyte-webapp/.storybook/manager.ts deleted file mode 100644 index 4df681017652..000000000000 --- a/airbyte-webapp/.storybook/manager.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { addons } from "@storybook/addons"; -import theme from "./theme"; - -addons.setConfig({ - panelPosition: "bottom", - theme -}); \ No newline at end of file diff --git a/airbyte-webapp/.storybook/preview.ts 
b/airbyte-webapp/.storybook/preview.ts deleted file mode 100644 index 1e86bf0d54bf..000000000000 --- a/airbyte-webapp/.storybook/preview.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { withProviders } from "./withProvider"; - -import "../public/index.css"; -import "../src/scss/global.scss"; -import "../src/dayjs-setup"; - -export const parameters = {}; -export const decorators = [withProviders]; diff --git a/airbyte-webapp/.storybook/theme.tsx b/airbyte-webapp/.storybook/theme.tsx deleted file mode 100644 index c48a91506125..000000000000 --- a/airbyte-webapp/.storybook/theme.tsx +++ /dev/null @@ -1,8 +0,0 @@ -import { create } from "@storybook/theming/create"; -import Image from "./logo.png"; - -export default create({ - brandTitle: "Airbyte", - brandUrl: "https://airbyte.com", - brandImage: Image, -}); diff --git a/airbyte-webapp/.storybook/withProvider.tsx b/airbyte-webapp/.storybook/withProvider.tsx deleted file mode 100644 index fd2f316c4090..000000000000 --- a/airbyte-webapp/.storybook/withProvider.tsx +++ /dev/null @@ -1,58 +0,0 @@ -import React from "react"; - -import { MemoryRouter } from "react-router-dom"; -import { IntlProvider } from "react-intl"; -import { ThemeProvider } from "styled-components"; -import { QueryClientProvider, QueryClient } from "react-query"; - -// TODO: theme was not working correctly so imported directly -import { theme } from "../src/theme"; -import messages from "../src/locales/en.json"; -import { FeatureService } from "../src/hooks/services/Feature"; -import { ConfigServiceProvider, config } from "../src/config"; -import { DocumentationPanelProvider } from "../src/views/Connector/ConnectorDocumentationLayout/DocumentationPanelContext"; -import { ServicesProvider } from "../src/core/servicesProvider"; -import { analyticsServiceContext } from "../src/hooks/services/Analytics"; -import { AppMonitoringServiceProvider } from "../src/hooks/services/AppMonitoringService"; -import type { AnalyticsService } from "../src/core/analytics"; - 
-const analyticsContextMock: AnalyticsService = { - track: () => {}, - setContext: () => {}, - removeFromContext: () => {}, -} as unknown as AnalyticsService; - -const queryClient = new QueryClient({ - defaultOptions: { - queries: { - retry: false, - suspense: true, - }, - }, -}); - -export const withProviders = (getStory) => ( - - - - - - - - - - - - {getStory()} - - - - - - - - - - - -); diff --git a/airbyte-webapp/.stylelintignore b/airbyte-webapp/.stylelintignore deleted file mode 100644 index 806b6a32b8e9..000000000000 --- a/airbyte-webapp/.stylelintignore +++ /dev/null @@ -1 +0,0 @@ -coverage/**/* diff --git a/airbyte-webapp/.stylelintrc b/airbyte-webapp/.stylelintrc deleted file mode 100644 index e777d222acd5..000000000000 --- a/airbyte-webapp/.stylelintrc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "extends": [ - "stylelint-config-standard", - "stylelint-config-standard-scss", - "stylelint-config-css-modules", - "stylelint-config-prettier-scss" - ], - "rules": { - "selector-class-pattern": "^[a-z][a-zA-Z0-9]+$", - "color-function-notation": null, - "font-family-name-quotes": null, - "no-unknown-animations": true, - "custom-property-empty-line-before": null, - "scss/dollar-variable-empty-line-before": null, - "scss/dollar-variable-pattern": null, - "scss/percent-placeholder-pattern": null, - "value-keyword-case": null, - "color-no-hex": true, - "airbyte/no-color-variables-in-rgba": true, - "airbyte/no-use-renaming": true - }, - "ignoreFiles": ["**/build/**", "**/dist/**"], - "plugins": ["./packages/stylelint-plugin/index.js"] -} diff --git a/airbyte-webapp/Dockerfile b/airbyte-webapp/Dockerfile deleted file mode 100644 index 4e90bb10c5cd..000000000000 --- a/airbyte-webapp/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -ARG NGINX_IMAGE=nginx:alpine -FROM ${NGINX_IMAGE} as webapp - -EXPOSE 80 - -COPY bin/build /usr/share/nginx/html -COPY bin/nginx/default.conf.template /etc/nginx/templates/default.conf.template diff --git a/airbyte-webapp/README.md 
b/airbyte-webapp/README.md deleted file mode 100644 index 3feefee4c40a..000000000000 --- a/airbyte-webapp/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# airbyte-webapp - -This module contains the Airbyte Webapp. It is a React app written in TypeScript. -The webapp compiles to static HTML, JavaScript and CSS, which is served (in OSS) via -a nginx in the airbyte-webapp docker image. This nginx also serves as the reverse proxy -for accessing the server APIs in other images. - -## Building the webapp - -You can build the webapp using Gradle in the root of the repository: - -```sh -# Only compile and build the docker webapp image: -SUB_BUILD=PLATFORM ./gradlew :airbyte-webapp:assemble -# Build the webapp and additional artifacts and run tests: -SUB_BUILD=PLATFORM ./gradlew :airbyte-webapp:build -``` - -## Developing the webapp - -For an instruction how to develop on the webapp, please refer to our [documentation](https://docs.airbyte.com/contributing-to-airbyte/developing-locally/#develop-on-airbyte-webapp). - -### Entrypoints - -* `airbyte-webapp/src/App.tsx` is the entrypoint into the OSS version of the webapp. -* `airbyte-webapp/src/packages/cloud/App.tsx` is the entrypoint into the Cloud version of the webapp. diff --git a/airbyte-webapp/STYLEGUIDE.md b/airbyte-webapp/STYLEGUIDE.md deleted file mode 100644 index 6c1e65e50f77..000000000000 --- a/airbyte-webapp/STYLEGUIDE.md +++ /dev/null @@ -1,72 +0,0 @@ -# Frontend Style Guide - -This serves as a living document regarding conventions we have agreed upon as a frontend team. In general, the aim of these decisions and discussions is to both (a) increase the readability and consistency of our code and (b) decrease day to day decision-making so we can spend more time writing better code. - -## General Code Style and Formatting - -- Where possible, we rely on automated systems to maintain consistency in code style -- We use eslint, Prettier, and VSCode settings to automate these choices. 
The configuration files for these are checked into our repository, so no individual setup should be required beyond ensuring your VSCode settings include: - -``` -"editor.codeActionsOnSave": { - "source.fixAll.eslint": true, -} -``` - -- Don’t use single-character names. Using meaningful name for function parameters is a way of making the code self-documented and we always should do it. Example: - - .filter(([key, value]) => isDefined(value.default) ✅ - - .filter(([k, v]) => isDefined(v.default) ❌ - -## Exporting - -- Export at declaration, not at the bottom. For example: - - export const myVar ✅ - - const myVar; export { myVar }; ❌ - -## Component Props - -- Use explicit, verbose naming - - ie: `interface ConnectionFormProps` not `interface iProps` - -## Testing - -- Test files should be store alongside the files/features they are testing -- Use the prop `data-testid` instead of `data-id` - -## Types - -- For component props, prefer type unions over enums: - - `type SomeType = “some” | “type”;` ✅ - - `enum SomeEnum = { SOME: “some”, TYPE: “type” };` ❌ - - Exceptions may include: - - Generated using enums from the API - - When the value on an enum is cleaner than the string - - In this case use `const enum` instead - -## Styling - -### Color variables cannot be used inside of rgba() functions - -Our SCSS color variables compile to `rgb(X, Y, Z)`, which is an invalid value in the CSS `rgba()` function. A custom stylelint rule, `airbyte/no-color-variables-in-rgba`, enforces this rule. - -❌ Incorrect - -```scss -@use "scss/colors"; - -.myClass { - background-color: rgba(colors.$blue-400, 50%); -} -``` - -✅ Correct - define a color variable with transparency and use it directly - -```scss -@use "scss/colors"; - -.myClass { - background-color: colors.$blue-transparent; -} -``` - -> Historical context: previously there was some usage of color variables inside `rgba()` functions. 
In these cases, _SASS_ was actually compiling this into CSS for us, because it knew to convert `rgba(rgb(255, 0, 0), 50%)` into `rgb(255, 0, 0, 50%)`. Now that we use CSS Custom Properties for colors, SASS cannot know the value of the color variable at build time. So it outputs `rgba(var(--blue), 50%)` which will not work as expected. diff --git a/airbyte-webapp/babel.config.js b/airbyte-webapp/babel.config.js deleted file mode 100644 index e867c8c8a46f..000000000000 --- a/airbyte-webapp/babel.config.js +++ /dev/null @@ -1,12 +0,0 @@ -module.exports = { - env: { - test: { - // Define presets used to compile code when running jest tests - presets: [ - ["@babel/preset-env", { targets: { node: "current" } }], - ["@babel/preset-react", { runtime: "automatic" }], - "@babel/preset-typescript", - ], - }, - }, -}; diff --git a/airbyte-webapp/build.gradle b/airbyte-webapp/build.gradle deleted file mode 100644 index a052ea55ebba..000000000000 --- a/airbyte-webapp/build.gradle +++ /dev/null @@ -1,145 +0,0 @@ -plugins { - id "base" - id "com.github.node-gradle.node" version "3.4.0" -} - -// Use the node version that's defined in the .nvmrc file -def nodeVersion = new File("${projectDir}/.nvmrc").text.trim(); - -// Read pnpm version to use from package.json engines.pnpm entry -def pnpmVer = new groovy.json.JsonSlurper().parse(new File("${projectDir}/package.json")).engines.pnpm.trim(); - -// This array should contain a path to all configs that are common to most build tasks and -// might affect them (i.e. 
if any of those files change we want to rerun most tasks) -def commonConfigs = [ - '.env', - 'package.json', - 'pnpm-lock.yaml', - 'tsconfig.json', - '.prettierrc.js' -] - -node { - download = true - version = nodeVersion - pnpmVersion = pnpmVer -} - -task validateLockFiles { - description "Validate only a pnpm-lock.yaml lock file exists" - doLast { - assert file("pnpm-lock.yaml").exists() - assert !file("package-lock.json").exists() - assert !file("yarn.lock").exists() - } -} - -pnpmInstall.dependsOn validateLockFiles -// Make sure `pnpmInstall` always runs, since it's better at determining -// whether it's needs to do anything than Gradle can -pnpmInstall.outputs.upToDateWhen { false } - -task pnpmBuild(type: PnpmTask) { - dependsOn pnpmInstall - - args = ['build'] - - inputs.files commonConfigs - inputs.file '.eslintrc.js' - inputs.dir 'public' - inputs.dir 'src' - - outputs.dir 'build/app' -} - -task test(type: PnpmTask) { - dependsOn assemble - - args = ['run', 'test:ci'] - inputs.files commonConfigs - inputs.dir 'src' -} - -task licenseCheck(type: PnpmTask) { - dependsOn pnpmInstall - - args = ['run', 'license-check'] - inputs.files commonConfigs - inputs.file 'scripts/license-check.js' - - // The licenseCheck has no outputs, thus we always treat the outpus up to date - // as long as the inputs have not changed - outputs.upToDateWhen { true } -} - -task validateLinks(type: PnpmTask) { - dependsOn pnpmInstall - - args = ['run', 'validate-links'] - - // Since the output of this task depends on availability of URLs - // we never want to treat it as "up-to-date" and always want to run it - outputs.upToDateWhen { false } -} - -task buildStorybook(type: PnpmTask) { - dependsOn pnpmInstall - args = ['run', 'build:storybook'] - - inputs.files commonConfigs - inputs.dir '.storybook' - inputs.dir 'public' - inputs.dir 'src' - - outputs.dir 'build/storybook' - - environment = [ - 'NODE_OPTIONS': '--max_old_space_size=4096' - ] -} - -task copyBuildOutput(type: Copy) { - 
dependsOn copyDocker, pnpmBuild - - from "${project.projectDir}/build/app" - into 'build/docker/bin/build' -} - -task copyDocs(type: Copy) { - dependsOn copyDocker, copyBuildOutput - - from "${project.rootProject.projectDir}/docs/integrations" - into "build/docker/bin/build/docs/integrations" - // google-ads.md is blocked by Ad Blockers - rename ('google-ads.md', 'gglad.md') - duplicatesStrategy DuplicatesStrategy.INCLUDE -} - -// Copy images that are used in .md integration documentation docs -task copyDocAssets(type: Copy) { - dependsOn copyDocker, copyBuildOutput - - from "${project.rootProject.projectDir}/docs/.gitbook" - into "build/docker/bin/build/docs/.gitbook" - duplicatesStrategy DuplicatesStrategy.INCLUDE -} - -task copyNginx(type: Copy) { - dependsOn copyDocker - - from "${project.projectDir}/nginx" - into "build/docker/bin/nginx" -} - -// Those tasks should be run as part of the "check" task -check.dependsOn validateLinks, licenseCheck, test - -build.dependsOn buildStorybook - -tasks.named("buildDockerImage") { - dependsOn copyDocker - dependsOn copyBuildOutput - dependsOn copyNginx - dependsOn copyDocs - dependsOn copyDocAssets -} diff --git a/airbyte-webapp/docs/HowTo-ConnectionSpecification.md b/airbyte-webapp/docs/HowTo-ConnectionSpecification.md deleted file mode 100644 index f9b264cf2082..000000000000 --- a/airbyte-webapp/docs/HowTo-ConnectionSpecification.md +++ /dev/null @@ -1,35 +0,0 @@ -0. run `npm install` (make sure to install node 16 if you don't already have it) -1. run `npm run storybook` in `airbyte-webapp` directory -2. open `ServiceForm` component story http://localhost:9009/?path=/story/views-serviceform--source -3. press `raw` on the specifications property, so you will be able to past json in string format. -4. edit specifications property to the connectionConfigration you want - -e.g. 
-``` -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "BigQuery Destination Spec", - "type": "object", - "required": ["project_id", "dataset_id"], - "properties": { - "project_id": { - "type": "string", - "description": "The GCP project ID for the project containing the target BigQuery dataset.", - "title": "Project ID" - }, - "dataset_id": { - "type": "string", - "description": "Default BigQuery Dataset ID tables are replicated to if the source does not specify a namespace.", - "title": "Default Dataset ID" - }, - "credentials_json": { - "type": "string", - "description": "The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.", - "title": "Credentials JSON", - "airbyte_secret": true - } - } - } -``` - -![img.png](img.png) diff --git a/airbyte-webapp/docs/HowTo-EnvVariables.md b/airbyte-webapp/docs/HowTo-EnvVariables.md deleted file mode 100644 index a2edf83120f7..000000000000 --- a/airbyte-webapp/docs/HowTo-EnvVariables.md +++ /dev/null @@ -1,28 +0,0 @@ -## Environment variables - -Currently we have 2 types of environment variables: - -1. Statically injected build time variables -2. Dynamic env variables injected via `window` - -### Static env variables - -The environment variables are embedded during the build time. Since our app is based on Create React App that produces a -static HTML/CSS/JS bundle, it can’t possibly read them at runtime. 
- -Static env variables name should always start with `REACT_APP_` - -### Dynamic env variables - -Dynamic env variables in our cases are injected into app by nginx - -```html - -; -``` - -later we can use any of the declared variables from window diff --git a/airbyte-webapp/docs/img.png b/airbyte-webapp/docs/img.png deleted file mode 100644 index 6bd0f60607a6..000000000000 Binary files a/airbyte-webapp/docs/img.png and /dev/null differ diff --git a/airbyte-webapp/gradle.properties b/airbyte-webapp/gradle.properties deleted file mode 100644 index 737005e687a2..000000000000 --- a/airbyte-webapp/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -dockerImageName=webapp diff --git a/airbyte-webapp/index.html b/airbyte-webapp/index.html deleted file mode 100644 index 138941802120..000000000000 --- a/airbyte-webapp/index.html +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - - - - - - - Airbyte - - -

    - You need to enable JavaScript to run this app. -
    - -
    - - - diff --git a/airbyte-webapp/jest.config.ts b/airbyte-webapp/jest.config.ts deleted file mode 100644 index 39f03b7089dd..000000000000 --- a/airbyte-webapp/jest.config.ts +++ /dev/null @@ -1,22 +0,0 @@ -// eslint-disable-next-line jest/no-jest-import -import type { Config } from "jest"; - -const jestConfig: Config = { - verbose: true, - // Required to overwrite the default which would ignore node_modules from transformation, - // but several node_modules are not transpiled so they would fail without babel transformation running - transformIgnorePatterns: [], - snapshotSerializers: ["./src/test-utils/classname-serializer.js"], - coveragePathIgnorePatterns: ["\\.stories\\.tsx$"], - modulePathIgnorePatterns: ["src/.*/__mocks__"], - testEnvironment: "jsdom", - moduleDirectories: ["node_modules", "src"], - moduleNameMapper: { - "\\.module\\.scss$": "test-utils/mock-data/mockIdentity.js", - "\\.(css|png|scss)$": "test-utils/mock-data/mockEmpty.js", - "\\.svg$": "test-utils/mock-data/mockSvg.js", - }, - setupFilesAfterEnv: ["./src/test-utils/setup-tests.ts"], -}; - -export default jestConfig; diff --git a/airbyte-webapp/nginx/default.conf.template b/airbyte-webapp/nginx/default.conf.template deleted file mode 100644 index ee2c2271faf1..000000000000 --- a/airbyte-webapp/nginx/default.conf.template +++ /dev/null @@ -1,55 +0,0 @@ -upstream api-server { - server $INTERNAL_API_HOST; -} - -upstream connector-builder-server { - server $CONNECTOR_BUILDER_API_HOST; -} - -server { - listen 80; - listen [::]:80; - server_name localhost; - - #charset koi8-r; - #access_log /var/log/nginx/host.access.log main; - - add_header Content-Security-Policy "script-src * 'unsafe-inline'; worker-src self blob:;"; - - location / { - root /usr/share/nginx/html; - try_files $uri $uri/ /index.html; - - sub_filter - ''; - sub_filter_once on; - } - - #error_page 404 /404.html; - - # redirect server error pages to the static page /50x.html - # - error_page 500 502 503 504 /50x.html; - location 
= /50x.html { - root /usr/share/nginx/html; - } - - location /api/ { - fastcgi_read_timeout 1h; - proxy_read_timeout 1h; - client_max_body_size 200M; - proxy_pass http://api-server/api/; - } - - location /connector-builder-api/ { - fastcgi_read_timeout 1h; - proxy_read_timeout 1h; - client_max_body_size 200M; - proxy_pass http://connector-builder-server/; - } -} diff --git a/airbyte-webapp/orval.config.ts b/airbyte-webapp/orval.config.ts deleted file mode 100644 index f32b8e5fa769..000000000000 --- a/airbyte-webapp/orval.config.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { defineConfig } from "orval"; - -export default defineConfig({ - api: { - input: "../airbyte-api/src/main/openapi/config.yaml", - output: { - target: "./src/core/request/AirbyteClient.ts", - prettier: true, - override: { - header: (info) => [ - `eslint-disable`, - `Generated by orval 🍺`, - `Do not edit manually. Run "npm run generate-client" instead.`, - ...(info.title ? [info.title] : []), - ...(info.description ? [info.description] : []), - ...(info.version ? [`OpenAPI spec version: ${info.version}`] : []), - ], - mutator: { - path: "./src/core/request/apiOverride.ts", - name: "apiOverride", - }, - }, - }, - }, - connectorBuilder: { - input: "../airbyte-connector-builder-server/src/main/openapi/openapi.yaml", - output: { - target: "./src/core/request/ConnectorBuilderClient.ts", - prettier: true, - override: { - header: (info) => [ - `eslint-disable`, - `Generated by orval 🍺`, - `Do not edit manually. Run "npm run generate-client" instead.`, - ...(info.title ? [info.title] : []), - ...(info.description ? [info.description] : []), - ...(info.version ? 
[`OpenAPI spec version: ${info.version}`] : []), - ], - mutator: { - path: "./src/core/request/apiOverride.ts", - name: "apiOverride", - }, - }, - }, - }, - connectorManifest: { - input: "./src/services/connectorBuilder/connector_manifest_openapi.yaml", - output: { - target: "./src/core/request/ConnectorManifest.ts", - prettier: true, - override: { - header: (info) => [ - `eslint-disable`, - `Generated by orval 🍺`, - `Do not edit manually. Run "npm run generate-client" instead.`, - ...(info.title ? [info.title] : []), - ...(info.description ? [info.description] : []), - ...(info.version ? [`OpenAPI spec version: ${info.version}`] : []), - ], - }, - }, - }, -}); diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json deleted file mode 100644 index 31bcb5017117..000000000000 --- a/airbyte-webapp/package.json +++ /dev/null @@ -1,215 +0,0 @@ -{ - "name": "airbyte-webapp", - "version": "0.40.32", - "private": true, - "engines": { - "node": "16.18.1", - "pnpm": "7.26.3" - }, - "scripts": { - "prepare": "cd .. 
&& husky install airbyte-webapp/.husky", - "prestart": "pnpm run generate-client", - "start": "NODE_OPTIONS='-r ./scripts/dev-overwrites.js' vite", - "prestart:cloud": "pnpm run generate-client", - "start:cloud": "AB_ENV=${AB_ENV-frontend-dev} NODE_OPTIONS='-r ./scripts/environment.js -r ./scripts/dev-overwrites.js' vite", - "prebuild": "pnpm run generate-client", - "build": "vite build", - "pretest": "pnpm run generate-client", - "test": "jest --watch", - "test:ci": "jest --watchAll=false --silent", - "test:coverage": "jest --coverage --watchAll=false", - "format": "prettier --write 'src/**/*.{ts,tsx}'", - "storybook": "storybook dev -p 9009", - "build:storybook": "storybook build -o 'build/storybook'", - "lint": "eslint --ext js,ts,tsx src", - "stylelint": "stylelint 'src/**/*.{css,scss}'", - "stylelint-check": "stylelint-config-prettier-scss-check", - "license-check": "node ./scripts/license-check.js", - "generate-client": "./scripts/load-declarative-schema.sh && orval", - "validate-links": "ts-node --skip-project ./scripts/validate-links.ts" - }, - "dependencies": { - "@datadog/browser-rum": "^4.21.2", - "@floating-ui/react-dom": "^1.0.0", - "@fortawesome/fontawesome-svg-core": "^6.1.1", - "@fortawesome/free-brands-svg-icons": "^6.1.1", - "@fortawesome/free-regular-svg-icons": "^6.1.1", - "@fortawesome/free-solid-svg-icons": "^6.1.1", - "@fortawesome/react-fontawesome": "^0.1.18", - "@headlessui/react": "^1.6.5", - "@monaco-editor/react": "^4.4.5", - "@sentry/react": "^6.19.6", - "@sentry/tracing": "^6.19.6", - "@tanstack/react-table": "^8.7.0", - "@types/diff": "^5.0.2", - "@types/node-fetch": "^2.6.2", - "@types/segment-analytics": "^0.0.34", - "@types/uuid": "^9.0.0", - "classnames": "^2.3.1", - "date-fns": "^2.29.3", - "dayjs": "^1.11.3", - "diff": "^5.1.0", - "firebase": "^9.8.2", - "flat": "^5.0.2", - "formik": "^2.2.9", - "framer-motion": "^6.3.11", - "js-yaml": "^4.1.0", - "json-schema": "^0.4.0", - "launchdarkly-js-client-sdk": "^3.1.0", - "lodash": 
"^4.17.21", - "monaco-editor": "^0.34.1", - "query-string": "^6.13.1", - "react": "^17.0.2", - "react-datepicker": "^4.8.0", - "react-dom": "^17.0.2", - "react-helmet-async": "^1.3.0", - "react-intersection-observer": "^9.4.2", - "react-intl": "^6.1.1", - "react-lazylog": "^4.5.3", - "react-markdown": "^7.0.1", - "react-paginate": "^8.1.3", - "react-query": "^3.39.1", - "react-reflex": "^4.0.9", - "react-resize-detector": "^8.0.3", - "react-router-dom": "6.3.0", - "react-select": "^5.4.0", - "react-slick": "^0.29.0", - "react-table": "^7.8.0", - "react-use": "^17.4.0", - "react-use-intercom": "^1.5.2", - "react-widgets": "^4.6.1", - "recharts": "^2.1.13", - "rehype-slug": "^5.0.1", - "rehype-urls": "^1.1.1", - "remark-directive": "^2.0.1", - "remark-frontmatter": "^4.0.1", - "remark-gfm": "^3.0.0", - "rxjs": "^7.5.5", - "sanitize-html": "^2.7.1", - "sass": "^1.52.2", - "styled-components": "^5.3.5", - "typesafe-actions": "^5.1.0", - "unified": "^10.1.2", - "unist-util-visit": "^4.1.0", - "url": "^0.11.0", - "uuid": "^9.0.0", - "yup": "^0.32.11" - }, - "devDependencies": { - "@airbyte/eslint-plugin": "link:packages/eslint-plugin", - "@babel/core": "^7.20.12", - "@babel/preset-env": "^7.20.2", - "@babel/preset-react": "^7.18.6", - "@babel/preset-typescript": "^7.18.6", - "@storybook/addon-actions": "^7.0.0-beta.36", - "@storybook/addon-essentials": "^7.0.0-beta.36", - "@storybook/addon-links": "^7.0.0-beta.36", - "@storybook/react": "^7.0.0-beta.36", - "@storybook/react-vite": "^7.0.0-beta.36", - "@storybook/theming": "^7.0.0-beta.36", - "@testing-library/jest-dom": "^5.16.4", - "@testing-library/react": "^12.1.3", - "@testing-library/react-hooks": "^7.0.2", - "@testing-library/user-event": "^13.5.0", - "@types/flat": "^5.0.2", - "@types/jest": "^27.5.2", - "@types/js-yaml": "^4.0.5", - "@types/json-schema": "^7.0.11", - "@types/lodash": "^4.14.182", - "@types/node": "^17.0.40", - "@types/react": "^17.0.39", - "@types/react-datepicker": "^4.8.0", - 
"@types/react-dom": "^17.0.11", - "@types/react-helmet": "^6.1.5", - "@types/react-lazylog": "^4.5.1", - "@types/react-paginate": "^7.1.1", - "@types/react-slick": "^0.23.10", - "@types/react-table": "^7.7.12", - "@types/react-widgets": "^4.4.7", - "@types/sanitize-html": "^2.6.2", - "@types/styled-components": "^5.1.25", - "@types/testing-library__jest-dom": "^5.14.5", - "@types/unist": "^2.0.5", - "@typescript-eslint/eslint-plugin": "^5.27.1", - "@typescript-eslint/parser": "^5.27.1", - "@vitejs/plugin-basic-ssl": "^1.0.1", - "@vitejs/plugin-react": "^3.0.1", - "babel-jest": "^29.3.1", - "chalk": "^4.1.2", - "dotenv": "^16.0.3", - "eslint": "^8.32.0", - "eslint-config-prettier": "^8.6.0", - "eslint-config-react-app": "^7.0.1", - "eslint-plugin-css-modules": "^2.11.0", - "eslint-plugin-import": "^2.27.5", - "eslint-plugin-jest": "^26.5.3", - "eslint-plugin-jsx-a11y": "^6.7.1", - "eslint-plugin-prettier": "^4.2.1", - "eslint-plugin-unused-imports": "^2.0.0", - "express": "^4.18.1", - "history": "^5.3.0", - "husky": "^8.0.1", - "jest": "^29.3.0", - "jest-environment-jsdom": "^29.3.1", - "license-checker": "^25.0.1", - "lint-staged": "^12.3.7", - "meow": "^9.0.0", - "node-fetch": "^2.6.7", - "optionator": "^0.9.1", - "orval": "^6.11.1", - "prettier": "^2.6.2", - "react-select-event": "^5.5.0", - "storybook": "^7.0.0-beta.36", - "stylelint": "^14.9.1", - "stylelint-config-css-modules": "^4.1.0", - "stylelint-config-prettier-scss": "^0.0.1", - "stylelint-config-standard": "^26.0.0", - "stylelint-config-standard-scss": "^5.0.0", - "tar": "^6.1.11", - "timezone-mock": "^1.3.4", - "tmpl": "^1.0.5", - "ts-node": "^10.8.1", - "typescript": "^4.9.5", - "typescript-plugin-css-modules": "^4.1.1", - "vite": "^4.0.4", - "vite-plugin-checker": "^0.5.5", - "vite-plugin-svgr": "^2.4.0", - "vite-tsconfig-paths": "^4.0.3" - }, - "overrides": { - "minimatch": "^3.0.5" - }, - "lint-staged": { - "src/**/*.{js,jsx,ts,tsx}": [ - "eslint --fix" - ], - "src/**/*.{css,scss,md,json}": [ - 
"prettier --write" - ], - "{public,src}/**/*.{css,scss}": [ - "stylelint --fix" - ] - }, - "browserslist": { - "production": [ - ">0.2%", - "not dead", - "not op_mini all" - ], - "development": [ - "last 1 chrome version", - "last 1 firefox version", - "last 1 safari version" - ] - }, - "pnpm": { - "patchedDependencies": { - "react-virtualized@9.22.3": "patches/react-virtualized@9.22.3.patch" - }, - "overrides": { - "json5@<1.0.2": ">=1.0.2", - "postcss@<7.0.36": ">=7.0.36", - "loader-utils@<1.4.1": ">=1.4.1" - } - } -} diff --git a/airbyte-webapp/packages/README.md b/airbyte-webapp/packages/README.md deleted file mode 100644 index 5f39b4e15be9..000000000000 --- a/airbyte-webapp/packages/README.md +++ /dev/null @@ -1,2 +0,0 @@ -This folder should only contain other folders that are treated like local npm packages, -e.g. because we want to provide plugins or configuration for build tools. \ No newline at end of file diff --git a/airbyte-webapp/packages/eslint-plugin/index.js b/airbyte-webapp/packages/eslint-plugin/index.js deleted file mode 100644 index e874928d7d49..000000000000 --- a/airbyte-webapp/packages/eslint-plugin/index.js +++ /dev/null @@ -1,12 +0,0 @@ -module.exports = { - rules: { - "no-hardcoded-connector-ids": require("./no-hardcoded-connector-ids"), - }, - configs: { - recommended: { - rules: { - "@airbyte/no-hardcoded-connector-ids": "error", - }, - }, - }, -}; diff --git a/airbyte-webapp/packages/eslint-plugin/no-hardcoded-connector-ids.js b/airbyte-webapp/packages/eslint-plugin/no-hardcoded-connector-ids.js deleted file mode 100644 index e08f08e4bae9..000000000000 --- a/airbyte-webapp/packages/eslint-plugin/no-hardcoded-connector-ids.js +++ /dev/null @@ -1,38 +0,0 @@ -const destinations = require("../../src/utils/connectors/destinations.json"); -const sources = require("../../src/utils/connectors/sources.json"); - -// Create a map for connector id to variable name (i.e. 
flip the direction of the map) -const sourceIdToName = Object.fromEntries(Object.entries(sources).map((entry) => [entry[1], entry[0]])); -const destinationIdToName = Object.fromEntries(Object.entries(destinations).map((entry) => [entry[1], entry[0]])); - -const validateStringContent = (context, node, nodeContent) => { - if (nodeContent in destinationIdToName) { - context.report({ node, messageId: "destinationId", data: { id: nodeContent, name: destinationIdToName[nodeContent] } }); - } else if (nodeContent in sourceIdToName) { - context.report({ node, messageId: "sourceId", data: { id: nodeContent, name: sourceIdToName[nodeContent] } }); - } -}; - -module.exports = { - meta: { - type: "suggestion", - messages: { - sourceId: "Found hard-coded connector id, use `ConnectorIds.Sources.{{ name }}` from `utils/connectors` instead.", - destinationId: - "Found hard-coded connector id, use `ConnectorIds.Destinations.{{ name }}` from `utils/connectors` instead.", - }, - }, - create: (context) => ({ - Literal: (node) => { - if (typeof node.value === "string") { - validateStringContent(context, node, node.value); - } - }, - TemplateLiteral: (node) => { - // Only check template literals which are "static", i.e. 
don't contain ${} elements - if (!node.expressions.length && node.quasis.length === 1) { - validateStringContent(context, node, node.quasis[0].value.raw); - } - }, - }), -}; diff --git a/airbyte-webapp/packages/stylelint-plugin/index.js b/airbyte-webapp/packages/stylelint-plugin/index.js deleted file mode 100644 index bafdf357ddf0..000000000000 --- a/airbyte-webapp/packages/stylelint-plugin/index.js +++ /dev/null @@ -1,12 +0,0 @@ -const stylelint = require("stylelint"); - -const rules = { - "no-color-variables-in-rgba": require("./no-color-variables-in-rgba"), - "no-use-renaming": require("./no-use-renaming"), -}; - -const rulesPlugins = Object.keys(rules).map((ruleName) => - stylelint.createPlugin(`airbyte/${ruleName}`, rules[ruleName]) -); - -module.exports = rulesPlugins; diff --git a/airbyte-webapp/packages/stylelint-plugin/no-color-variables-in-rgba.js b/airbyte-webapp/packages/stylelint-plugin/no-color-variables-in-rgba.js deleted file mode 100644 index 9f88d5bc950f..000000000000 --- a/airbyte-webapp/packages/stylelint-plugin/no-color-variables-in-rgba.js +++ /dev/null @@ -1,37 +0,0 @@ -const stylelint = require("stylelint"); -const { ruleMessages } = stylelint.utils; -const ruleName = "airbyte/no-color-variables-in-rgba"; -const messages = ruleMessages(ruleName, { - variableFoundInRgba: () => - `A color variable can't be used within an rgba() function. Explanation: ${LINK_TO_STYLEGUIDE}`, -}); -const LINK_TO_STYLEGUIDE = - "https://github.com/airbytehq/airbyte/blob/master/airbyte-webapp/STYLEGUIDE.md#color-variables-cannot-be-used-inside-of-rgba-functions"; - -module.exports.ruleName = ruleName; -module.exports.messages = messages; - -/** - * This stylelint rule checks if a color variable is used inside of rgba(), which we do not currently support. - * There are no options passed to the rule, as long as it is enabled in .stylelintrc it will be enforced. 
- */ -module.exports = (enabled) => { - return function lint(postcssRoot, postcssResult) { - if (!enabled) { - return () => null; - } - postcssRoot.walkDecls((decl) => { - // Check each value to see if it contains a string like "rgba(colors.$" or "rgba( ... colors.$" - const hasVariableInRgba = /rgba\([^)]*colors\.\$/.test(decl.value); - if (hasVariableInRgba) { - stylelint.utils.report({ - ruleName, - result: postcssResult, - message: messages.variableFoundInRgba(), - node: decl, - word: "blue", - }); - } - }); - }; -}; diff --git a/airbyte-webapp/packages/stylelint-plugin/no-use-renaming.js b/airbyte-webapp/packages/stylelint-plugin/no-use-renaming.js deleted file mode 100644 index 07d460461598..000000000000 --- a/airbyte-webapp/packages/stylelint-plugin/no-use-renaming.js +++ /dev/null @@ -1,33 +0,0 @@ -const stylelint = require("stylelint"); -const { ruleMessages } = stylelint.utils; -const ruleName = "airbyte/no-use-renaming"; -const messages = ruleMessages(ruleName, { - rejected: () => `You should not assign @use imports a different name.`, -}); - -module.exports.ruleName = ruleName; -module.exports.messages = messages; - -/** - * This stylelint rule checks a `@use` statement got a new name assigned e.g. `@use "scss/variables" as var` - * and fails if so. 
- */ -/** @type {import('stylelint').Rule} */ -module.exports = (enabled) => { - return function lint(postcssRoot, postcssResult) { - if (!enabled) { - return; - } - postcssRoot.walkAtRules((rule) => { - if (rule.name === "use" && rule.params.includes(" as ")) { - stylelint.utils.report({ - ruleName, - node: rule, - message: messages.rejected(), - result: postcssResult, - word: `as `, - }); - } - }); - }; -}; diff --git a/airbyte-webapp/packages/vite-plugins/build-info.ts b/airbyte-webapp/packages/vite-plugins/build-info.ts deleted file mode 100644 index 44e31c105128..000000000000 --- a/airbyte-webapp/packages/vite-plugins/build-info.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type { Plugin } from "vite"; - -import fs from "fs"; -import path from "path"; - -import { v4 as uuidV4 } from "uuid"; - -const buildHash = uuidV4(); - -/** - * A Vite plugin that will generate on every build a new random UUID and write that to the `/buildInfo.json` - * file as well as make it available as `process.env.BUILD_HASH` in code. - */ -export function buildInfo(): Plugin { - return { - name: "airbyte/build-info", - buildStart() { - fs.writeFileSync(path.resolve(__dirname, "../../public/buildInfo.json"), JSON.stringify({ build: buildHash })); - }, - config: () => ({ - define: { - "process.env.BUILD_HASH": JSON.stringify(buildHash), - }, - }), - }; -} diff --git a/airbyte-webapp/packages/vite-plugins/doc-middleware.ts b/airbyte-webapp/packages/vite-plugins/doc-middleware.ts deleted file mode 100644 index 15ea716b37a0..000000000000 --- a/airbyte-webapp/packages/vite-plugins/doc-middleware.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { Connect, Plugin } from "vite"; - -import express from "express"; - -export function docMiddleware(): Plugin { - return { - name: "airbyte/doc-middleware", - configureServer(server) { - // Serve the docs used in the sidebar. 
During building Gradle will copy those into the docker image - // Relavant gradle task :airbyte-webapp:copyDocs - server.middlewares.use( - "/docs/integrations", - express.static(`${__dirname}/../../../docs/integrations`) as Connect.NextHandleFunction - ); - // workaround for adblockers to serve google ads docs in development - server.middlewares.use( - "/docs/integrations/sources/gglad.md", - express.static(`${__dirname}/../../../docs/integrations/sources/google-ads.md`) as Connect.NextHandleFunction - ); - // Server assets that can be used during. Related gradle task: :airbyte-webapp:copyDocAssets - server.middlewares.use( - "/docs/.gitbook", - express.static(`${__dirname}/../../../docs/.gitbook`) as Connect.NextHandleFunction - ); - }, - }; -} diff --git a/airbyte-webapp/packages/vite-plugins/index.ts b/airbyte-webapp/packages/vite-plugins/index.ts deleted file mode 100644 index 6dd797286257..000000000000 --- a/airbyte-webapp/packages/vite-plugins/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { docMiddleware } from "./doc-middleware"; -export { buildInfo } from "./build-info"; diff --git a/airbyte-webapp/patches/react-virtualized@9.22.3.patch b/airbyte-webapp/patches/react-virtualized@9.22.3.patch deleted file mode 100644 index 598ac4eb7beb..000000000000 --- a/airbyte-webapp/patches/react-virtualized@9.22.3.patch +++ /dev/null @@ -1,10 +0,0 @@ -diff --git a/dist/es/WindowScroller/utils/onScroll.js b/dist/es/WindowScroller/utils/onScroll.js -index d00f0f18c6596e4e57f4f762f91fed4282610c91..c8496e8eabafdf9cf6071986ec446839d7b65556 100644 ---- a/dist/es/WindowScroller/utils/onScroll.js -+++ b/dist/es/WindowScroller/utils/onScroll.js -@@ -71,4 +71,3 @@ export function unregisterScrollListener(component, element) { - } - } - } -\ No newline at end of file --import { bpfrpt_proptype_WindowScroller } from "../WindowScroller.js"; \ No newline at end of file diff --git a/airbyte-webapp/pnpm-lock.yaml b/airbyte-webapp/pnpm-lock.yaml deleted file mode 100644 index 
2e4aa5b74d4c..000000000000 --- a/airbyte-webapp/pnpm-lock.yaml +++ /dev/null @@ -1,16033 +0,0 @@ -lockfileVersion: 5.4 - -overrides: - json5@<1.0.2: '>=1.0.2' - postcss@<7.0.36: '>=7.0.36' - loader-utils@<1.4.1: '>=1.4.1' - -patchedDependencies: - react-virtualized@9.22.3: - hash: wem7zdhrj6jola7ic3qcehiqii - path: patches/react-virtualized@9.22.3.patch - -specifiers: - '@airbyte/eslint-plugin': link:packages/eslint-plugin - '@babel/core': ^7.20.12 - '@babel/preset-env': ^7.20.2 - '@babel/preset-react': ^7.18.6 - '@babel/preset-typescript': ^7.18.6 - '@datadog/browser-rum': ^4.21.2 - '@floating-ui/react-dom': ^1.0.0 - '@fortawesome/fontawesome-svg-core': ^6.1.1 - '@fortawesome/free-brands-svg-icons': ^6.1.1 - '@fortawesome/free-regular-svg-icons': ^6.1.1 - '@fortawesome/free-solid-svg-icons': ^6.1.1 - '@fortawesome/react-fontawesome': ^0.1.18 - '@headlessui/react': ^1.6.5 - '@monaco-editor/react': ^4.4.5 - '@sentry/react': ^6.19.6 - '@sentry/tracing': ^6.19.6 - '@storybook/addon-actions': ^7.0.0-beta.36 - '@storybook/addon-essentials': ^7.0.0-beta.36 - '@storybook/addon-links': ^7.0.0-beta.36 - '@storybook/react': ^7.0.0-beta.36 - '@storybook/react-vite': ^7.0.0-beta.36 - '@storybook/theming': ^7.0.0-beta.36 - '@tanstack/react-table': ^8.7.0 - '@testing-library/jest-dom': ^5.16.4 - '@testing-library/react': ^12.1.3 - '@testing-library/react-hooks': ^7.0.2 - '@testing-library/user-event': ^13.5.0 - '@types/diff': ^5.0.2 - '@types/flat': ^5.0.2 - '@types/jest': ^27.5.2 - '@types/js-yaml': ^4.0.5 - '@types/json-schema': ^7.0.11 - '@types/lodash': ^4.14.182 - '@types/node': ^17.0.40 - '@types/node-fetch': ^2.6.2 - '@types/react': ^17.0.39 - '@types/react-datepicker': ^4.8.0 - '@types/react-dom': ^17.0.11 - '@types/react-helmet': ^6.1.5 - '@types/react-lazylog': ^4.5.1 - '@types/react-paginate': ^7.1.1 - '@types/react-slick': ^0.23.10 - '@types/react-table': ^7.7.12 - '@types/react-widgets': ^4.4.7 - '@types/sanitize-html': ^2.6.2 - '@types/segment-analytics': ^0.0.34 - 
'@types/styled-components': ^5.1.25 - '@types/testing-library__jest-dom': ^5.14.5 - '@types/unist': ^2.0.5 - '@types/uuid': ^9.0.0 - '@typescript-eslint/eslint-plugin': ^5.27.1 - '@typescript-eslint/parser': ^5.27.1 - '@vitejs/plugin-basic-ssl': ^1.0.1 - '@vitejs/plugin-react': ^3.0.1 - babel-jest: ^29.3.1 - chalk: ^4.1.2 - classnames: ^2.3.1 - date-fns: ^2.29.3 - dayjs: ^1.11.3 - diff: ^5.1.0 - dotenv: ^16.0.3 - eslint: ^8.32.0 - eslint-config-prettier: ^8.6.0 - eslint-config-react-app: ^7.0.1 - eslint-plugin-css-modules: ^2.11.0 - eslint-plugin-import: ^2.27.5 - eslint-plugin-jest: ^26.5.3 - eslint-plugin-jsx-a11y: ^6.7.1 - eslint-plugin-prettier: ^4.2.1 - eslint-plugin-unused-imports: ^2.0.0 - express: ^4.18.1 - firebase: ^9.8.2 - flat: ^5.0.2 - formik: ^2.2.9 - framer-motion: ^6.3.11 - history: ^5.3.0 - husky: ^8.0.1 - jest: ^29.3.0 - jest-environment-jsdom: ^29.3.1 - js-yaml: ^4.1.0 - json-schema: ^0.4.0 - launchdarkly-js-client-sdk: ^3.1.0 - license-checker: ^25.0.1 - lint-staged: ^12.3.7 - lodash: ^4.17.21 - meow: ^9.0.0 - monaco-editor: ^0.34.1 - node-fetch: ^2.6.7 - optionator: ^0.9.1 - orval: ^6.11.1 - prettier: ^2.6.2 - query-string: ^6.13.1 - react: ^17.0.2 - react-datepicker: ^4.8.0 - react-dom: ^17.0.2 - react-helmet-async: ^1.3.0 - react-intersection-observer: ^9.4.2 - react-intl: ^6.1.1 - react-lazylog: ^4.5.3 - react-markdown: ^7.0.1 - react-paginate: ^8.1.3 - react-query: ^3.39.1 - react-reflex: ^4.0.9 - react-resize-detector: ^8.0.3 - react-router-dom: 6.3.0 - react-select: ^5.4.0 - react-select-event: ^5.5.0 - react-slick: ^0.29.0 - react-table: ^7.8.0 - react-use: ^17.4.0 - react-use-intercom: ^1.5.2 - react-widgets: ^4.6.1 - recharts: ^2.1.13 - rehype-slug: ^5.0.1 - rehype-urls: ^1.1.1 - remark-directive: ^2.0.1 - remark-frontmatter: ^4.0.1 - remark-gfm: ^3.0.0 - rxjs: ^7.5.5 - sanitize-html: ^2.7.1 - sass: ^1.52.2 - storybook: ^7.0.0-beta.36 - styled-components: ^5.3.5 - stylelint: ^14.9.1 - stylelint-config-css-modules: ^4.1.0 - 
stylelint-config-prettier-scss: ^0.0.1 - stylelint-config-standard: ^26.0.0 - stylelint-config-standard-scss: ^5.0.0 - tar: ^6.1.11 - timezone-mock: ^1.3.4 - tmpl: ^1.0.5 - ts-node: ^10.8.1 - typesafe-actions: ^5.1.0 - typescript: ^4.9.5 - typescript-plugin-css-modules: ^4.1.1 - unified: ^10.1.2 - unist-util-visit: ^4.1.0 - url: ^0.11.0 - uuid: ^9.0.0 - vite: ^4.0.4 - vite-plugin-checker: ^0.5.5 - vite-plugin-svgr: ^2.4.0 - vite-tsconfig-paths: ^4.0.3 - yup: ^0.32.11 - -dependencies: - '@datadog/browser-rum': 4.30.1 - '@floating-ui/react-dom': 1.2.1_sfoxds7t5ydpegc3knd667wn6m - '@fortawesome/fontawesome-svg-core': 6.2.1 - '@fortawesome/free-brands-svg-icons': 6.2.1 - '@fortawesome/free-regular-svg-icons': 6.2.1 - '@fortawesome/free-solid-svg-icons': 6.2.1 - '@fortawesome/react-fontawesome': 0.1.19_chs37cqfjyeefvqfxhgbgy2rma - '@headlessui/react': 1.7.7_sfoxds7t5ydpegc3knd667wn6m - '@monaco-editor/react': 4.4.6_5rrpgbvkp3saaogcekwkrw2jia - '@sentry/react': 6.19.7_react@17.0.2 - '@sentry/tracing': 6.19.7 - '@tanstack/react-table': 8.7.6_sfoxds7t5ydpegc3knd667wn6m - '@types/diff': 5.0.2 - '@types/node-fetch': 2.6.2 - '@types/segment-analytics': 0.0.34 - '@types/uuid': 9.0.0 - classnames: 2.3.2 - date-fns: 2.29.3 - dayjs: 1.11.7 - diff: 5.1.0 - firebase: 9.15.0 - flat: 5.0.2 - formik: 2.2.9_react@17.0.2 - framer-motion: 6.5.1_sfoxds7t5ydpegc3knd667wn6m - js-yaml: 4.1.0 - json-schema: 0.4.0 - launchdarkly-js-client-sdk: 3.1.0 - lodash: 4.17.21 - monaco-editor: 0.34.1 - query-string: 6.14.1 - react: 17.0.2 - react-datepicker: 4.8.0_sfoxds7t5ydpegc3knd667wn6m - react-dom: 17.0.2_react@17.0.2 - react-helmet-async: 1.3.0_sfoxds7t5ydpegc3knd667wn6m - react-intersection-observer: 9.4.2_react@17.0.2 - react-intl: 6.2.5_oatgdhaahtizs2uezdzbohxvne - react-lazylog: 4.5.3_sfoxds7t5ydpegc3knd667wn6m - react-markdown: 7.1.2_q5o373oqrklnndq2vhekyuzhxi - react-paginate: 8.1.4_react@17.0.2 - react-query: 3.39.2_sfoxds7t5ydpegc3knd667wn6m - react-reflex: 4.0.9_sfoxds7t5ydpegc3knd667wn6m 
- react-resize-detector: 8.0.3_sfoxds7t5ydpegc3knd667wn6m - react-router-dom: 6.3.0_sfoxds7t5ydpegc3knd667wn6m - react-select: 5.7.0_dlps62spiehie4hvtd46aaye5u - react-slick: 0.29.0_sfoxds7t5ydpegc3knd667wn6m - react-table: 7.8.0_react@17.0.2 - react-use: 17.4.0_sfoxds7t5ydpegc3knd667wn6m - react-use-intercom: 1.5.2_sfoxds7t5ydpegc3knd667wn6m - react-widgets: 4.6.1_sfoxds7t5ydpegc3knd667wn6m - recharts: 2.3.2_sfoxds7t5ydpegc3knd667wn6m - rehype-slug: 5.1.0 - rehype-urls: 1.1.1 - remark-directive: 2.0.1 - remark-frontmatter: 4.0.1 - remark-gfm: 3.0.1 - rxjs: 7.8.0 - sanitize-html: 2.8.1 - sass: 1.57.1 - styled-components: 5.3.6_sfoxds7t5ydpegc3knd667wn6m - typesafe-actions: 5.1.0 - unified: 10.1.2 - unist-util-visit: 4.1.1 - url: 0.11.0 - uuid: 9.0.0 - yup: 0.32.11 - -devDependencies: - '@airbyte/eslint-plugin': link:packages/eslint-plugin - '@babel/core': 7.20.12 - '@babel/preset-env': 7.20.2_@babel+core@7.20.12 - '@babel/preset-react': 7.18.6_@babel+core@7.20.12 - '@babel/preset-typescript': 7.18.6_@babel+core@7.20.12 - '@storybook/addon-actions': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-essentials': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-links': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/react': 7.0.0-beta.38_jgxnvbe4faw3ohf4h6p42qq6oy - '@storybook/react-vite': 7.0.0-beta.38_ixkwjuwc6whfuuxbuybnyjw2h4 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@testing-library/jest-dom': 5.16.5 - '@testing-library/react': 12.1.5_sfoxds7t5ydpegc3knd667wn6m - '@testing-library/react-hooks': 7.0.2_sfoxds7t5ydpegc3knd667wn6m - '@testing-library/user-event': 13.5.0 - '@types/flat': 5.0.2 - '@types/jest': 27.5.2 - '@types/js-yaml': 4.0.5 - '@types/json-schema': 7.0.11 - '@types/lodash': 4.14.191 - '@types/node': 17.0.45 - '@types/react': 17.0.52 - '@types/react-datepicker': 4.8.0_sfoxds7t5ydpegc3knd667wn6m - '@types/react-dom': 17.0.18 - '@types/react-helmet': 6.1.6 - '@types/react-lazylog': 4.5.1 - 
'@types/react-paginate': 7.1.1 - '@types/react-slick': 0.23.10 - '@types/react-table': 7.7.14 - '@types/react-widgets': 4.4.7 - '@types/sanitize-html': 2.8.0 - '@types/styled-components': 5.1.26 - '@types/testing-library__jest-dom': 5.14.5 - '@types/unist': 2.0.6 - '@typescript-eslint/eslint-plugin': 5.48.2_azmbqzqvrlvblbdtiwxwvyvjjy - '@typescript-eslint/parser': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - '@vitejs/plugin-basic-ssl': 1.0.1_vite@4.0.4 - '@vitejs/plugin-react': 3.0.1_vite@4.0.4 - babel-jest: 29.3.1_@babel+core@7.20.12 - chalk: 4.1.2 - dotenv: 16.0.3 - eslint: 8.32.0 - eslint-config-prettier: 8.6.0_eslint@8.32.0 - eslint-config-react-app: 7.0.1_f3p7cdzsbkhsmnshbzhbbdgmve - eslint-plugin-css-modules: 2.11.0_eslint@8.32.0 - eslint-plugin-import: 2.27.5_2l6piu6guil2f63lj3qmhzbnn4 - eslint-plugin-jest: 26.9.0_i5clxtuiaceouxhg5syqkw5wwi - eslint-plugin-jsx-a11y: 6.7.1_eslint@8.32.0 - eslint-plugin-prettier: 4.2.1_cn4lalcyadplruoxa5mhp7j3dq - eslint-plugin-unused-imports: 2.0.0_virssgr5omih4ylyae2gddvmxu - express: 4.18.2 - history: 5.3.0 - husky: 8.0.3 - jest: 29.3.1_2263m44mchjafa7bz7l52hbcpa - jest-environment-jsdom: 29.3.1 - license-checker: 25.0.1 - lint-staged: 12.5.0 - meow: 9.0.0 - node-fetch: 2.6.8 - optionator: 0.9.1 - orval: 6.11.1_typescript@4.9.5 - prettier: 2.8.3 - react-select-event: 5.5.1 - storybook: 7.0.0-beta.38 - stylelint: 14.16.1 - stylelint-config-css-modules: 4.1.0_stylelint@14.16.1 - stylelint-config-prettier-scss: 0.0.1_stylelint@14.16.1 - stylelint-config-standard: 26.0.0_stylelint@14.16.1 - stylelint-config-standard-scss: 5.0.0_stylelint@14.16.1 - tar: 6.1.13 - timezone-mock: 1.3.6 - tmpl: 1.0.5 - ts-node: 10.9.1_cin3sed6ohfsopbmt6orxeb4o4 - typescript: 4.9.5 - typescript-plugin-css-modules: 4.1.1_6qtx7vkbdhwvdm4crzlegk4mvi - vite: 4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a - vite-plugin-checker: 0.5.5_svpeoejlom624h637oo4yy72dy - vite-plugin-svgr: 2.4.0_vite@4.0.4 - vite-tsconfig-paths: 4.0.5_typescript@4.9.5 - -packages: - - 
/@adobe/css-tools/4.0.2: - resolution: {integrity: sha512-Fx6tYjk2wKUgLi8uMANZr8GNZx05u44ArIJldn9VxLvolzlJVgHbTUCbwhMd6bcYky178+WUSxPHO3DAtGLWpw==} - dev: true - - /@ampproject/remapping/2.2.0: - resolution: {integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==} - engines: {node: '>=6.0.0'} - dependencies: - '@jridgewell/gen-mapping': 0.1.1 - '@jridgewell/trace-mapping': 0.3.17 - - /@apidevtools/json-schema-ref-parser/9.0.6: - resolution: {integrity: sha512-M3YgsLjI0lZxvrpeGVk9Ap032W6TPQkH6pRAZz81Ac3WUNF79VQooAFnp8umjvVzUmD93NkogxEwbSce7qMsUg==} - dependencies: - '@jsdevtools/ono': 7.1.3 - call-me-maybe: 1.0.2 - js-yaml: 3.14.1 - dev: true - - /@apidevtools/openapi-schemas/2.1.0: - resolution: {integrity: sha512-Zc1AlqrJlX3SlpupFGpiLi2EbteyP7fXmUOGup6/DnkRgjP9bgMM/ag+n91rsv0U1Gpz0H3VILA/o3bW7Ua6BQ==} - engines: {node: '>=10'} - dev: true - - /@apidevtools/swagger-methods/3.0.2: - resolution: {integrity: sha512-QAkD5kK2b1WfjDS/UQn/qQkbwF31uqRjPTrsCs5ZG9BQGAkjwvqGFjjPqAuzac/IYzpPtRzjCP1WrTuAIjMrXg==} - dev: true - - /@apidevtools/swagger-parser/10.1.0: - resolution: {integrity: sha512-9Kt7EuS/7WbMAUv2gSziqjvxwDbFSg3Xeyfuj5laUODX8o/k/CpsAKiQ8W7/R88eXFTMbJYg6+7uAmOWNKmwnw==} - peerDependencies: - openapi-types: '>=7' - dependencies: - '@apidevtools/json-schema-ref-parser': 9.0.6 - '@apidevtools/openapi-schemas': 2.1.0 - '@apidevtools/swagger-methods': 3.0.2 - '@jsdevtools/ono': 7.1.3 - ajv: 8.12.0 - ajv-draft-04: 1.0.0_ajv@8.12.0 - call-me-maybe: 1.0.2 - dev: true - - /@asyncapi/specs/3.2.1: - resolution: {integrity: sha512-FO+EteK+Gk3zwumrBw6frpp9cJ4oQL5++hBBpfM81w16e9KaiA4sKrzvQsvVjifoZZHNvVEX4D2zoz9i8CLccQ==} - dev: true - - /@aw-web-design/x-default-browser/1.4.88: - resolution: {integrity: sha512-AkEmF0wcwYC2QkhK703Y83fxWARttIWXDmQN8+cof8FmFZ5BRhnNXGymeb1S73bOCLfWjYELxtujL56idCN/XA==} - hasBin: true - dependencies: - default-browser-id: 3.0.0 - dev: true - - /@babel/code-frame/7.18.6: - resolution: {integrity: 
sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/highlight': 7.18.6 - - /@babel/compat-data/7.20.10: - resolution: {integrity: sha512-sEnuDPpOJR/fcafHMjpcpGN5M2jbUGUHwmuWKM/YdPzeEDJg8bgmbcWQFUfE32MQjti1koACvoPVsDe8Uq+idg==} - engines: {node: '>=6.9.0'} - - /@babel/core/7.20.12: - resolution: {integrity: sha512-XsMfHovsUYHFMdrIHkZphTN/2Hzzi78R08NuHfDBehym2VsPDL6Zn/JAD/JQdnRvbSsbQc4mVaU1m6JgtTEElg==} - engines: {node: '>=6.9.0'} - dependencies: - '@ampproject/remapping': 2.2.0 - '@babel/code-frame': 7.18.6 - '@babel/generator': 7.20.7 - '@babel/helper-compilation-targets': 7.20.7_@babel+core@7.20.12 - '@babel/helper-module-transforms': 7.20.11 - '@babel/helpers': 7.20.7 - '@babel/parser': 7.20.7 - '@babel/template': 7.20.7 - '@babel/traverse': 7.20.12 - '@babel/types': 7.20.7 - convert-source-map: 1.9.0 - debug: 4.3.4 - gensync: 1.0.0-beta.2 - json5: 2.2.3 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - - /@babel/eslint-parser/7.19.1_2je5tsgpdnpnp4f5qs5fqust6m: - resolution: {integrity: sha512-AqNf2QWt1rtu2/1rLswy6CDP7H9Oh3mMhk177Y67Rg8d7RD9WfOLLv8CGn6tisFvS2htm86yIe1yLF6I1UDaGQ==} - engines: {node: ^10.13.0 || ^12.13.0 || >=14.0.0} - peerDependencies: - '@babel/core': '>=7.11.0' - eslint: ^7.5.0 || ^8.0.0 - dependencies: - '@babel/core': 7.20.12 - '@nicolo-ribaudo/eslint-scope-5-internals': 5.1.1-v1 - eslint: 8.32.0 - eslint-visitor-keys: 2.1.0 - semver: 6.3.0 - dev: true - - /@babel/generator/7.20.7: - resolution: {integrity: sha512-7wqMOJq8doJMZmP4ApXTzLxSr7+oO2jroJURrVEp6XShrQUObV8Tq/D0NCcoYg2uHqUrjzO0zwBjoYzelxK+sw==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - '@jridgewell/gen-mapping': 0.3.2 - jsesc: 2.5.2 - - /@babel/helper-annotate-as-pure/7.18.6: - resolution: {integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==} - engines: {node: '>=6.9.0'} - 
dependencies: - '@babel/types': 7.20.7 - - /@babel/helper-builder-binary-assignment-operator-visitor/7.18.9: - resolution: {integrity: sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-explode-assignable-expression': 7.18.6 - '@babel/types': 7.20.7 - dev: true - - /@babel/helper-compilation-targets/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/compat-data': 7.20.10 - '@babel/core': 7.20.12 - '@babel/helper-validator-option': 7.18.6 - browserslist: 4.21.4 - lru-cache: 5.1.1 - semver: 6.3.0 - - /@babel/helper-create-class-features-plugin/7.20.12_@babel+core@7.20.12: - resolution: {integrity: sha512-9OunRkbT0JQcednL0UFvbfXpAsUXiGjUk0a7sN8fUXX7Mue79cUSMjHGDRRi/Vz9vYlpIhLV5fMD5dKoMhhsNQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-member-expression-to-functions': 7.20.7 - '@babel/helper-optimise-call-expression': 7.18.6 - '@babel/helper-replace-supers': 7.20.7 - '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 - '@babel/helper-split-export-declaration': 7.18.6 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-create-regexp-features-plugin/7.20.5_@babel+core@7.20.12: - resolution: {integrity: sha512-m68B1lkg3XDGX5yCvGO0kPx3v9WIYLnzjKfPcQiwntEQa5ZeRkPmo2X/ISJc8qxWGfwUr+kvZAeEzAwLec2r2w==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-annotate-as-pure': 7.18.6 - regexpu-core: 5.2.2 - dev: true - - 
/@babel/helper-define-polyfill-provider/0.3.3_@babel+core@7.20.12: - resolution: {integrity: sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww==} - peerDependencies: - '@babel/core': ^7.4.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-compilation-targets': 7.20.7_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - debug: 4.3.4 - lodash.debounce: 4.0.8 - resolve: 1.22.1 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-environment-visitor/7.18.9: - resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} - engines: {node: '>=6.9.0'} - - /@babel/helper-explode-assignable-expression/7.18.6: - resolution: {integrity: sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - dev: true - - /@babel/helper-function-name/7.19.0: - resolution: {integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/template': 7.20.7 - '@babel/types': 7.20.7 - - /@babel/helper-hoist-variables/7.18.6: - resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - - /@babel/helper-member-expression-to-functions/7.20.7: - resolution: {integrity: sha512-9J0CxJLq315fEdi4s7xK5TQaNYjZw+nDVpVqr1axNGKzdrdwYBD5b4uKv3n75aABG0rCCTK8Im8Ww7eYfMrZgw==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - dev: true - - /@babel/helper-module-imports/7.18.6: - resolution: {integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - - 
/@babel/helper-module-transforms/7.20.11: - resolution: {integrity: sha512-uRy78kN4psmji1s2QtbtcCSaj/LILFDp0f/ymhpQH5QY3nljUZCaNWz9X1dEj/8MBdBEFECs7yRhKn8i7NjZgg==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-simple-access': 7.20.2 - '@babel/helper-split-export-declaration': 7.18.6 - '@babel/helper-validator-identifier': 7.19.1 - '@babel/template': 7.20.7 - '@babel/traverse': 7.20.12 - '@babel/types': 7.20.7 - transitivePeerDependencies: - - supports-color - - /@babel/helper-optimise-call-expression/7.18.6: - resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - dev: true - - /@babel/helper-plugin-utils/7.20.2: - resolution: {integrity: sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ==} - engines: {node: '>=6.9.0'} - - /@babel/helper-remap-async-to-generator/7.18.9_@babel+core@7.20.12: - resolution: {integrity: sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-wrap-function': 7.20.5 - '@babel/types': 7.20.7 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-replace-supers/7.20.7: - resolution: {integrity: sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-member-expression-to-functions': 7.20.7 - '@babel/helper-optimise-call-expression': 7.18.6 - '@babel/template': 7.20.7 - '@babel/traverse': 7.20.12 - '@babel/types': 7.20.7 - 
transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-simple-access/7.20.2: - resolution: {integrity: sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - - /@babel/helper-skip-transparent-expression-wrappers/7.20.0: - resolution: {integrity: sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - dev: true - - /@babel/helper-split-export-declaration/7.18.6: - resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.20.7 - - /@babel/helper-string-parser/7.19.4: - resolution: {integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==} - engines: {node: '>=6.9.0'} - - /@babel/helper-validator-identifier/7.19.1: - resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} - engines: {node: '>=6.9.0'} - - /@babel/helper-validator-option/7.18.6: - resolution: {integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==} - engines: {node: '>=6.9.0'} - - /@babel/helper-wrap-function/7.20.5: - resolution: {integrity: sha512-bYMxIWK5mh+TgXGVqAtnu5Yn1un+v8DDZtqyzKRLUzrh70Eal2O3aZ7aPYiMADO4uKlkzOiRiZ6GX5q3qxvW9Q==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-function-name': 7.19.0 - '@babel/template': 7.20.7 - '@babel/traverse': 7.20.12 - '@babel/types': 7.20.7 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helpers/7.20.7: - resolution: {integrity: sha512-PBPjs5BppzsGaxHQCDKnZ6Gd9s6xl8bBCluz3vEInLGRJmnZan4F6BYCeqtyXqkk4W5IlPmjK4JlOuZkpJ3xZA==} - engines: {node: '>=6.9.0'} - dependencies: - 
'@babel/template': 7.20.7 - '@babel/traverse': 7.20.12 - '@babel/types': 7.20.7 - transitivePeerDependencies: - - supports-color - - /@babel/highlight/7.18.6: - resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-validator-identifier': 7.19.1 - chalk: 2.4.2 - js-tokens: 4.0.0 - - /@babel/parser/7.20.7: - resolution: {integrity: sha512-T3Z9oHybU+0vZlY9CiDSJQTD5ZapcW18ZctFMi0MOAl/4BjFF4ul7NVSARLdbGO5vDqy9eQiGTV0LtKfvCYvcg==} - engines: {node: '>=6.0.0'} - hasBin: true - dependencies: - '@babel/types': 7.20.7 - - /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-sbr9+wNE5aXMBBFBICk01tt7sBf2Oc9ikRFEcem/ZORup9IMUdNhW7/wVLEbbtlWOsEubJet46mHAL2C8+2jKQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.13.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 - '@babel/plugin-proposal-optional-chaining': 7.20.7_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-async-generator-functions/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-xMbiLsn/8RK7Wq7VeVytytS2L6qE69bXPB10YCmMdDZbKF4okCqY74pI/jJQ/8U0b/F6NrT2+14b8/P9/3AMGA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-plugin-utils': 7.20.2 - 
'@babel/helper-remap-async-to-generator': 7.18.9_@babel+core@7.20.12 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.20.12 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-proposal-class-properties/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-class-features-plugin': 7.20.12_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-proposal-class-static-block/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-AveGOoi9DAjUYYuUAG//Ig69GlazLnoyzMw68VCDux+c1tsnnH/OkYcpz/5xzMkEFC6UxjR5Gw1c+iY2wOGVeQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.12.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-class-features-plugin': 7.20.12_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-class-static-block': 7.14.5_@babel+core@7.20.12 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-proposal-decorators/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-JB45hbUweYpwAGjkiM7uCyXMENH2lG+9r3G2E+ttc2PRXAoEkpfd/KW5jDg4j8RS6tLtTG1jZi9LbHZVSfs1/A==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-class-features-plugin': 7.20.12_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-replace-supers': 7.20.7 - '@babel/helper-split-export-declaration': 7.18.6 - '@babel/plugin-syntax-decorators': 7.19.0_@babel+core@7.20.12 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-proposal-dynamic-import/7.18.6_@babel+core@7.20.12: - resolution: {integrity: 
sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-export-namespace-from/7.18.9_@babel+core@7.20.12: - resolution: {integrity: sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-export-namespace-from': 7.8.3_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-json-strings/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-json-strings': 7.8.3_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-logical-assignment-operators/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-logical-assignment-operators': 7.10.4_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-nullish-coalescing-operator/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - 
'@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-numeric-separator/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-object-rest-spread/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/compat-data': 7.20.10 - '@babel/core': 7.20.12 - '@babel/helper-compilation-targets': 7.20.7_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-transform-parameters': 7.20.7_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-optional-catch-binding/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-optional-catch-binding': 7.8.3_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-optional-chaining/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-T+A7b1kfjtRM51ssoOfS1+wbyCVqorfyZhT99TvxxLMirPShD8CzKMRepMlCBGM5RpHMbn8s+5MMHnPstJH6mQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - 
'@babel/helper-skip-transparent-expression-wrappers': 7.20.0 - '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.20.12 - dev: true - - /@babel/plugin-proposal-private-methods/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-class-features-plugin': 7.20.12_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-proposal-private-property-in-object/7.20.5_@babel+core@7.20.12: - resolution: {integrity: sha512-Vq7b9dUA12ByzB4EjQTPo25sFhY+08pQDBSZRtUAkj7lb7jahaHR5igera16QZ+3my1nYR4dKsNdYj5IjPHilQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-create-class-features-plugin': 7.20.12_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-private-property-in-object': 7.14.5_@babel+core@7.20.12 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-proposal-unicode-property-regex/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==} - engines: {node: '>=4'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-regexp-features-plugin': 7.20.5_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-async-generators/7.8.4_@babel+core@7.20.12: - resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - 
'@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-bigint/7.8.3_@babel+core@7.20.12: - resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-class-properties/7.12.13_@babel+core@7.20.12: - resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-class-static-block/7.14.5_@babel+core@7.20.12: - resolution: {integrity: sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-decorators/7.19.0_@babel+core@7.20.12: - resolution: {integrity: sha512-xaBZUEDntt4faL1yN8oIFlhfXeQAWJW7CLKYsHTUqriCUbj8xOra8bfxxKGi/UwExPFBuPdH4XfHc9rGQhrVkQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-dynamic-import/7.8.3_@babel+core@7.20.12: - resolution: {integrity: sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-export-namespace-from/7.8.3_@babel+core@7.20.12: - resolution: {integrity: sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 - 
dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-flow/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-LUbR+KNTBWCUAqRG9ex5Gnzu2IOkt8jRJbHHXFT9q+L9zm7M/QQbEqXyw1n1pohYvOyWC8CjeyjrSaIwiYjK7A==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-import-assertions/7.20.0_@babel+core@7.20.12: - resolution: {integrity: sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-import-meta/7.10.4_@babel+core@7.20.12: - resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-json-strings/7.8.3_@babel+core@7.20.12: - resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-jsx/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - - /@babel/plugin-syntax-logical-assignment-operators/7.10.4_@babel+core@7.20.12: - resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} - 
peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-nullish-coalescing-operator/7.8.3_@babel+core@7.20.12: - resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-numeric-separator/7.10.4_@babel+core@7.20.12: - resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-object-rest-spread/7.8.3_@babel+core@7.20.12: - resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-optional-catch-binding/7.8.3_@babel+core@7.20.12: - resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-optional-chaining/7.8.3_@babel+core@7.20.12: - resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-private-property-in-object/7.14.5_@babel+core@7.20.12: - resolution: {integrity: 
sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-top-level-await/7.14.5_@babel+core@7.20.12: - resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-typescript/7.20.0_@babel+core@7.20.12: - resolution: {integrity: sha512-rd9TkG+u1CExzS4SM1BlMEhMXwFLKVjOAFFCDx9PbX5ycJWDoWMcwdJH9RhkPu1dOgn5TrxLot/Gx6lWFuAUNQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-arrow-functions/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-async-to-generator/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-Uo5gwHPT9vgnSXQxqGtpdufUiWp96gk7yiP4Mp5bm1QMkEmLXBO7PAGYbKoJ6DhAwiNkcHFBol/x5zZZkL/t0Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-remap-async-to-generator': 7.18.9_@babel+core@7.20.12 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-block-scoped-functions/7.18.6_@babel+core@7.20.12: - resolution: {integrity: 
sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-block-scoping/7.20.11_@babel+core@7.20.12: - resolution: {integrity: sha512-tA4N427a7fjf1P0/2I4ScsHGc5jcHPbb30xMbaTke2gxDuWpUfXDuX1FEymJwKk4tuGUvGcejAR6HdZVqmmPyw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-classes/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-LWYbsiXTPKl+oBlXUGlwNlJZetXD5Am+CyBdqhPsDVjM9Jc8jwBJFrKhHf900Kfk2eZG1y9MAG3UNajol7A4VQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-compilation-targets': 7.20.7_@babel+core@7.20.12 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-optimise-call-expression': 7.18.6 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-replace-supers': 7.20.7 - '@babel/helper-split-export-declaration': 7.18.6 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-computed-properties/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/template': 7.20.7 - dev: true - - /@babel/plugin-transform-destructuring/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-Xwg403sRrZb81IVB79ZPqNQME23yhugYVqgTxAhT99h485F4f+GMELFhhOsscDUB7HCswepKeCKLn/GZvUKoBA==} - engines: {node: 
'>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-dotall-regex/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-regexp-features-plugin': 7.20.5_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-duplicate-keys/7.18.9_@babel+core@7.20.12: - resolution: {integrity: sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-exponentiation-operator/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-builder-binary-assignment-operator-visitor': 7.18.9 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-flow-strip-types/7.19.0_@babel+core@7.20.12: - resolution: {integrity: sha512-sgeMlNaQVbCSpgLSKP4ZZKfsJVnFnNQlUSk6gPYzR/q7tzCgQF2t8RBKAP6cKJeZdveei7Q7Jm527xepI8lNLg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-flow': 7.18.6_@babel+core@7.20.12 - dev: true - - /@babel/plugin-transform-for-of/7.18.8_@babel+core@7.20.12: - resolution: {integrity: sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==} - 
engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-function-name/7.18.9_@babel+core@7.20.12: - resolution: {integrity: sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-compilation-targets': 7.20.7_@babel+core@7.20.12 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-literals/7.18.9_@babel+core@7.20.12: - resolution: {integrity: sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-member-expression-literals/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-modules-amd/7.20.11_@babel+core@7.20.12: - resolution: {integrity: sha512-NuzCt5IIYOW0O30UvqktzHYR2ud5bOWbY0yaxWZ6G+aFzOMJvrs5YHNikrbdaT15+KNO31nPOy5Fim3ku6Zb5g==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-module-transforms': 7.20.11 - '@babel/helper-plugin-utils': 7.20.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-modules-commonjs/7.20.11_@babel+core@7.20.12: - resolution: {integrity: 
sha512-S8e1f7WQ7cimJQ51JkAaDrEtohVEitXjgCGAS2N8S31Y42E+kWwfSz83LYz57QdBm7q9diARVqanIaH2oVgQnw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-module-transforms': 7.20.11 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-simple-access': 7.20.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-modules-systemjs/7.20.11_@babel+core@7.20.12: - resolution: {integrity: sha512-vVu5g9BPQKSFEmvt2TA4Da5N+QVS66EX21d8uoOihC+OCpUoGvzVsXeqFdtAEfVa5BILAeFt+U7yVmLbQnAJmw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-hoist-variables': 7.18.6 - '@babel/helper-module-transforms': 7.20.11 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-validator-identifier': 7.19.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-modules-umd/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-module-transforms': 7.20.11 - '@babel/helper-plugin-utils': 7.20.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-named-capturing-groups-regex/7.20.5_@babel+core@7.20.12: - resolution: {integrity: sha512-mOW4tTzi5iTLnw+78iEq3gr8Aoq4WNRGpmSlrogqaiCBoR1HFhpU4JkpQFOHfeYx3ReVIFWOQJS4aZBRvuZ6mA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-regexp-features-plugin': 7.20.5_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-new-target/7.18.6_@babel+core@7.20.12: - resolution: {integrity: 
sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-object-super/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-replace-supers': 7.20.7 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-parameters/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-WiWBIkeHKVOSYPO0pWkxGPfKeWrCJyD3NJ53+Lrp/QMSZbsVPovrVl2aWZ19D/LTVnaDv5Ap7GJ/B2CTOZdrfA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-property-literals/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-react-display-name/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-react-jsx-development/7.18.6_@babel+core@7.20.12: - resolution: {integrity: 
sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/plugin-transform-react-jsx': 7.20.7_@babel+core@7.20.12 - dev: true - - /@babel/plugin-transform-react-jsx-self/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-A0LQGx4+4Jv7u/tWzoJF7alZwnBDQd6cGLh9P+Ttk4dpiL+J5p7NSNv/9tlEFFJDq3kjxOavWmbm6t0Gk+A3Ig==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-react-jsx-source/7.19.6_@babel+core@7.20.12: - resolution: {integrity: sha512-RpAi004QyMNisst/pvSanoRdJ4q+jMCWyk9zdw/CyLB9j8RXEahodR6l2GyttDRyEVWZtbN+TpLiHJ3t34LbsQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-react-jsx/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-Tfq7qqD+tRj3EoDhY00nn2uP2hsRxgYGi5mLQ5TimKav0a9Lrpd4deE+fcLXU8zFYRjlKPHZhpCvfEA6qnBxqQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.20.12 - '@babel/types': 7.20.7 - dev: true - - /@babel/plugin-transform-react-pure-annotations/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - 
/@babel/plugin-transform-regenerator/7.20.5_@babel+core@7.20.12: - resolution: {integrity: sha512-kW/oO7HPBtntbsahzQ0qSE3tFvkFwnbozz3NWFhLGqH75vLEg+sCGngLlhVkePlCs3Jv0dBBHDzCHxNiFAQKCQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - regenerator-transform: 0.15.1 - dev: true - - /@babel/plugin-transform-reserved-words/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-runtime/7.19.6_@babel+core@7.20.12: - resolution: {integrity: sha512-PRH37lz4JU156lYFW1p8OxE5i7d6Sl/zV58ooyr+q1J1lnQPyg5tIiXlIwNVhJaY4W3TmOtdc8jqdXQcB1v5Yw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-plugin-utils': 7.20.2 - babel-plugin-polyfill-corejs2: 0.3.3_@babel+core@7.20.12 - babel-plugin-polyfill-corejs3: 0.6.0_@babel+core@7.20.12 - babel-plugin-polyfill-regenerator: 0.4.1_@babel+core@7.20.12 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-shorthand-properties/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-spread/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw==} - engines: {node: '>=6.9.0'} - peerDependencies: - 
'@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 - dev: true - - /@babel/plugin-transform-sticky-regex/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-template-literals/7.18.9_@babel+core@7.20.12: - resolution: {integrity: sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-typeof-symbol/7.18.9_@babel+core@7.20.12: - resolution: {integrity: sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-typescript/7.20.7_@babel+core@7.20.12: - resolution: {integrity: sha512-m3wVKEvf6SoszD8pu4NZz3PvfKRCMgk6D6d0Qi9hNnlM5M6CFS92EgF4EiHVLKbU0r/r7ty1hg7NPZwE7WRbYw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-class-features-plugin': 7.20.12_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-syntax-typescript': 7.20.0_@babel+core@7.20.12 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-unicode-escapes/7.18.10_@babel+core@7.20.12: - resolution: {integrity: 
sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-transform-unicode-regex/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-create-regexp-features-plugin': 7.20.5_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/preset-env/7.20.2_@babel+core@7.20.12: - resolution: {integrity: sha512-1G0efQEWR1EHkKvKHqbG+IN/QdgwfByUpM5V5QroDzGV2t3S/WXNQd693cHiHTlCFMpr9B6FkPFXDA2lQcKoDg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/compat-data': 7.20.10 - '@babel/core': 7.20.12 - '@babel/helper-compilation-targets': 7.20.7_@babel+core@7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-validator-option': 7.18.6 - '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-proposal-async-generator-functions': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-proposal-class-properties': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-class-static-block': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-proposal-dynamic-import': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-export-namespace-from': 7.18.9_@babel+core@7.20.12 - '@babel/plugin-proposal-json-strings': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-logical-assignment-operators': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6_@babel+core@7.20.12 - 
'@babel/plugin-proposal-numeric-separator': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-object-rest-spread': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-proposal-optional-catch-binding': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-optional-chaining': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-proposal-private-methods': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-private-property-in-object': 7.20.5_@babel+core@7.20.12 - '@babel/plugin-proposal-unicode-property-regex': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.20.12 - '@babel/plugin-syntax-class-properties': 7.12.13_@babel+core@7.20.12 - '@babel/plugin-syntax-class-static-block': 7.14.5_@babel+core@7.20.12 - '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-export-namespace-from': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-import-assertions': 7.20.0_@babel+core@7.20.12 - '@babel/plugin-syntax-json-strings': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-logical-assignment-operators': 7.10.4_@babel+core@7.20.12 - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.20.12 - '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-optional-catch-binding': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-private-property-in-object': 7.14.5_@babel+core@7.20.12 - '@babel/plugin-syntax-top-level-await': 7.14.5_@babel+core@7.20.12 - '@babel/plugin-transform-arrow-functions': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-transform-async-to-generator': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-transform-block-scoped-functions': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-block-scoping': 7.20.11_@babel+core@7.20.12 - '@babel/plugin-transform-classes': 7.20.7_@babel+core@7.20.12 - 
'@babel/plugin-transform-computed-properties': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-transform-destructuring': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-transform-dotall-regex': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-duplicate-keys': 7.18.9_@babel+core@7.20.12 - '@babel/plugin-transform-exponentiation-operator': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-for-of': 7.18.8_@babel+core@7.20.12 - '@babel/plugin-transform-function-name': 7.18.9_@babel+core@7.20.12 - '@babel/plugin-transform-literals': 7.18.9_@babel+core@7.20.12 - '@babel/plugin-transform-member-expression-literals': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-modules-amd': 7.20.11_@babel+core@7.20.12 - '@babel/plugin-transform-modules-commonjs': 7.20.11_@babel+core@7.20.12 - '@babel/plugin-transform-modules-systemjs': 7.20.11_@babel+core@7.20.12 - '@babel/plugin-transform-modules-umd': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-named-capturing-groups-regex': 7.20.5_@babel+core@7.20.12 - '@babel/plugin-transform-new-target': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-object-super': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-parameters': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-transform-property-literals': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-regenerator': 7.20.5_@babel+core@7.20.12 - '@babel/plugin-transform-reserved-words': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-shorthand-properties': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-spread': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-transform-sticky-regex': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-template-literals': 7.18.9_@babel+core@7.20.12 - '@babel/plugin-transform-typeof-symbol': 7.18.9_@babel+core@7.20.12 - '@babel/plugin-transform-unicode-escapes': 7.18.10_@babel+core@7.20.12 - '@babel/plugin-transform-unicode-regex': 7.18.6_@babel+core@7.20.12 - '@babel/preset-modules': 0.1.5_@babel+core@7.20.12 - '@babel/types': 
7.20.7 - babel-plugin-polyfill-corejs2: 0.3.3_@babel+core@7.20.12 - babel-plugin-polyfill-corejs3: 0.6.0_@babel+core@7.20.12 - babel-plugin-polyfill-regenerator: 0.4.1_@babel+core@7.20.12 - core-js-compat: 3.27.1 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/preset-flow/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-E7BDhL64W6OUqpuyHnSroLnqyRTcG6ZdOBl1OKI/QK/HJfplqK/S3sq1Cckx7oTodJ5yOXyfw7rEADJ6UjoQDQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-validator-option': 7.18.6 - '@babel/plugin-transform-flow-strip-types': 7.19.0_@babel+core@7.20.12 - dev: true - - /@babel/preset-modules/0.1.5_@babel+core@7.20.12: - resolution: {integrity: sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/plugin-proposal-unicode-property-regex': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-dotall-regex': 7.18.6_@babel+core@7.20.12 - '@babel/types': 7.20.7 - esutils: 2.0.3 - dev: true - - /@babel/preset-react/7.18.6_@babel+core@7.20.12: - resolution: {integrity: sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-validator-option': 7.18.6 - '@babel/plugin-transform-react-display-name': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-react-jsx': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-transform-react-jsx-development': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-react-pure-annotations': 7.18.6_@babel+core@7.20.12 - dev: true - - /@babel/preset-typescript/7.18.6_@babel+core@7.20.12: - 
resolution: {integrity: sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-plugin-utils': 7.20.2 - '@babel/helper-validator-option': 7.18.6 - '@babel/plugin-transform-typescript': 7.20.7_@babel+core@7.20.12 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/register/7.18.9_@babel+core@7.20.12: - resolution: {integrity: sha512-ZlbnXDcNYHMR25ITwwNKT88JiaukkdVj/nG7r3wnuXkOTHc60Uy05PwMCPre0hSkY68E6zK3xz+vUJSP2jWmcw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - clone-deep: 4.0.1 - find-cache-dir: 2.1.0 - make-dir: 2.1.0 - pirates: 4.0.5 - source-map-support: 0.5.21 - dev: true - - /@babel/runtime/7.20.7: - resolution: {integrity: sha512-UF0tvkUtxwAgZ5W/KrkHf0Rn0fdnLDU9ScxBrEVNUprE/MzirjK4MJUX1/BVDv00Sv8cljtukVK1aky++X1SjQ==} - engines: {node: '>=6.9.0'} - dependencies: - regenerator-runtime: 0.13.11 - - /@babel/template/7.20.7: - resolution: {integrity: sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/code-frame': 7.18.6 - '@babel/parser': 7.20.7 - '@babel/types': 7.20.7 - - /@babel/traverse/7.20.12: - resolution: {integrity: sha512-MsIbFN0u+raeja38qboyF8TIT7K0BFzz/Yd/77ta4MsUsmP2RAnidIlwq7d5HFQrH/OZJecGV6B71C4zAgpoSQ==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/code-frame': 7.18.6 - '@babel/generator': 7.20.7 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-hoist-variables': 7.18.6 - '@babel/helper-split-export-declaration': 7.18.6 - '@babel/parser': 7.20.7 - '@babel/types': 7.20.7 - debug: 4.3.4 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color - - /@babel/traverse/7.20.12_supports-color@5.5.0: - resolution: 
{integrity: sha512-MsIbFN0u+raeja38qboyF8TIT7K0BFzz/Yd/77ta4MsUsmP2RAnidIlwq7d5HFQrH/OZJecGV6B71C4zAgpoSQ==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/code-frame': 7.18.6 - '@babel/generator': 7.20.7 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-hoist-variables': 7.18.6 - '@babel/helper-split-export-declaration': 7.18.6 - '@babel/parser': 7.20.7 - '@babel/types': 7.20.7 - debug: 4.3.4_supports-color@5.5.0 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color - dev: false - - /@babel/types/7.20.7: - resolution: {integrity: sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-string-parser': 7.19.4 - '@babel/helper-validator-identifier': 7.19.1 - to-fast-properties: 2.0.0 - - /@base2/pretty-print-object/1.0.1: - resolution: {integrity: sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA==} - dev: true - - /@bcoe/v8-coverage/0.2.3: - resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} - dev: true - - /@colors/colors/1.5.0: - resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} - engines: {node: '>=0.1.90'} - requiresBuild: true - dev: true - optional: true - - /@cspotcode/source-map-support/0.8.1: - resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} - engines: {node: '>=12'} - dependencies: - '@jridgewell/trace-mapping': 0.3.9 - dev: true - - /@csstools/selector-specificity/2.0.2_wajs5nedgkikc5pcuwett7legi: - resolution: {integrity: sha512-IkpVW/ehM1hWKln4fCA3NzJU8KwD+kIOvPZA4cqxoJHtE21CCzjyp+Kxbu0i5I4tBNOlXPL9mjwnWlL0VEG4Fg==} - engines: {node: ^12 || ^14 || >=16} - peerDependencies: - postcss: ^8.2 - postcss-selector-parser: ^6.0.10 - 
dependencies: - postcss: 8.4.21 - postcss-selector-parser: 6.0.11 - dev: true - - /@datadog/browser-core/4.30.1: - resolution: {integrity: sha512-f2ETFJgZTHH3gzAKNEy8wI22Ae9jFtMk6t7s1QIyHwdFsftC0E0GKuCdMv2maeKMm/z8UYvP27vvGgXjbW1twg==} - dev: false - - /@datadog/browser-rum-core/4.30.1: - resolution: {integrity: sha512-scWb9zAJfIEQexabQxdm98wxy9CJzhqvsDX/5/DUyv1lQDAsjaP6IHWAMTkikHeHift67/E9j5FVTrUMaanydQ==} - dependencies: - '@datadog/browser-core': 4.30.1 - dev: false - - /@datadog/browser-rum/4.30.1: - resolution: {integrity: sha512-iOJJYjyS9j1EsD93RIwabqCPRpfDd7FrltbgVVll+4oJ+eE2lfO/wPWbLuV1Kr4jZ39tGSzef7GC/Q/dcNLYzQ==} - peerDependencies: - '@datadog/browser-logs': 4.30.1 - peerDependenciesMeta: - '@datadog/browser-logs': - optional: true - dependencies: - '@datadog/browser-core': 4.30.1 - '@datadog/browser-rum-core': 4.30.1 - dev: false - - /@discoveryjs/json-ext/0.5.7: - resolution: {integrity: sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==} - engines: {node: '>=10.0.0'} - dev: true - - /@emotion/babel-plugin/11.10.5_@babel+core@7.20.12: - resolution: {integrity: sha512-xE7/hyLHJac7D2Ve9dKroBBZqBT7WuPQmWcq7HSGb84sUuP4mlOWoB8dvVfD9yk5DHkU1m6RW7xSoDtnQHNQeA==} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-module-imports': 7.18.6 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.20.12 - '@babel/runtime': 7.20.7 - '@emotion/hash': 0.9.0 - '@emotion/memoize': 0.8.0 - '@emotion/serialize': 1.1.1 - babel-plugin-macros: 3.1.0 - convert-source-map: 1.9.0 - escape-string-regexp: 4.0.0 - find-root: 1.1.0 - source-map: 0.5.7 - stylis: 4.1.3 - dev: false - - /@emotion/cache/11.10.5: - resolution: {integrity: sha512-dGYHWyzTdmK+f2+EnIGBpkz1lKc4Zbj2KHd4cX3Wi8/OWr5pKslNjc3yABKH4adRGCvSX4VDC0i04mrrq0aiRA==} - dependencies: - '@emotion/memoize': 0.8.0 - '@emotion/sheet': 1.2.1 - '@emotion/utils': 1.2.0 - '@emotion/weak-memoize': 0.3.0 - stylis: 4.1.3 - dev: false - - 
/@emotion/hash/0.9.0: - resolution: {integrity: sha512-14FtKiHhy2QoPIzdTcvh//8OyBlknNs2nXRwIhG904opCby3l+9Xaf/wuPvICBF0rc1ZCNBd3nKe9cd2mecVkQ==} - dev: false - - /@emotion/is-prop-valid/0.8.8: - resolution: {integrity: sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==} - requiresBuild: true - dependencies: - '@emotion/memoize': 0.7.4 - dev: false - optional: true - - /@emotion/is-prop-valid/1.2.0: - resolution: {integrity: sha512-3aDpDprjM0AwaxGE09bOPkNxHpBd+kA6jty3RnaEXdweX1DF1U3VQpPYb0g1IStAuK7SVQ1cy+bNBBKp4W3Fjg==} - dependencies: - '@emotion/memoize': 0.8.0 - dev: false - - /@emotion/memoize/0.7.4: - resolution: {integrity: sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==} - dev: false - optional: true - - /@emotion/memoize/0.8.0: - resolution: {integrity: sha512-G/YwXTkv7Den9mXDO7AhLWkE3q+I92B+VqAE+dYG4NGPaHZGvt3G8Q0p9vmE+sq7rTGphUbAvmQ9YpbfMQGGlA==} - dev: false - - /@emotion/react/11.10.5_nehdyrcubdy45i2h35h56gfg7i: - resolution: {integrity: sha512-TZs6235tCJ/7iF6/rvTaOH4oxQg2gMAcdHemjwLKIjKz4rRuYe1HJ2TQJKnAcRAfOUDdU8XoDadCe1rl72iv8A==} - peerDependencies: - '@babel/core': ^7.0.0 - '@types/react': '*' - react: '>=16.8.0' - peerDependenciesMeta: - '@babel/core': - optional: true - '@types/react': - optional: true - dependencies: - '@babel/core': 7.20.12 - '@babel/runtime': 7.20.7 - '@emotion/babel-plugin': 11.10.5_@babel+core@7.20.12 - '@emotion/cache': 11.10.5 - '@emotion/serialize': 1.1.1 - '@emotion/use-insertion-effect-with-fallbacks': 1.0.0_react@17.0.2 - '@emotion/utils': 1.2.0 - '@emotion/weak-memoize': 0.3.0 - '@types/react': 17.0.52 - hoist-non-react-statics: 3.3.2 - react: 17.0.2 - dev: false - - /@emotion/serialize/1.1.1: - resolution: {integrity: sha512-Zl/0LFggN7+L1liljxXdsVSVlg6E/Z/olVWpfxUTxOAmi8NU7YoeWeLfi1RmnB2TATHoaWwIBRoL+FvAJiTUQA==} - dependencies: - '@emotion/hash': 0.9.0 - '@emotion/memoize': 0.8.0 - '@emotion/unitless': 0.8.0 - 
'@emotion/utils': 1.2.0 - csstype: 3.1.1 - dev: false - - /@emotion/sheet/1.2.1: - resolution: {integrity: sha512-zxRBwl93sHMsOj4zs+OslQKg/uhF38MB+OMKoCrVuS0nyTkqnau+BM3WGEoOptg9Oz45T/aIGs1qbVAsEFo3nA==} - dev: false - - /@emotion/stylis/0.8.5: - resolution: {integrity: sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ==} - dev: false - - /@emotion/unitless/0.7.5: - resolution: {integrity: sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==} - dev: false - - /@emotion/unitless/0.8.0: - resolution: {integrity: sha512-VINS5vEYAscRl2ZUDiT3uMPlrFQupiKgHz5AA4bCH1miKBg4qtwkim1qPmJj/4WG6TreYMY111rEFsjupcOKHw==} - dev: false - - /@emotion/use-insertion-effect-with-fallbacks/1.0.0_react@17.0.2: - resolution: {integrity: sha512-1eEgUGmkaljiBnRMTdksDV1W4kUnmwgp7X9G8B++9GYwl1lUdqSndSriIrTJ0N7LQaoauY9JJ2yhiOYK5+NI4A==} - peerDependencies: - react: '>=16.8.0' - dependencies: - react: 17.0.2 - - /@emotion/utils/1.2.0: - resolution: {integrity: sha512-sn3WH53Kzpw8oQ5mgMmIzzyAaH2ZqFEbozVVBSYp538E06OSE6ytOp7pRAjNQR+Q/orwqdQYJSe2m3hCOeznkw==} - dev: false - - /@emotion/weak-memoize/0.3.0: - resolution: {integrity: sha512-AHPmaAx+RYfZz0eYu6Gviiagpmiyw98ySSlQvCUhVGDRtDFe4DBS0x1bSjdF3gqUDYOczB+yYvBTtEylYSdRhg==} - dev: false - - /@esbuild/android-arm/0.15.18: - resolution: {integrity: sha512-5GT+kcs2WVGjVs7+boataCkO5Fg0y4kCjzkB5bAip7H4jfnOS3dA6KPiww9W1OEKTKeAcUVhdZGvgI65OXmUnw==} - engines: {node: '>=12'} - cpu: [arm] - os: [android] - requiresBuild: true - dev: true - optional: true - - /@esbuild/android-arm/0.16.17: - resolution: {integrity: sha512-N9x1CMXVhtWEAMS7pNNONyA14f71VPQN9Cnavj1XQh6T7bskqiLLrSca4O0Vr8Wdcga943eThxnVp3JLnBMYtw==} - engines: {node: '>=12'} - cpu: [arm] - os: [android] - requiresBuild: true - dev: true - optional: true - - /@esbuild/android-arm64/0.16.17: - resolution: {integrity: sha512-MIGl6p5sc3RDTLLkYL1MyL8BMRN4tLMRCn+yRJJmEDvYZ2M7tmAf80hx1kbNEUX2KJ50RRtxZ4JHLvCfuB6kBg==} 
- engines: {node: '>=12'} - cpu: [arm64] - os: [android] - requiresBuild: true - dev: true - optional: true - - /@esbuild/android-x64/0.16.17: - resolution: {integrity: sha512-a3kTv3m0Ghh4z1DaFEuEDfz3OLONKuFvI4Xqczqx4BqLyuFaFkuaG4j2MtA6fuWEFeC5x9IvqnX7drmRq/fyAQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [android] - requiresBuild: true - dev: true - optional: true - - /@esbuild/darwin-arm64/0.16.17: - resolution: {integrity: sha512-/2agbUEfmxWHi9ARTX6OQ/KgXnOWfsNlTeLcoV7HSuSTv63E4DqtAc+2XqGw1KHxKMHGZgbVCZge7HXWX9Vn+w==} - engines: {node: '>=12'} - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /@esbuild/darwin-x64/0.16.17: - resolution: {integrity: sha512-2By45OBHulkd9Svy5IOCZt376Aa2oOkiE9QWUK9fe6Tb+WDr8hXL3dpqi+DeLiMed8tVXspzsTAvd0jUl96wmg==} - engines: {node: '>=12'} - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /@esbuild/freebsd-arm64/0.16.17: - resolution: {integrity: sha512-mt+cxZe1tVx489VTb4mBAOo2aKSnJ33L9fr25JXpqQqzbUIw/yzIzi+NHwAXK2qYV1lEFp4OoVeThGjUbmWmdw==} - engines: {node: '>=12'} - cpu: [arm64] - os: [freebsd] - requiresBuild: true - dev: true - optional: true - - /@esbuild/freebsd-x64/0.16.17: - resolution: {integrity: sha512-8ScTdNJl5idAKjH8zGAsN7RuWcyHG3BAvMNpKOBaqqR7EbUhhVHOqXRdL7oZvz8WNHL2pr5+eIT5c65kA6NHug==} - engines: {node: '>=12'} - cpu: [x64] - os: [freebsd] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-arm/0.16.17: - resolution: {integrity: sha512-iihzrWbD4gIT7j3caMzKb/RsFFHCwqqbrbH9SqUSRrdXkXaygSZCZg1FybsZz57Ju7N/SHEgPyaR0LZ8Zbe9gQ==} - engines: {node: '>=12'} - cpu: [arm] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-arm64/0.16.17: - resolution: {integrity: sha512-7S8gJnSlqKGVJunnMCrXHU9Q8Q/tQIxk/xL8BqAP64wchPCTzuM6W3Ra8cIa1HIflAvDnNOt2jaL17vaW+1V0g==} - engines: {node: '>=12'} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-ia32/0.16.17: - 
resolution: {integrity: sha512-kiX69+wcPAdgl3Lonh1VI7MBr16nktEvOfViszBSxygRQqSpzv7BffMKRPMFwzeJGPxcio0pdD3kYQGpqQ2SSg==} - engines: {node: '>=12'} - cpu: [ia32] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-loong64/0.15.18: - resolution: {integrity: sha512-L4jVKS82XVhw2nvzLg/19ClLWg0y27ulRwuP7lcyL6AbUWB5aPglXY3M21mauDQMDfRLs8cQmeT03r/+X3cZYQ==} - engines: {node: '>=12'} - cpu: [loong64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-loong64/0.16.17: - resolution: {integrity: sha512-dTzNnQwembNDhd654cA4QhbS9uDdXC3TKqMJjgOWsC0yNCbpzfWoXdZvp0mY7HU6nzk5E0zpRGGx3qoQg8T2DQ==} - engines: {node: '>=12'} - cpu: [loong64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-mips64el/0.16.17: - resolution: {integrity: sha512-ezbDkp2nDl0PfIUn0CsQ30kxfcLTlcx4Foz2kYv8qdC6ia2oX5Q3E/8m6lq84Dj/6b0FrkgD582fJMIfHhJfSw==} - engines: {node: '>=12'} - cpu: [mips64el] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-ppc64/0.16.17: - resolution: {integrity: sha512-dzS678gYD1lJsW73zrFhDApLVdM3cUF2MvAa1D8K8KtcSKdLBPP4zZSLy6LFZ0jYqQdQ29bjAHJDgz0rVbLB3g==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-riscv64/0.16.17: - resolution: {integrity: sha512-ylNlVsxuFjZK8DQtNUwiMskh6nT0vI7kYl/4fZgV1llP5d6+HIeL/vmmm3jpuoo8+NuXjQVZxmKuhDApK0/cKw==} - engines: {node: '>=12'} - cpu: [riscv64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-s390x/0.16.17: - resolution: {integrity: sha512-gzy7nUTO4UA4oZ2wAMXPNBGTzZFP7mss3aKR2hH+/4UUkCOyqmjXiKpzGrY2TlEUhbbejzXVKKGazYcQTZWA/w==} - engines: {node: '>=12'} - cpu: [s390x] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-x64/0.16.17: - resolution: {integrity: sha512-mdPjPxfnmoqhgpiEArqi4egmBAMYvaObgn4poorpUaqmvzzbvqbowRllQ+ZgzGVMGKaPkqUmPDOOFQRUFDmeUw==} - engines: 
{node: '>=12'} - cpu: [x64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/netbsd-x64/0.16.17: - resolution: {integrity: sha512-/PzmzD/zyAeTUsduZa32bn0ORug+Jd1EGGAUJvqfeixoEISYpGnAezN6lnJoskauoai0Jrs+XSyvDhppCPoKOA==} - engines: {node: '>=12'} - cpu: [x64] - os: [netbsd] - requiresBuild: true - dev: true - optional: true - - /@esbuild/openbsd-x64/0.16.17: - resolution: {integrity: sha512-2yaWJhvxGEz2RiftSk0UObqJa/b+rIAjnODJgv2GbGGpRwAfpgzyrg1WLK8rqA24mfZa9GvpjLcBBg8JHkoodg==} - engines: {node: '>=12'} - cpu: [x64] - os: [openbsd] - requiresBuild: true - dev: true - optional: true - - /@esbuild/sunos-x64/0.16.17: - resolution: {integrity: sha512-xtVUiev38tN0R3g8VhRfN7Zl42YCJvyBhRKw1RJjwE1d2emWTVToPLNEQj/5Qxc6lVFATDiy6LjVHYhIPrLxzw==} - engines: {node: '>=12'} - cpu: [x64] - os: [sunos] - requiresBuild: true - dev: true - optional: true - - /@esbuild/win32-arm64/0.16.17: - resolution: {integrity: sha512-ga8+JqBDHY4b6fQAmOgtJJue36scANy4l/rL97W+0wYmijhxKetzZdKOJI7olaBaMhWt8Pac2McJdZLxXWUEQw==} - engines: {node: '>=12'} - cpu: [arm64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /@esbuild/win32-ia32/0.16.17: - resolution: {integrity: sha512-WnsKaf46uSSF/sZhwnqE4L/F89AYNMiD4YtEcYekBt9Q7nj0DiId2XH2Ng2PHM54qi5oPrQ8luuzGszqi/veig==} - engines: {node: '>=12'} - cpu: [ia32] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /@esbuild/win32-x64/0.16.17: - resolution: {integrity: sha512-y+EHuSchhL7FjHgvQL/0fnnFmO4T1bhvWANX6gcnqTjtnKWbTvUMCpGnv2+t+31d7RzyEAYAd4u2fnIhHL6N/Q==} - engines: {node: '>=12'} - cpu: [x64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /@eslint/eslintrc/1.4.1: - resolution: {integrity: sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - ajv: 6.12.6 - debug: 4.3.4 - espree: 9.4.1 - globals: 13.19.0 - ignore: 5.2.4 - import-fresh: 3.3.0 - js-yaml: 
4.1.0 - minimatch: 3.1.2 - strip-json-comments: 3.1.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@exodus/schemasafe/1.0.0-rc.9: - resolution: {integrity: sha512-dGGHpb61hLwifAu7sotuHFDBw6GTdpG8aKC0fsK17EuTzMRvUrH7lEAr6LTJ+sx3AZYed9yZ77rltVDHyg2hRg==} - dev: true - - /@fal-works/esbuild-plugin-global-externals/2.1.2: - resolution: {integrity: sha512-cEee/Z+I12mZcFJshKcCqC8tuX5hG3s+d+9nZ3LabqKF1vKdF41B92pJVCBggjAGORAeOzyyDDKrZwIkLffeOQ==} - dev: true - - /@firebase/analytics-compat/0.2.0_5z7svkifsmkn6ro3hru7lnxwrq: - resolution: {integrity: sha512-brk8IN4ErWiZoB/UdJ0mWZhQOKt90ztv4MUwQjhuYJ4iwnVMz0Mzj9+tplU1hVpSZXdfbKQFfRN9kp/3sTiyWw==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/analytics': 0.9.0_@firebase+app@0.9.0 - '@firebase/analytics-types': 0.8.0 - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - dev: false - - /@firebase/analytics-types/0.8.0: - resolution: {integrity: sha512-iRP+QKI2+oz3UAh4nPEq14CsEjrjD6a5+fuypjScisAh9kXKFvdJOZJDwk7kikLvWVLGEs9+kIUS4LPQV7VZVw==} - dev: false - - /@firebase/analytics/0.9.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-cE6JAvaGDVhn3B09VuQ5pATLCtmQg3AUSDuCmMNzWlP7+12LBarV1JcGWKIi7YQK2ks3B73wRsawi08XKwsolQ==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 - '@firebase/installations': 0.6.0_@firebase+app@0.9.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - dev: false - - /@firebase/app-check-compat/0.3.0_5z7svkifsmkn6ro3hru7lnxwrq: - resolution: {integrity: sha512-CJFHWGMvWRkkvLPTvWdLrEYnfH7WS9zFLsWctSzRjQnzg6dQUTs5FDyg9RN7BIWoaSr9q7FTxkRnsOgardDPLA==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-check': 0.6.0_@firebase+app@0.9.0 - '@firebase/app-check-types': 0.5.0 - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - 
'@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - dev: false - - /@firebase/app-check-interop-types/0.2.0: - resolution: {integrity: sha512-+3PQIeX6/eiVK+x/yg8r6xTNR97fN7MahFDm+jiQmDjcyvSefoGuTTNQuuMScGyx3vYUBeZn+Cp9kC0yY/9uxQ==} - dev: false - - /@firebase/app-check-types/0.5.0: - resolution: {integrity: sha512-uwSUj32Mlubybw7tedRzR24RP8M8JUVR3NPiMk3/Z4bCmgEKTlQBwMXrehDAZ2wF+TsBq0SN1c6ema71U/JPyQ==} - dev: false - - /@firebase/app-check/0.6.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-DevuiUQujsG18NQ1fQ1g2X+75Vp1YfSxPsw363/HE2+ABmCWHf4ByPmxEf16y4PVcqJ2MZqYv8kXZYxzRJCS4g==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - dev: false - - /@firebase/app-compat/0.2.0: - resolution: {integrity: sha512-Y8Cpuheai61jCdVflt437I94n8cdRbXY0e1dQMmTWHCShJUfWwpa5y2ZMnxClWnorXy9hC/3yNZMVlu79f1zGA==} - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - dev: false - - /@firebase/app-types/0.9.0: - resolution: {integrity: sha512-AeweANOIo0Mb8GiYm3xhTEBVCmPwTYAu9Hcd2qSkLuga/6+j9b1Jskl5bpiSQWy9eJ/j5pavxj6eYogmnuzm+Q==} - dev: false - - /@firebase/app/0.9.0: - resolution: {integrity: sha512-sa15stSK6FoGW4mCeAVDt0TvBFxPjvNcG2rhacGudOzMaW3g2TS326zXTFG+p5jnTCPZ2SO5TTSiGHn1NNcD9Q==} - dependencies: - '@firebase/component': 0.6.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - idb: 7.0.1 - tslib: 2.4.1 - dev: false - - /@firebase/auth-compat/0.3.0_z6klzwxqggigirvqix3ggnu6f4: - resolution: {integrity: sha512-tcofcrQKBOo5Wrz59onWtZDJfVW09auvG/XRh7lZ4yfEWdGerTJXmEdQU6j3E8AnJ3X91BYltNYhh0ZJOoCJqQ==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-compat': 0.2.0 - '@firebase/auth': 0.21.0_@firebase+app@0.9.0 - '@firebase/auth-types': 
0.12.0_ymjb4f6a56kabcdqyfm4cet2ly - '@firebase/component': 0.6.0 - '@firebase/util': 1.8.0 - node-fetch: 2.6.7 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - - '@firebase/app-types' - - encoding - dev: false - - /@firebase/auth-interop-types/0.2.0_ymjb4f6a56kabcdqyfm4cet2ly: - resolution: {integrity: sha512-7Mt2qzwvu5X3Qxz24gjj0qITrBsMmy1W4vGBP8TZRuQrjA4OTlGVCTG8ysvweZ3xpdl1XGhBsIjo2KjfOPg0xA==} - peerDependencies: - '@firebase/app-types': 0.x - '@firebase/util': 1.x - dependencies: - '@firebase/app-types': 0.9.0 - '@firebase/util': 1.8.0 - dev: false - - /@firebase/auth-types/0.12.0_ymjb4f6a56kabcdqyfm4cet2ly: - resolution: {integrity: sha512-pPwaZt+SPOshK8xNoiQlK5XIrS97kFYc3Rc7xmy373QsOJ9MmqXxLaYssP5Kcds4wd2qK//amx/c+A8O2fVeZA==} - peerDependencies: - '@firebase/app-types': 0.x - '@firebase/util': 1.x - dependencies: - '@firebase/app-types': 0.9.0 - '@firebase/util': 1.8.0 - dev: false - - /@firebase/auth/0.21.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-kXOQl/hyLuGKxs0r2icLsDmAyeO0uM4zV9Q+fx6VE8Ncl94TBUc/n895GSrF3RkNHdiq/DZxV/PUCZ/ozPQNKw==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - node-fetch: 2.6.7 - tslib: 2.4.1 - transitivePeerDependencies: - - encoding - dev: false - - /@firebase/component/0.6.0: - resolution: {integrity: sha512-9hyNc4OmrXMtthDJq6zyJHll/UIYBWYmMG3rXty2eMeWxHWB0vlsq3AOI+k14PL15aSBAQolv0EZJWVJv/gCEg==} - dependencies: - '@firebase/util': 1.8.0 - tslib: 2.5.0 - dev: false - - /@firebase/database-compat/0.3.0_@firebase+app-types@0.9.0: - resolution: {integrity: sha512-5kzhXdACd+RX/G8k/DKYAuiMYHDHIZ9WFV/ccVoPsC+bxIQEgPilDEtkljY5ZxiKbUj+PEOSYUfYdV/LQMJatQ==} - dependencies: - '@firebase/component': 0.6.0 - '@firebase/database': 0.14.0_@firebase+app-types@0.9.0 - '@firebase/database-types': 0.10.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - 
transitivePeerDependencies: - - '@firebase/app-types' - dev: false - - /@firebase/database-types/0.10.0: - resolution: {integrity: sha512-jZHI1fY1tm+8heLR4sbgJHtSYI2kTlSp4QTXWALwdT+dfST5OlZYsZeb+hGWeqjHEElzUnkLbw8XuZSy9Uy6rA==} - dependencies: - '@firebase/app-types': 0.9.0 - '@firebase/util': 1.8.0 - dev: false - - /@firebase/database/0.14.0_@firebase+app-types@0.9.0: - resolution: {integrity: sha512-SM5eri3eGuPjQdXBRObqKTsgmkRwrSGsbgtD43EpGzU+lIeBVLqwRzfcFialYrWzFFI5V7hWXdS2oJxAkfnBFw==} - dependencies: - '@firebase/auth-interop-types': 0.2.0_ymjb4f6a56kabcdqyfm4cet2ly - '@firebase/component': 0.6.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - faye-websocket: 0.11.4 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app-types' - dev: false - - /@firebase/firestore-compat/0.3.0_z6klzwxqggigirvqix3ggnu6f4: - resolution: {integrity: sha512-ckU4mkziDnsFKxgYv+OAJHPuNpti2RjyoeIAqz3EqRHAsYFC70U5w4aXC2Sbu2jJp3Ba2BoD7MV/4Qb2A7CJtw==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - '@firebase/firestore': 3.8.0_@firebase+app@0.9.0 - '@firebase/firestore-types': 2.5.1_ymjb4f6a56kabcdqyfm4cet2ly - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - - '@firebase/app-types' - - encoding - dev: false - - /@firebase/firestore-types/2.5.1_ymjb4f6a56kabcdqyfm4cet2ly: - resolution: {integrity: sha512-xG0CA6EMfYo8YeUxC8FeDzf6W3FX1cLlcAGBYV6Cku12sZRI81oWcu61RSKM66K6kUENP+78Qm8mvroBcm1whw==} - peerDependencies: - '@firebase/app-types': 0.x - '@firebase/util': 1.x - dependencies: - '@firebase/app-types': 0.9.0 - '@firebase/util': 1.8.0 - dev: false - - /@firebase/firestore/3.8.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-aKwfZ73FmOV8e/dN0anDtrq6+1IhX4zmjxUcXcgaypZ14q6bq0QpUdlRxjsfiUQ5m3H3MwWWIFOcT5Xa89sIkw==} - engines: {node: '>=10.10.0'} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - 
'@firebase/component': 0.6.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - '@firebase/webchannel-wrapper': 0.9.0 - '@grpc/grpc-js': 1.7.3 - '@grpc/proto-loader': 0.6.13 - node-fetch: 2.6.7 - tslib: 2.4.1 - transitivePeerDependencies: - - encoding - dev: false - - /@firebase/functions-compat/0.3.0_z6klzwxqggigirvqix3ggnu6f4: - resolution: {integrity: sha512-xOEdqOVeHXJ2ZjDbTntNGLl1lgW9umx73bWXJn9h68bSD4f9ldIVoz+h15s8i/e1pJOO/LlEp2BMvoA35U1P/Q==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - '@firebase/functions': 0.9.0_mw76ib4woycgbhoj6pqh7xkrde - '@firebase/functions-types': 0.6.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - - '@firebase/app-types' - - encoding - dev: false - - /@firebase/functions-types/0.6.0: - resolution: {integrity: sha512-hfEw5VJtgWXIRf92ImLkgENqpL6IWpYaXVYiRkFY1jJ9+6tIhWM7IzzwbevwIIud/jaxKVdRzD7QBWfPmkwCYw==} - dev: false - - /@firebase/functions/0.9.0_mw76ib4woycgbhoj6pqh7xkrde: - resolution: {integrity: sha512-na/+7uc9ViQVBadEsCVjBnbZsfUCMyS/x6SID1Nz4Z5nkhuxrls9Jcv7jc28tMqHR0VpoGq8W6oLProyjT8JPg==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/app-check-interop-types': 0.2.0 - '@firebase/auth-interop-types': 0.2.0_ymjb4f6a56kabcdqyfm4cet2ly - '@firebase/component': 0.6.0 - '@firebase/messaging-interop-types': 0.2.0 - '@firebase/util': 1.8.0 - node-fetch: 2.6.7 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app-types' - - encoding - dev: false - - /@firebase/installations-compat/0.2.0_z6klzwxqggigirvqix3ggnu6f4: - resolution: {integrity: sha512-EqCU8C9XPQN6npfTCW+6agzQ0yPLvbSCY5WROdnU1ZJfOsGFrMMVMRk42XBzah1dHBoSQYggVaixEzJUOH7zbQ==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - '@firebase/installations': 0.6.0_@firebase+app@0.9.0 - 
'@firebase/installations-types': 0.5.0_@firebase+app-types@0.9.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - - '@firebase/app-types' - dev: false - - /@firebase/installations-types/0.5.0_@firebase+app-types@0.9.0: - resolution: {integrity: sha512-9DP+RGfzoI2jH7gY4SlzqvZ+hr7gYzPODrbzVD82Y12kScZ6ZpRg/i3j6rleto8vTFC8n6Len4560FnV1w2IRg==} - peerDependencies: - '@firebase/app-types': 0.x - dependencies: - '@firebase/app-types': 0.9.0 - dev: false - - /@firebase/installations/0.6.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-Aks56ThZs1MsM0qJzJxhdeXak+Ob3tjd3JSY2poJptreLWsIOSBCxYO7Ev4yZ7DE7twMdZ0x70NhQ1ceXfdy0w==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 - '@firebase/util': 1.8.0 - idb: 7.0.1 - tslib: 2.4.1 - dev: false - - /@firebase/logger/0.4.0: - resolution: {integrity: sha512-eRKSeykumZ5+cJPdxxJRgAC3G5NknY2GwEbKfymdnXtnT0Ucm4pspfR6GT4MUQEDuJwRVbVcSx85kgJulMoFFA==} - dependencies: - tslib: 2.5.0 - dev: false - - /@firebase/messaging-compat/0.2.0_5z7svkifsmkn6ro3hru7lnxwrq: - resolution: {integrity: sha512-Qk9W9lVmTO67bR5jCaQ9HqS9MipkCuPGKCcO5JnnDd/p+Y2beWzScYxwzYGh9pEga3qzDAMSCB1PYoNgNTMzew==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - '@firebase/messaging': 0.12.0_@firebase+app@0.9.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - dev: false - - /@firebase/messaging-interop-types/0.2.0: - resolution: {integrity: sha512-ujA8dcRuVeBixGR9CtegfpU4YmZf3Lt7QYkcj693FFannwNuZgfAYaTmbJ40dtjB81SAu6tbFPL9YLNT15KmOQ==} - dev: false - - /@firebase/messaging/0.12.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-M+LWaBH392SLF7/wAH5byJrP5f1MpromUG02NIr0sbgJ6Ot2nc+qDrDGjKF4qLXFqYzhNRlhskCCdf0ClgDM0A==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 
- '@firebase/installations': 0.6.0_@firebase+app@0.9.0 - '@firebase/messaging-interop-types': 0.2.0 - '@firebase/util': 1.8.0 - idb: 7.0.1 - tslib: 2.4.1 - dev: false - - /@firebase/performance-compat/0.2.0_5z7svkifsmkn6ro3hru7lnxwrq: - resolution: {integrity: sha512-iO0fspVpiVOGxR08Y51nXoSMPH/bdRkRVQXYo4wuDDfQoZ5WZ0DXQuE0kXy3/T9QgqXdr8tSU0P0nil/jvnOcg==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - '@firebase/logger': 0.4.0 - '@firebase/performance': 0.6.0_@firebase+app@0.9.0 - '@firebase/performance-types': 0.2.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - dev: false - - /@firebase/performance-types/0.2.0: - resolution: {integrity: sha512-kYrbr8e/CYr1KLrLYZZt2noNnf+pRwDq2KK9Au9jHrBMnb0/C9X9yWSXmZkFt4UIdsQknBq8uBB7fsybZdOBTA==} - dev: false - - /@firebase/performance/0.6.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-mmCQ/8F0hQZ+J+JBvfQPlPAgKIRZccYW6N9321NbX8swd7EQP3dsW905RBmdXRsbjBpBqhn20zcQU6TDOKRwYA==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 - '@firebase/installations': 0.6.0_@firebase+app@0.9.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - dev: false - - /@firebase/remote-config-compat/0.2.0_5z7svkifsmkn6ro3hru7lnxwrq: - resolution: {integrity: sha512-2t+w4ngp1DPtZc04a6IjicbUGBpLb/MuFPlqpT8kHNqa/fNvA+ZFcAlEtHvzjS4o9rnTfjHgB+OJMgFP+r9OOw==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - '@firebase/logger': 0.4.0 - '@firebase/remote-config': 0.4.0_@firebase+app@0.9.0 - '@firebase/remote-config-types': 0.3.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - dev: false - - /@firebase/remote-config-types/0.3.0: - resolution: {integrity: 
sha512-RtEH4vdcbXZuZWRZbIRmQVBNsE7VDQpet2qFvq6vwKLBIQRQR5Kh58M4ok3A3US8Sr3rubYnaGqZSurCwI8uMA==} - dev: false - - /@firebase/remote-config/0.4.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-sedVYE4PwN4qtXfb7EkUYe9mz7hqBP/3y3c7WRMmTuh2VRNz5C5+NYULr5zySeJq+UZd6KyaS+KUOIxmx70tTw==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 - '@firebase/installations': 0.6.0_@firebase+app@0.9.0 - '@firebase/logger': 0.4.0 - '@firebase/util': 1.8.0 - tslib: 2.4.1 - dev: false - - /@firebase/storage-compat/0.2.0_z6klzwxqggigirvqix3ggnu6f4: - resolution: {integrity: sha512-w+7CyZyZ53YQWlTb8YOQ9YcmScgDwkvkXhpUbRWHlvlzAs06l0au42MydmHCeeTcSqvLOzpgURiVfm15ZifARg==} - peerDependencies: - '@firebase/app-compat': 0.x - dependencies: - '@firebase/app-compat': 0.2.0 - '@firebase/component': 0.6.0 - '@firebase/storage': 0.10.0_@firebase+app@0.9.0 - '@firebase/storage-types': 0.7.0_ymjb4f6a56kabcdqyfm4cet2ly - '@firebase/util': 1.8.0 - tslib: 2.4.1 - transitivePeerDependencies: - - '@firebase/app' - - '@firebase/app-types' - - encoding - dev: false - - /@firebase/storage-types/0.7.0_ymjb4f6a56kabcdqyfm4cet2ly: - resolution: {integrity: sha512-n/8pYd82hc9XItV3Pa2KGpnuJ/2h/n/oTAaBberhe6GeyWQPnsmwwRK94W3GxUwBA/ZsszBAYZd7w7tTE+6XXA==} - peerDependencies: - '@firebase/app-types': 0.x - '@firebase/util': 1.x - dependencies: - '@firebase/app-types': 0.9.0 - '@firebase/util': 1.8.0 - dev: false - - /@firebase/storage/0.10.0_@firebase+app@0.9.0: - resolution: {integrity: sha512-2rp7+/bQ1gkUgrqDv5qHf/vlPAOKV+a/h1tnZ8D9zN0/6wc42gqFTORJUZj/A4efVnX7Ix8MWHBe4woO/2Th0w==} - peerDependencies: - '@firebase/app': 0.x - dependencies: - '@firebase/app': 0.9.0 - '@firebase/component': 0.6.0 - '@firebase/util': 1.8.0 - node-fetch: 2.6.7 - tslib: 2.4.1 - transitivePeerDependencies: - - encoding - dev: false - - /@firebase/util/1.8.0: - resolution: {integrity: 
sha512-clK6pTTxIiLMYz4UrvDTVAs2rIaOiroAuFdX67C0JalvEwzi6Vv8li6xAGj38tkj7Qax06mosM1fQkxf2h4VTg==} - dependencies: - tslib: 2.4.1 - dev: false - - /@firebase/webchannel-wrapper/0.9.0: - resolution: {integrity: sha512-BpiZLBWdLFw+qFel9p3Zs1jD6QmH7Ii4aTDu6+vx8ShdidChZUXqDhYJly4ZjSgQh54miXbBgBrk0S+jTIh/Qg==} - dev: false - - /@floating-ui/core/1.1.0: - resolution: {integrity: sha512-zbsLwtnHo84w1Kc8rScAo5GMk1GdecSlrflIbfnEBJwvTSj1SL6kkOYV+nHraMCPEy+RNZZUaZyL8JosDGCtGQ==} - dev: false - - /@floating-ui/dom/1.1.0: - resolution: {integrity: sha512-TSogMPVxbRe77QCj1dt8NmRiJasPvuc+eT5jnJ6YpLqgOD2zXc5UA3S1qwybN+GVCDNdKfpKy1oj8RpzLJvh6A==} - dependencies: - '@floating-ui/core': 1.1.0 - dev: false - - /@floating-ui/react-dom/1.2.1_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-YCLlqibZtgUhxUpxkSp1oekvYgH/jI4KdZEJv85E62twlZHN43xdlQNe6JcF4ROD3/Zu6juNHN+aOygN+6yZjg==} - peerDependencies: - react: '>=16.8.0' - react-dom: '>=16.8.0' - dependencies: - '@floating-ui/dom': 1.1.0 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /@formatjs/ecma402-abstract/1.14.3: - resolution: {integrity: sha512-SlsbRC/RX+/zg4AApWIFNDdkLtFbkq3LNoZWXZCE/nHVKqoIJyaoQyge/I0Y38vLxowUn9KTtXgusLD91+orbg==} - dependencies: - '@formatjs/intl-localematcher': 0.2.32 - tslib: 2.4.1 - dev: false - - /@formatjs/fast-memoize/1.2.7: - resolution: {integrity: sha512-hPeM5LXUUjtCKPybWOUAWpv8lpja8Xz+uKprFPJcg5F2Rd+/bf1E0UUsLRpaAgOReAf5HMRtoIgv/UcyPICrTQ==} - dependencies: - tslib: 2.5.0 - dev: false - - /@formatjs/icu-messageformat-parser/2.1.14: - resolution: {integrity: sha512-0KqeVOb72losEhUW+59vhZGGd14s1f35uThfEMVKZHKLEObvJdFTiI3ZQwvTMUCzLEMxnS6mtnYPmG4mTvwd3Q==} - dependencies: - '@formatjs/ecma402-abstract': 1.14.3 - '@formatjs/icu-skeleton-parser': 1.3.18 - tslib: 2.4.1 - dev: false - - /@formatjs/icu-skeleton-parser/1.3.18: - resolution: {integrity: sha512-ND1ZkZfmLPcHjAH1sVpkpQxA+QYfOX3py3SjKWMUVGDow18gZ0WPqz3F+pJLYQMpS2LnnQ5zYR2jPVYTbRwMpg==} - dependencies: - 
'@formatjs/ecma402-abstract': 1.14.3 - tslib: 2.5.0 - dev: false - - /@formatjs/intl-displaynames/6.2.3: - resolution: {integrity: sha512-teB0L68MDGM8jEKQg55w7nvFjzeLHE6e3eK/04s+iuEVYYmvjjiHJKHrthKENzcJ0F6mHf/AwXrbX+1mKxT6AQ==} - dependencies: - '@formatjs/ecma402-abstract': 1.14.3 - '@formatjs/intl-localematcher': 0.2.32 - tslib: 2.4.1 - dev: false - - /@formatjs/intl-listformat/7.1.7: - resolution: {integrity: sha512-Zzf5ruPpfJnrAA2hGgf/6pMgQ3tx9oJVhpqycFDavHl3eEzrwdHddGqGdSNwhd0bB4NAFttZNQdmKDldc5iDZw==} - dependencies: - '@formatjs/ecma402-abstract': 1.14.3 - '@formatjs/intl-localematcher': 0.2.32 - tslib: 2.4.1 - dev: false - - /@formatjs/intl-localematcher/0.2.32: - resolution: {integrity: sha512-k/MEBstff4sttohyEpXxCmC3MqbUn9VvHGlZ8fauLzkbwXmVrEeyzS+4uhrvAk9DWU9/7otYWxyDox4nT/KVLQ==} - dependencies: - tslib: 2.5.0 - dev: false - - /@formatjs/intl/2.6.3_typescript@4.9.5: - resolution: {integrity: sha512-JaVZk14U/GypVfCZPevQ0KdruFkq16FXx7g398/Dm+YEx/W7sRiftbZeDy4wQ7WGryb45e763XycxD9o/vm9BA==} - peerDependencies: - typescript: ^4.7 - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@formatjs/ecma402-abstract': 1.14.3 - '@formatjs/fast-memoize': 1.2.7 - '@formatjs/icu-messageformat-parser': 2.1.14 - '@formatjs/intl-displaynames': 6.2.3 - '@formatjs/intl-listformat': 7.1.7 - intl-messageformat: 10.2.5 - tslib: 2.4.1 - typescript: 4.9.5 - dev: false - - /@fortawesome/fontawesome-common-types/6.2.1: - resolution: {integrity: sha512-Sz07mnQrTekFWLz5BMjOzHl/+NooTdW8F8kDQxjWwbpOJcnoSg4vUDng8d/WR1wOxM0O+CY9Zw0nR054riNYtQ==} - engines: {node: '>=6'} - requiresBuild: true - dev: false - - /@fortawesome/fontawesome-svg-core/6.2.1: - resolution: {integrity: sha512-HELwwbCz6C1XEcjzyT1Jugmz2NNklMrSPjZOWMlc+ZsHIVk+XOvOXLGGQtFBwSyqfJDNgRq4xBCwWOaZ/d9DEA==} - engines: {node: '>=6'} - requiresBuild: true - dependencies: - '@fortawesome/fontawesome-common-types': 6.2.1 - dev: false - - /@fortawesome/free-brands-svg-icons/6.2.1: - resolution: {integrity: 
sha512-L8l4MfdHPmZlJ72PvzdfwOwbwcCAL0vx48tJRnI6u1PJXh+j2f3yDoKyQgO3qjEsgD5Fr2tQV/cPP8F/k6aUig==} - engines: {node: '>=6'} - requiresBuild: true - dependencies: - '@fortawesome/fontawesome-common-types': 6.2.1 - dev: false - - /@fortawesome/free-regular-svg-icons/6.2.1: - resolution: {integrity: sha512-wiqcNDNom75x+pe88FclpKz7aOSqS2lOivZeicMV5KRwOAeypxEYWAK/0v+7r+LrEY30+qzh8r2XDaEHvoLsMA==} - engines: {node: '>=6'} - requiresBuild: true - dependencies: - '@fortawesome/fontawesome-common-types': 6.2.1 - dev: false - - /@fortawesome/free-solid-svg-icons/6.2.1: - resolution: {integrity: sha512-oKuqrP5jbfEPJWTij4sM+/RvgX+RMFwx3QZCZcK9PrBDgxC35zuc7AOFsyMjMd/PIFPeB2JxyqDr5zs/DZFPPw==} - engines: {node: '>=6'} - requiresBuild: true - dependencies: - '@fortawesome/fontawesome-common-types': 6.2.1 - dev: false - - /@fortawesome/react-fontawesome/0.1.19_chs37cqfjyeefvqfxhgbgy2rma: - resolution: {integrity: sha512-Hyb+lB8T18cvLNX0S3llz7PcSOAJMLwiVKBuuzwM/nI5uoBw+gQjnf9il0fR1C3DKOI5Kc79pkJ4/xB0Uw9aFQ==} - peerDependencies: - '@fortawesome/fontawesome-svg-core': ~1 || ~6 - react: '>=16.x' - dependencies: - '@fortawesome/fontawesome-svg-core': 6.2.1 - prop-types: 15.8.1 - react: 17.0.2 - dev: false - - /@grpc/grpc-js/1.7.3: - resolution: {integrity: sha512-H9l79u4kJ2PVSxUNA08HMYAnUBLj9v6KjYQ7SQ71hOZcEXhShE/y5iQCesP8+6/Ik/7i2O0a10bPquIcYfufog==} - engines: {node: ^8.13.0 || >=10.10.0} - dependencies: - '@grpc/proto-loader': 0.7.4 - '@types/node': 17.0.45 - dev: false - - /@grpc/proto-loader/0.6.13: - resolution: {integrity: sha512-FjxPYDRTn6Ec3V0arm1FtSpmP6V50wuph2yILpyvTKzjc76oDdoihXqM1DzOW5ubvCC8GivfCnNtfaRE8myJ7g==} - engines: {node: '>=6'} - hasBin: true - dependencies: - '@types/long': 4.0.2 - lodash.camelcase: 4.3.0 - long: 4.0.0 - protobufjs: 6.11.3 - yargs: 16.2.0 - dev: false - - /@grpc/proto-loader/0.7.4: - resolution: {integrity: sha512-MnWjkGwqQ3W8fx94/c1CwqLsNmHHv2t0CFn+9++6+cDphC1lolpg9M2OU0iebIjK//pBNX9e94ho+gjx6vz39w==} - engines: {node: '>=6'} - hasBin: true - 
dependencies: - '@types/long': 4.0.2 - lodash.camelcase: 4.3.0 - long: 4.0.0 - protobufjs: 7.1.2 - yargs: 16.2.0 - dev: false - - /@headlessui/react/1.7.7_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-BqDOd/tB9u2tA0T3Z0fn18ktw+KbVwMnkxxsGPIH2hzssrQhKB5n/6StZOyvLYP/FsYtvuXfi9I0YowKPv2c1w==} - engines: {node: '>=10'} - peerDependencies: - react: ^16 || ^17 || ^18 - react-dom: ^16 || ^17 || ^18 - dependencies: - client-only: 0.0.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /@humanwhocodes/config-array/0.11.8: - resolution: {integrity: sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==} - engines: {node: '>=10.10.0'} - dependencies: - '@humanwhocodes/object-schema': 1.2.1 - debug: 4.3.4 - minimatch: 3.1.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@humanwhocodes/module-importer/1.0.1: - resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} - engines: {node: '>=12.22'} - dev: true - - /@humanwhocodes/object-schema/1.2.1: - resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==} - dev: true - - /@ibm-cloud/openapi-ruleset/0.37.3: - resolution: {integrity: sha512-saQM/1YTfhW7ou/mtmC4BMUhW/UM54aD47KBZucjrZLvAelzt8Lykm5zeN59Cu4cs/LBDEcvJfyZzDpPhdcVjQ==} - engines: {node: '>=12.0.0'} - dependencies: - '@stoplight/spectral-formats': 1.4.0 - '@stoplight/spectral-functions': 1.7.2 - '@stoplight/spectral-rulesets': 1.14.1 - lodash: 4.17.21 - transitivePeerDependencies: - - encoding - dev: true - - /@istanbuljs/load-nyc-config/1.1.0: - resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} - engines: {node: '>=8'} - dependencies: - camelcase: 5.3.1 - find-up: 4.1.0 - get-package-type: 0.1.0 - js-yaml: 3.14.1 - resolve-from: 5.0.0 - dev: true - - /@istanbuljs/schema/0.1.3: - 
resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} - engines: {node: '>=8'} - dev: true - - /@jest/console/29.3.1: - resolution: {integrity: sha512-IRE6GD47KwcqA09RIWrabKdHPiKDGgtAL31xDxbi/RjQMsr+lY+ppxmHwY0dUEV3qvvxZzoe5Hl0RXZJOjQNUg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - chalk: 4.1.2 - jest-message-util: 29.3.1 - jest-util: 29.3.1 - slash: 3.0.0 - dev: true - - /@jest/core/29.3.1_ts-node@10.9.1: - resolution: {integrity: sha512-0ohVjjRex985w5MmO5L3u5GR1O30DexhBSpuwx2P+9ftyqHdJXnk7IUWiP80oHMvt7ubHCJHxV0a0vlKVuZirw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@jest/console': 29.3.1 - '@jest/reporters': 29.3.1 - '@jest/test-result': 29.3.1 - '@jest/transform': 29.3.1 - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - ci-info: 3.7.1 - exit: 0.1.2 - graceful-fs: 4.2.10 - jest-changed-files: 29.2.0 - jest-config: 29.3.1_2263m44mchjafa7bz7l52hbcpa - jest-haste-map: 29.3.1 - jest-message-util: 29.3.1 - jest-regex-util: 29.2.0 - jest-resolve: 29.3.1 - jest-resolve-dependencies: 29.3.1 - jest-runner: 29.3.1 - jest-runtime: 29.3.1 - jest-snapshot: 29.3.1 - jest-util: 29.3.1 - jest-validate: 29.3.1 - jest-watcher: 29.3.1 - micromatch: 4.0.5 - pretty-format: 29.3.1 - slash: 3.0.0 - strip-ansi: 6.0.1 - transitivePeerDependencies: - - supports-color - - ts-node - dev: true - - /@jest/environment/29.3.1: - resolution: {integrity: sha512-pMmvfOPmoa1c1QpfFW0nXYtNLpofqo4BrCIk6f2kW4JFeNlHV2t3vd+3iDLf31e2ot2Mec0uqZfmI+U0K2CFag==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/fake-timers': 29.3.1 - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - jest-mock: 29.3.1 - dev: true - - /@jest/expect-utils/29.3.1: - 
resolution: {integrity: sha512-wlrznINZI5sMjwvUoLVk617ll/UYfGIZNxmbU+Pa7wmkL4vYzhV9R2pwVqUh4NWWuLQWkI8+8mOkxs//prKQ3g==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - jest-get-type: 29.2.0 - dev: true - - /@jest/expect/29.3.1: - resolution: {integrity: sha512-QivM7GlSHSsIAWzgfyP8dgeExPRZ9BIe2LsdPyEhCGkZkoyA+kGsoIzbKAfZCvvRzfZioKwPtCZIt5SaoxYCvg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - expect: 29.3.1 - jest-snapshot: 29.3.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@jest/fake-timers/29.3.1: - resolution: {integrity: sha512-iHTL/XpnDlFki9Tq0Q1GGuVeQ8BHZGIYsvCO5eN/O/oJaRzofG9Xndd9HuSDBI/0ZS79pg0iwn07OMTQ7ngF2A==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.3.1 - '@sinonjs/fake-timers': 9.1.2 - '@types/node': 17.0.45 - jest-message-util: 29.3.1 - jest-mock: 29.3.1 - jest-util: 29.3.1 - dev: true - - /@jest/globals/29.3.1: - resolution: {integrity: sha512-cTicd134vOcwO59OPaB6AmdHQMCtWOe+/DitpTZVxWgMJ+YvXL1HNAmPyiGbSHmF/mXVBkvlm8YYtQhyHPnV6Q==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.3.1 - '@jest/expect': 29.3.1 - '@jest/types': 29.3.1 - jest-mock: 29.3.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@jest/reporters/29.3.1: - resolution: {integrity: sha512-GhBu3YFuDrcAYW/UESz1JphEAbvUjaY2vShRZRoRY1mxpCMB3yGSJ4j9n0GxVlEOdCf7qjvUfBCrTUUqhVfbRA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@bcoe/v8-coverage': 0.2.3 - '@jest/console': 29.3.1 - '@jest/test-result': 29.3.1 - '@jest/transform': 29.3.1 - '@jest/types': 29.3.1 - '@jridgewell/trace-mapping': 0.3.17 - '@types/node': 17.0.45 - chalk: 4.1.2 - collect-v8-coverage: 1.0.1 - exit: 0.1.2 - glob: 7.2.3 - graceful-fs: 4.2.10 - istanbul-lib-coverage: 3.2.0 - istanbul-lib-instrument: 
5.2.1 - istanbul-lib-report: 3.0.0 - istanbul-lib-source-maps: 4.0.1 - istanbul-reports: 3.1.5 - jest-message-util: 29.3.1 - jest-util: 29.3.1 - jest-worker: 29.3.1 - slash: 3.0.0 - string-length: 4.0.2 - strip-ansi: 6.0.1 - v8-to-istanbul: 9.0.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@jest/schemas/29.0.0: - resolution: {integrity: sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@sinclair/typebox': 0.24.51 - dev: true - - /@jest/source-map/29.2.0: - resolution: {integrity: sha512-1NX9/7zzI0nqa6+kgpSdKPK+WU1p+SJk3TloWZf5MzPbxri9UEeXX5bWZAPCzbQcyuAzubcdUHA7hcNznmRqWQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jridgewell/trace-mapping': 0.3.17 - callsites: 3.1.0 - graceful-fs: 4.2.10 - dev: true - - /@jest/test-result/29.3.1: - resolution: {integrity: sha512-qeLa6qc0ddB0kuOZyZIhfN5q0e2htngokyTWsGriedsDhItisW7SDYZ7ceOe57Ii03sL988/03wAcBh3TChMGw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/console': 29.3.1 - '@jest/types': 29.3.1 - '@types/istanbul-lib-coverage': 2.0.4 - collect-v8-coverage: 1.0.1 - dev: true - - /@jest/test-sequencer/29.3.1: - resolution: {integrity: sha512-IqYvLbieTv20ArgKoAMyhLHNrVHJfzO6ARZAbQRlY4UGWfdDnLlZEF0BvKOMd77uIiIjSZRwq3Jb3Fa3I8+2UA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/test-result': 29.3.1 - graceful-fs: 4.2.10 - jest-haste-map: 29.3.1 - slash: 3.0.0 - dev: true - - /@jest/transform/29.3.1: - resolution: {integrity: sha512-8wmCFBTVGYqFNLWfcOWoVuMuKYPUBTnTMDkdvFtAYELwDOl9RGwOsvQWGPFxDJ8AWY9xM/8xCXdqmPK3+Q5Lug==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/core': 7.20.12 - '@jest/types': 29.3.1 - '@jridgewell/trace-mapping': 0.3.17 - babel-plugin-istanbul: 6.1.1 - chalk: 4.1.2 - convert-source-map: 2.0.0 - fast-json-stable-stringify: 2.1.0 - graceful-fs: 
4.2.10 - jest-haste-map: 29.3.1 - jest-regex-util: 29.2.0 - jest-util: 29.3.1 - micromatch: 4.0.5 - pirates: 4.0.5 - slash: 3.0.0 - write-file-atomic: 4.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@jest/types/29.3.1: - resolution: {integrity: sha512-d0S0jmmTpjnhCmNpApgX3jrUZgZ22ivKJRvL2lli5hpCRoNnp1f85r2/wpKfXuYu8E7Jjh1hGfhPyup1NM5AmA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/schemas': 29.0.0 - '@types/istanbul-lib-coverage': 2.0.4 - '@types/istanbul-reports': 3.0.1 - '@types/node': 17.0.45 - '@types/yargs': 17.0.19 - chalk: 4.1.2 - dev: true - - /@joshwooding/vite-plugin-react-docgen-typescript/0.2.1_egung5nfepmolqa7uavvqho3gq: - resolution: {integrity: sha512-ou4ZJSXMMWHqGS4g8uNRbC5TiTWxAgQZiVucoUrOCWuPrTbkpJbmVyIi9jU72SBry7gQtuMEDp4YR8EEXAg7VQ==} - peerDependencies: - typescript: '>= 4.3.x' - vite: ^3.0.0 || ^4.0.0 - peerDependenciesMeta: - typescript: - optional: true - dependencies: - glob: 7.2.3 - glob-promise: 4.2.2_glob@7.2.3 - magic-string: 0.27.0 - react-docgen-typescript: 2.2.2_typescript@4.9.5 - typescript: 4.9.5 - vite: 4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a - dev: true - - /@jridgewell/gen-mapping/0.1.1: - resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==} - engines: {node: '>=6.0.0'} - dependencies: - '@jridgewell/set-array': 1.1.2 - '@jridgewell/sourcemap-codec': 1.4.14 - - /@jridgewell/gen-mapping/0.3.2: - resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==} - engines: {node: '>=6.0.0'} - dependencies: - '@jridgewell/set-array': 1.1.2 - '@jridgewell/sourcemap-codec': 1.4.14 - '@jridgewell/trace-mapping': 0.3.17 - - /@jridgewell/resolve-uri/3.1.0: - resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} - engines: {node: '>=6.0.0'} - - /@jridgewell/set-array/1.1.2: - resolution: {integrity: 
sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} - engines: {node: '>=6.0.0'} - - /@jridgewell/sourcemap-codec/1.4.14: - resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} - - /@jridgewell/trace-mapping/0.3.17: - resolution: {integrity: sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==} - dependencies: - '@jridgewell/resolve-uri': 3.1.0 - '@jridgewell/sourcemap-codec': 1.4.14 - - /@jridgewell/trace-mapping/0.3.9: - resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - dependencies: - '@jridgewell/resolve-uri': 3.1.0 - '@jridgewell/sourcemap-codec': 1.4.14 - dev: true - - /@jsdevtools/ono/7.1.3: - resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} - dev: true - - /@jsep-plugin/regex/1.0.3_jsep@1.3.8: - resolution: {integrity: sha512-XfZgry4DwEZvSFtS/6Y+R48D7qJYJK6R9/yJFyUFHCIUMEEHuJ4X95TDgJp5QkmzfLYvapMPzskV5HpIDrREug==} - engines: {node: '>= 10.16.0'} - peerDependencies: - jsep: ^0.4.0||^1.0.0 - dependencies: - jsep: 1.3.8 - dev: true - - /@jsep-plugin/ternary/1.1.3_jsep@1.3.8: - resolution: {integrity: sha512-qtLGzCNzPVJ3kdH6/zoLWDPjauHIKiLSBAR71Wa0+PWvGA8wODUQvRgxtpUA5YqAYL3CQ8S4qXhd/9WuWTZirg==} - engines: {node: '>= 10.16.0'} - peerDependencies: - jsep: ^0.4.0||^1.0.0 - dependencies: - jsep: 1.3.8 - dev: true - - /@mattiasbuelens/web-streams-polyfill/0.2.1: - resolution: {integrity: sha512-oKuFCQFa3W7Hj7zKn0+4ypI8JFm4ZKIoncwAC6wd5WwFW2sL7O1hpPoJdSWpynQ4DJ4lQ6MvFoVDmCLilonDFg==} - engines: {node: '>= 8'} - deprecated: moved to web-streams-polyfill@2.0.0 - dependencies: - '@types/whatwg-streams': 0.0.7 - dev: false - - /@mdx-js/react/2.2.1_react@17.0.2: - resolution: {integrity: 
sha512-YdXcMcEnqZhzql98RNrqYo9cEhTTesBiCclEtoiQUbJwx87q9453GTapYU6kJ8ZZ2ek1Vp25SiAXEFy5O/eAPw==} - peerDependencies: - react: '>=16' - dependencies: - '@types/mdx': 2.0.3 - '@types/react': 17.0.52 - react: 17.0.2 - dev: true - - /@monaco-editor/loader/1.3.2_monaco-editor@0.34.1: - resolution: {integrity: sha512-BTDbpHl3e47r3AAtpfVFTlAi7WXv4UQ/xZmz8atKl4q7epQV5e7+JbigFDViWF71VBi4IIBdcWP57Hj+OWuc9g==} - peerDependencies: - monaco-editor: '>= 0.21.0 < 1' - dependencies: - monaco-editor: 0.34.1 - state-local: 1.0.7 - dev: false - - /@monaco-editor/react/4.4.6_5rrpgbvkp3saaogcekwkrw2jia: - resolution: {integrity: sha512-Gr3uz3LYf33wlFE3eRnta4RxP5FSNxiIV9ENn2D2/rN8KgGAD8ecvcITRtsbbyuOuNkwbuHYxfeaz2Vr+CtyFA==} - peerDependencies: - monaco-editor: '>= 0.25.0 < 1' - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@monaco-editor/loader': 1.3.2_monaco-editor@0.34.1 - monaco-editor: 0.34.1 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /@motionone/animation/10.15.1: - resolution: {integrity: sha512-mZcJxLjHor+bhcPuIFErMDNyrdb2vJur8lSfMCsuCB4UyV8ILZLvK+t+pg56erv8ud9xQGK/1OGPt10agPrCyQ==} - dependencies: - '@motionone/easing': 10.15.1 - '@motionone/types': 10.15.1 - '@motionone/utils': 10.15.1 - tslib: 2.5.0 - dev: false - - /@motionone/dom/10.12.0: - resolution: {integrity: sha512-UdPTtLMAktHiqV0atOczNYyDd/d8Cf5fFsd1tua03PqTwwCe/6lwhLSQ8a7TbnQ5SN0gm44N1slBfj+ORIhrqw==} - dependencies: - '@motionone/animation': 10.15.1 - '@motionone/generators': 10.15.1 - '@motionone/types': 10.15.1 - '@motionone/utils': 10.15.1 - hey-listen: 1.0.8 - tslib: 2.4.1 - dev: false - - /@motionone/easing/10.15.1: - resolution: {integrity: sha512-6hIHBSV+ZVehf9dcKZLT7p5PEKHGhDwky2k8RKkmOvUoYP3S+dXsKupyZpqx5apjd9f+php4vXk4LuS+ADsrWw==} - dependencies: - '@motionone/utils': 10.15.1 - tslib: 2.5.0 - dev: false - - /@motionone/generators/10.15.1: - resolution: {integrity: 
sha512-67HLsvHJbw6cIbLA/o+gsm7h+6D4Sn7AUrB/GPxvujse1cGZ38F5H7DzoH7PhX+sjvtDnt2IhFYF2Zp1QTMKWQ==} - dependencies: - '@motionone/types': 10.15.1 - '@motionone/utils': 10.15.1 - tslib: 2.5.0 - dev: false - - /@motionone/types/10.15.1: - resolution: {integrity: sha512-iIUd/EgUsRZGrvW0jqdst8st7zKTzS9EsKkP+6c6n4MPZoQHwiHuVtTQLD6Kp0bsBLhNzKIBlHXponn/SDT4hA==} - dev: false - - /@motionone/utils/10.15.1: - resolution: {integrity: sha512-p0YncgU+iklvYr/Dq4NobTRdAPv9PveRDUXabPEeOjBLSO/1FNB2phNTZxOxpi1/GZwYpAoECEa0Wam+nsmhSw==} - dependencies: - '@motionone/types': 10.15.1 - hey-listen: 1.0.8 - tslib: 2.5.0 - dev: false - - /@nicolo-ribaudo/eslint-scope-5-internals/5.1.1-v1: - resolution: {integrity: sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg==} - dependencies: - eslint-scope: 5.1.1 - dev: true - - /@nodelib/fs.scandir/2.1.5: - resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} - engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - run-parallel: 1.2.0 - dev: true - - /@nodelib/fs.stat/2.0.5: - resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} - engines: {node: '>= 8'} - dev: true - - /@nodelib/fs.walk/1.2.8: - resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} - engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.scandir': 2.1.5 - fastq: 1.15.0 - dev: true - - /@orval/angular/6.11.1: - resolution: {integrity: sha512-L1sdQ7b7cq5jtL2OBacpyQ2MwGRUcWm1U5BEJg0R+K3eWN7LfFAQk99karMEbre1C5U/s2BXh/hxmWHK24i1+A==} - dependencies: - '@orval/core': 6.11.1 - transitivePeerDependencies: - - encoding - - openapi-types - - supports-color - dev: true - - /@orval/axios/6.11.1: - resolution: {integrity: sha512-iemXAYoTuguCIgzK4yHRuUWNxIYk8umpI7FYNYCk+YZ1MJfWa+GAKeem9Fzb3EbmWWvlH4bNfFkXsygvorS1nw==} - dependencies: - 
'@orval/core': 6.11.1 - transitivePeerDependencies: - - encoding - - openapi-types - - supports-color - dev: true - - /@orval/core/6.11.1: - resolution: {integrity: sha512-0d8K4gmeMovYYdagRQ5pj0KsgtB8JKDYP/RjzQfr40ootAjbP6egaAjhSzogv0vfSr5GDwNPPzD+S0cK6pQsAw==} - dependencies: - '@apidevtools/swagger-parser': 10.1.0 - acorn: 8.8.2 - ajv: 8.12.0 - chalk: 4.1.2 - compare-versions: 4.1.4 - debug: 4.3.4 - esbuild: 0.15.18 - esutils: 2.0.3 - fs-extra: 10.1.0 - globby: 11.1.0 - ibm-openapi-validator: 0.88.3 - lodash.get: 4.4.2 - lodash.isempty: 4.4.0 - lodash.omit: 4.5.0 - lodash.uniq: 4.5.0 - lodash.uniqby: 4.7.0 - lodash.uniqwith: 4.5.0 - micromatch: 4.0.5 - openapi3-ts: 3.1.2 - swagger2openapi: 7.0.8 - validator: 13.7.0 - transitivePeerDependencies: - - encoding - - openapi-types - - supports-color - dev: true - - /@orval/msw/6.11.1: - resolution: {integrity: sha512-1WA9nLBhvBTu0MZ0V+UAlipNMkOmhy0vv2RfcAGrqiq8TCHkdVvVOZvrIuQkJxYkELEBAXTPfGnIzqCVwRkwiA==} - dependencies: - '@orval/core': 6.11.1 - cuid: 2.1.8 - lodash.get: 4.4.2 - lodash.omit: 4.5.0 - openapi3-ts: 3.1.2 - transitivePeerDependencies: - - encoding - - openapi-types - - supports-color - dev: true - - /@orval/query/6.11.1: - resolution: {integrity: sha512-FV5mGQ9fpyqPJl82AkGvft2WQGr0GLyrVp7k7Nrlr0I2j+F0PByvlvtvTbS0CCXY9m9ot6tE3NDXkISZEczG4w==} - dependencies: - '@orval/core': 6.11.1 - lodash.omitby: 4.6.0 - transitivePeerDependencies: - - encoding - - openapi-types - - supports-color - dev: true - - /@orval/swr/6.11.1: - resolution: {integrity: sha512-Oi/cgNzFWU0ukXoq5RselfJLcoy9l+MmLpmgnqTghXewRq67q1iIc5ee5/8Kf9m36dLCfb7uk8m1i3e6F5LhwA==} - dependencies: - '@orval/core': 6.11.1 - transitivePeerDependencies: - - encoding - - openapi-types - - supports-color - dev: true - - /@popperjs/core/2.11.6: - resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} - - /@protobufjs/aspromise/1.1.2: - resolution: {integrity: 
sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} - dev: false - - /@protobufjs/base64/1.1.2: - resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} - dev: false - - /@protobufjs/codegen/2.0.4: - resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} - dev: false - - /@protobufjs/eventemitter/1.1.0: - resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} - dev: false - - /@protobufjs/fetch/1.1.0: - resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/inquire': 1.1.0 - dev: false - - /@protobufjs/float/1.0.2: - resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} - dev: false - - /@protobufjs/inquire/1.1.0: - resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} - dev: false - - /@protobufjs/path/1.1.2: - resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} - dev: false - - /@protobufjs/pool/1.1.0: - resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} - dev: false - - /@protobufjs/utf8/1.1.0: - resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} - dev: false - - /@rollup/plugin-commonjs/22.0.2_rollup@2.79.1: - resolution: {integrity: sha512-//NdP6iIwPbMTcazYsiBMbJW7gfmpHom33u1beiIoHDEM0Q9clvtQB1T0efvMqHeKsGohiHo97BCPCkBXdscwg==} - engines: {node: '>= 12.0.0'} - peerDependencies: - rollup: ^2.68.0 - dependencies: - '@rollup/pluginutils': 3.1.0_rollup@2.79.1 - 
commondir: 1.0.1 - estree-walker: 2.0.2 - glob: 7.2.3 - is-reference: 1.2.1 - magic-string: 0.25.9 - resolve: 1.22.1 - rollup: 2.79.1 - dev: true - - /@rollup/pluginutils/3.1.0_rollup@2.79.1: - resolution: {integrity: sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==} - engines: {node: '>= 8.0.0'} - peerDependencies: - rollup: ^1.20.0||^2.0.0 - dependencies: - '@types/estree': 0.0.39 - estree-walker: 1.0.1 - picomatch: 2.3.1 - rollup: 2.79.1 - dev: true - - /@rollup/pluginutils/4.2.1: - resolution: {integrity: sha512-iKnFXr7NkdZAIHiIWE+BX5ULi/ucVFYWD6TbAV+rZctiRTY2PL6tsIKhoIOaoskiWAkgu+VsbXgUVDNLHf+InQ==} - engines: {node: '>= 8.0.0'} - dependencies: - estree-walker: 2.0.2 - picomatch: 2.3.1 - dev: true - - /@rollup/pluginutils/5.0.2: - resolution: {integrity: sha512-pTd9rIsP92h+B6wWwFbW8RkZv4hiR/xKsqre4SIuAOaOEQRxi0lqLke9k2/7WegC85GgUs9pjmOjCUi3In4vwA==} - engines: {node: '>=14.0.0'} - peerDependencies: - rollup: ^1.20.0||^2.0.0||^3.0.0 - peerDependenciesMeta: - rollup: - optional: true - dependencies: - '@types/estree': 1.0.0 - estree-walker: 2.0.2 - picomatch: 2.3.1 - dev: true - - /@rushstack/eslint-patch/1.2.0: - resolution: {integrity: sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==} - dev: true - - /@sentry/browser/6.19.7: - resolution: {integrity: sha512-oDbklp4O3MtAM4mtuwyZLrgO1qDVYIujzNJQzXmi9YzymJCuzMLSRDvhY83NNDCRxf0pds4DShgYeZdbSyKraA==} - engines: {node: '>=6'} - dependencies: - '@sentry/core': 6.19.7 - '@sentry/types': 6.19.7 - '@sentry/utils': 6.19.7 - tslib: 1.14.1 - dev: false - - /@sentry/core/6.19.7: - resolution: {integrity: sha512-tOfZ/umqB2AcHPGbIrsFLcvApdTm9ggpi/kQZFkej7kMphjT+SGBiQfYtjyg9jcRW+ilAR4JXC9BGKsdEQ+8Vw==} - engines: {node: '>=6'} - dependencies: - '@sentry/hub': 6.19.7 - '@sentry/minimal': 6.19.7 - '@sentry/types': 6.19.7 - '@sentry/utils': 6.19.7 - tslib: 1.14.1 - dev: false - - /@sentry/hub/6.19.7: - resolution: {integrity: 
sha512-y3OtbYFAqKHCWezF0EGGr5lcyI2KbaXW2Ik7Xp8Mu9TxbSTuwTe4rTntwg8ngPjUQU3SUHzgjqVB8qjiGqFXCA==} - engines: {node: '>=6'} - dependencies: - '@sentry/types': 6.19.7 - '@sentry/utils': 6.19.7 - tslib: 1.14.1 - dev: false - - /@sentry/minimal/6.19.7: - resolution: {integrity: sha512-wcYmSJOdvk6VAPx8IcmZgN08XTXRwRtB1aOLZm+MVHjIZIhHoBGZJYTVQS/BWjldsamj2cX3YGbGXNunaCfYJQ==} - engines: {node: '>=6'} - dependencies: - '@sentry/hub': 6.19.7 - '@sentry/types': 6.19.7 - tslib: 1.14.1 - dev: false - - /@sentry/react/6.19.7_react@17.0.2: - resolution: {integrity: sha512-VzJeBg/v41jfxUYPkH2WYrKjWc4YiMLzDX0f4Zf6WkJ4v3IlDDSkX6DfmWekjTKBho6wiMkSNy2hJ1dHfGZ9jA==} - engines: {node: '>=6'} - peerDependencies: - react: 15.x || 16.x || 17.x || 18.x - dependencies: - '@sentry/browser': 6.19.7 - '@sentry/minimal': 6.19.7 - '@sentry/types': 6.19.7 - '@sentry/utils': 6.19.7 - hoist-non-react-statics: 3.3.2 - react: 17.0.2 - tslib: 1.14.1 - dev: false - - /@sentry/tracing/6.19.7: - resolution: {integrity: sha512-ol4TupNnv9Zd+bZei7B6Ygnr9N3Gp1PUrNI761QSlHtPC25xXC5ssSD3GMhBgyQrcvpuRcCFHVNNM97tN5cZiA==} - engines: {node: '>=6'} - dependencies: - '@sentry/hub': 6.19.7 - '@sentry/minimal': 6.19.7 - '@sentry/types': 6.19.7 - '@sentry/utils': 6.19.7 - tslib: 1.14.1 - dev: false - - /@sentry/types/6.19.7: - resolution: {integrity: sha512-jH84pDYE+hHIbVnab3Hr+ZXr1v8QABfhx39KknxqKWr2l0oEItzepV0URvbEhB446lk/S/59230dlUUIBGsXbg==} - engines: {node: '>=6'} - dev: false - - /@sentry/utils/6.19.7: - resolution: {integrity: sha512-z95ECmE3i9pbWoXQrD/7PgkBAzJYR+iXtPuTkpBjDKs86O3mT+PXOT3BAn79w2wkn7/i3vOGD2xVr1uiMl26dA==} - engines: {node: '>=6'} - dependencies: - '@sentry/types': 6.19.7 - tslib: 1.14.1 - dev: false - - /@sinclair/typebox/0.24.51: - resolution: {integrity: sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA==} - dev: true - - /@sinonjs/commons/1.8.6: - resolution: {integrity: 
sha512-Ky+XkAkqPZSm3NLBeUng77EBQl3cmeJhITaGHdYH8kjVB+aun3S4XBRti2zt17mtt0mIUDiNxYeoJm6drVvBJQ==} - dependencies: - type-detect: 4.0.8 - dev: true - - /@sinonjs/fake-timers/9.1.2: - resolution: {integrity: sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw==} - dependencies: - '@sinonjs/commons': 1.8.6 - dev: true - - /@stoplight/better-ajv-errors/1.0.3_ajv@8.12.0: - resolution: {integrity: sha512-0p9uXkuB22qGdNfy3VeEhxkU5uwvp/KrBTAbrLBURv6ilxIVwanKwjMc41lQfIVgPGcOkmLbTolfFrSsueu7zA==} - engines: {node: ^12.20 || >= 14.13} - peerDependencies: - ajv: '>=8' - dependencies: - ajv: 8.12.0 - jsonpointer: 5.0.1 - leven: 3.1.0 - dev: true - - /@stoplight/json-ref-readers/1.2.2: - resolution: {integrity: sha512-nty0tHUq2f1IKuFYsLM4CXLZGHdMn+X/IwEUIpeSOXt0QjMUbL0Em57iJUDzz+2MkWG83smIigNZ3fauGjqgdQ==} - engines: {node: '>=8.3.0'} - dependencies: - node-fetch: 2.6.9 - tslib: 1.14.1 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/json-ref-resolver/3.1.5: - resolution: {integrity: sha512-uaKLITor7UF+JBtI84zs3aOWM0L79zp7w9TrBTwPtx5SLbaQQ4HadDKgX5yhFOLMApLdhwhiftF4c0GFanOxGg==} - engines: {node: '>=8.3.0'} - dependencies: - '@stoplight/json': 3.20.1 - '@stoplight/path': 1.3.2 - '@stoplight/types': 13.8.0 - '@types/urijs': 1.19.19 - dependency-graph: 0.11.0 - fast-memoize: 2.5.2 - immer: 9.0.19 - lodash: 4.17.21 - tslib: 2.5.0 - urijs: 1.19.11 - dev: true - - /@stoplight/json/3.20.1: - resolution: {integrity: sha512-FXfud+uWgIj1xv6nUO9WnmgmnVikaxJcbtR4XQt4C42n5c2qua3U05Z/3B57hP5TJRSj+tpn9ID6/bFeyYYlEg==} - engines: {node: '>=8.3.0'} - dependencies: - '@stoplight/ordered-object-literal': 1.0.4 - '@stoplight/path': 1.3.2 - '@stoplight/types': 13.8.0 - jsonc-parser: 2.2.1 - lodash: 4.17.21 - safe-stable-stringify: 1.1.1 - dev: true - - /@stoplight/ordered-object-literal/1.0.4: - resolution: {integrity: sha512-OF8uib1jjDs5/cCU+iOVy+GJjU3X7vk/qJIkIJFqwmlJKrrtijFmqwbu8XToXrwTYLQTP+Hebws5gtZEmk9jag==} - engines: {node: 
'>=8'} - dev: true - - /@stoplight/path/1.3.2: - resolution: {integrity: sha512-lyIc6JUlUA8Ve5ELywPC8I2Sdnh1zc1zmbYgVarhXIp9YeAB0ReeqmGEOWNtlHkbP2DAA1AL65Wfn2ncjK/jtQ==} - engines: {node: '>=8'} - dev: true - - /@stoplight/spectral-cli/6.6.0: - resolution: {integrity: sha512-z46fnrvraaWMio8Y9RYYkLO+XdmtxOWpy5qNJF3CsmWua0FZ4iOTryb5Cm3GkB0wEtqxNUCBUHvoo4hS6Noyqg==} - engines: {node: ^12.20 || >= 14.13} - hasBin: true - dependencies: - '@stoplight/json': 3.20.1 - '@stoplight/path': 1.3.2 - '@stoplight/spectral-core': 1.16.0 - '@stoplight/spectral-parsers': 1.0.2 - '@stoplight/spectral-ref-resolver': 1.0.2 - '@stoplight/spectral-ruleset-bundler': 1.5.0 - '@stoplight/spectral-ruleset-migrator': 1.9.1 - '@stoplight/spectral-rulesets': 1.14.1 - '@stoplight/spectral-runtime': 1.1.2 - '@stoplight/types': 13.8.0 - chalk: 4.1.2 - cliui: 7.0.4 - eol: 0.9.1 - fast-glob: 3.2.7 - lodash: 4.17.21 - pony-cause: 1.1.1 - proxy-agent: 5.0.0 - stacktracey: 2.1.8 - strip-ansi: 6.0.1 - text-table: 0.2.0 - tslib: 2.5.0 - yargs: 17.3.1 - transitivePeerDependencies: - - encoding - - supports-color - dev: true - - /@stoplight/spectral-core/1.16.0: - resolution: {integrity: sha512-W/NG+wV2UffwLExboqEa04/JbjGhiSTOl7GghLWYP4NKxZGaO6karP6fIxRBOnm34n1qyoZv9thsjSe92MWcDw==} - engines: {node: ^12.20 || >= 14.13} - dependencies: - '@stoplight/better-ajv-errors': 1.0.3_ajv@8.12.0 - '@stoplight/json': 3.20.1 - '@stoplight/path': 1.3.2 - '@stoplight/spectral-parsers': 1.0.2 - '@stoplight/spectral-ref-resolver': 1.0.3 - '@stoplight/spectral-runtime': 1.1.2 - '@stoplight/types': 13.6.0 - '@types/es-aggregate-error': 1.0.2 - '@types/json-schema': 7.0.11 - ajv: 8.12.0 - ajv-errors: 3.0.0_ajv@8.12.0 - ajv-formats: 2.1.1_ajv@8.12.0 - es-aggregate-error: 1.0.9 - jsonpath-plus: 7.1.0 - lodash: 4.17.21 - lodash.topath: 4.5.2 - minimatch: 3.1.2 - nimma: 0.2.2 - pony-cause: 1.1.1 - simple-eval: 1.0.0 - tslib: 2.5.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/spectral-formats/1.4.0: - 
resolution: {integrity: sha512-j9VQukDzgqDSi26rK9LqsbXrqtkeIsPSPgEf5/sxRsmeF2bwWUhSjYXgYin4flSZ7owFZjZWQ3o0Qq3iApi2JQ==} - engines: {node: '>=12'} - dependencies: - '@stoplight/json': 3.20.1 - '@stoplight/spectral-core': 1.16.0 - '@types/json-schema': 7.0.11 - tslib: 2.5.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/spectral-functions/1.7.2: - resolution: {integrity: sha512-f+61/FtIkQeIo+a269CeaeqjpyRsgDyIk6DGr7iS4hyuk1PPk7Uf6MNRDs9FEIBh7CpdEJ+HSHbMLwgpymWTIw==} - engines: {node: '>=12'} - dependencies: - '@stoplight/better-ajv-errors': 1.0.3_ajv@8.12.0 - '@stoplight/json': 3.20.1 - '@stoplight/spectral-core': 1.16.0 - '@stoplight/spectral-formats': 1.4.0 - '@stoplight/spectral-runtime': 1.1.2 - ajv: 8.12.0 - ajv-draft-04: 1.0.0_ajv@8.12.0 - ajv-errors: 3.0.0_ajv@8.12.0 - ajv-formats: 2.1.1_ajv@8.12.0 - lodash: 4.17.21 - tslib: 2.5.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/spectral-parsers/1.0.2: - resolution: {integrity: sha512-ZQXknJ+BM5Re4Opj4cgVlHgG2qyOk/wznKJq3Vf1qsBEg2CNzN0pJmSB0deRqW0kArqm44qpb8c+cz3F2rgMtw==} - engines: {node: '>=12'} - dependencies: - '@stoplight/json': 3.20.1 - '@stoplight/types': 13.8.0 - '@stoplight/yaml': 4.2.3 - tslib: 2.5.0 - dev: true - - /@stoplight/spectral-ref-resolver/1.0.2: - resolution: {integrity: sha512-ah6NIB/O1EdEaEu89So3LmtbKRXPVnSElgQ7oBRE9S4/VOedSqyXn+qqMd40tGnO2CsKgZaFUYXdSEHOshpHYw==} - engines: {node: '>=12'} - dependencies: - '@stoplight/json-ref-readers': 1.2.2 - '@stoplight/json-ref-resolver': 3.1.5 - '@stoplight/spectral-runtime': 1.1.2 - dependency-graph: 0.11.0 - tslib: 2.5.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/spectral-ref-resolver/1.0.3: - resolution: {integrity: sha512-pj+bH4SH8hcWlnV787WD7P0/En7LA3EfZMvG1JUGMW/7bFd9AaZZXNkh5j0ve8qnPlwP8F4SH/2Cnr1tXOXCVw==} - engines: {node: '>=12'} - dependencies: - '@stoplight/json-ref-readers': 1.2.2 - '@stoplight/json-ref-resolver': 3.1.5 - '@stoplight/spectral-runtime': 1.1.2 - 
dependency-graph: 0.11.0 - tslib: 2.5.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/spectral-ruleset-bundler/1.5.0: - resolution: {integrity: sha512-I1ZbhnJtRTi0lG6oXA1r8J6KLxoZKkNB3aSdrNJJTHoo/AccMSMhV4ey8zbLsYNsJ/9ywR5ttkBAbyGuo3Jtxg==} - engines: {node: ^12.20 || >= 14.13} - dependencies: - '@rollup/plugin-commonjs': 22.0.2_rollup@2.79.1 - '@stoplight/path': 1.3.2 - '@stoplight/spectral-core': 1.16.0 - '@stoplight/spectral-formats': 1.4.0 - '@stoplight/spectral-functions': 1.7.2 - '@stoplight/spectral-parsers': 1.0.2 - '@stoplight/spectral-ref-resolver': 1.0.2 - '@stoplight/spectral-ruleset-migrator': 1.9.1 - '@stoplight/spectral-rulesets': 1.14.1 - '@stoplight/spectral-runtime': 1.1.2 - '@stoplight/types': 13.8.0 - '@types/node': 17.0.45 - pony-cause: 1.1.1 - rollup: 2.79.1 - tslib: 2.5.0 - validate-npm-package-name: 3.0.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/spectral-ruleset-migrator/1.9.1: - resolution: {integrity: sha512-TiH7UZIuHX+yb6EsWA9Z2ou455Wtki3z7SCkVRgd7WdzkD7O13R8ywqKoCUJ44UP7iuo1Ejnog18Rw4qJJE/fg==} - engines: {node: '>=12'} - dependencies: - '@stoplight/json': 3.20.1 - '@stoplight/ordered-object-literal': 1.0.4 - '@stoplight/path': 1.3.2 - '@stoplight/spectral-functions': 1.7.2 - '@stoplight/spectral-runtime': 1.1.2 - '@stoplight/types': 13.8.0 - '@stoplight/yaml': 4.2.3 - '@types/node': 17.0.45 - ajv: 8.12.0 - ast-types: 0.14.2 - astring: 1.8.4 - reserved: 0.1.2 - tslib: 2.5.0 - validate-npm-package-name: 3.0.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/spectral-rulesets/1.14.1: - resolution: {integrity: sha512-tn6a5fYPFDwEY+/YyK/hcq2gcR5nSIBt7l+JGELb/2RdTzD5ikj2mfl2ua3uxbqOZytftFoOX5ewGZ0qQNrudw==} - engines: {node: '>=12'} - dependencies: - '@asyncapi/specs': 3.2.1 - '@stoplight/better-ajv-errors': 1.0.3_ajv@8.12.0 - '@stoplight/json': 3.20.1 - '@stoplight/spectral-core': 1.16.0 - '@stoplight/spectral-formats': 1.4.0 - '@stoplight/spectral-functions': 1.7.2 
- '@stoplight/spectral-runtime': 1.1.2 - '@stoplight/types': 13.8.0 - '@types/json-schema': 7.0.11 - ajv: 8.12.0 - ajv-formats: 2.1.1_ajv@8.12.0 - json-schema-traverse: 1.0.0 - lodash: 4.17.21 - tslib: 2.5.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/spectral-runtime/1.1.2: - resolution: {integrity: sha512-fr5zRceXI+hrl82yAVoME+4GvJie8v3wmOe9tU+ZLRRNonizthy8qDi0Z/z4olE+vGreSDcuDOZ7JjRxFW5kTw==} - engines: {node: '>=12'} - dependencies: - '@stoplight/json': 3.20.1 - '@stoplight/path': 1.3.2 - '@stoplight/types': 12.5.0 - abort-controller: 3.0.0 - lodash: 4.17.21 - node-fetch: 2.6.9 - tslib: 2.5.0 - transitivePeerDependencies: - - encoding - dev: true - - /@stoplight/types/12.5.0: - resolution: {integrity: sha512-dwqYcDrGmEyUv5TWrDam5TGOxU72ufyQ7hnOIIDdmW5ezOwZaBFoR5XQ9AsH49w7wgvOqB2Bmo799pJPWnpCbg==} - engines: {node: '>=8'} - dependencies: - '@types/json-schema': 7.0.11 - utility-types: 3.10.0 - dev: true - - /@stoplight/types/13.6.0: - resolution: {integrity: sha512-dzyuzvUjv3m1wmhPfq82lCVYGcXG0xUYgqnWfCq3PCVR4BKFhjdkHrnJ+jIDoMKvXb05AZP/ObQF6+NpDo29IQ==} - engines: {node: ^12.20 || >=14.13} - dependencies: - '@types/json-schema': 7.0.11 - utility-types: 3.10.0 - dev: true - - /@stoplight/types/13.8.0: - resolution: {integrity: sha512-5glKswz7y9aACh+a+JegID+4xX//4TsIdv7iPl29hWnOoWrnlPbg3Gjc4nYUXXgMSaSlSsA15JU/0+rE89fR4A==} - engines: {node: ^12.20 || >=14.13} - dependencies: - '@types/json-schema': 7.0.11 - utility-types: 3.10.0 - dev: true - - /@stoplight/yaml-ast-parser/0.0.48: - resolution: {integrity: sha512-sV+51I7WYnLJnKPn2EMWgS4EUfoP4iWEbrWwbXsj0MZCB/xOK8j6+C9fntIdOM50kpx45ZLC3s6kwKivWuqvyg==} - dev: true - - /@stoplight/yaml/4.2.3: - resolution: {integrity: sha512-Mx01wjRAR9C7yLMUyYFTfbUf5DimEpHMkRDQ1PKLe9dfNILbgdxyrncsOXM3vCpsQ1Hfj4bPiGl+u4u6e9Akqw==} - engines: {node: '>=10.8'} - dependencies: - '@stoplight/ordered-object-literal': 1.0.4 - '@stoplight/types': 13.8.0 - '@stoplight/yaml-ast-parser': 0.0.48 - tslib: 2.5.0 - dev: 
true - - /@storybook/addon-actions/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-3vyYC8o3bhC8wHIhCHzm0wznrkreEmg6joHyc4g/N6L27xpI4IiFufU7T38fYM3QOQfnAR++Cf7HBtB9A9uAaw==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/types': 7.0.0-beta.38 - dequal: 2.0.3 - lodash: 4.17.21 - polished: 4.2.2 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-inspector: 6.0.1_react@17.0.2 - telejson: 7.0.4 - ts-dedent: 2.2.0 - uuid-browser: 3.1.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/addon-backgrounds/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-rDuQiUmFBBA/l/rpCB0Ei2NLwd111SkxgAhyhcjKpyPr2HeYwNAQRRqxphKf87Xn4LtE1kTaw9XYObvnZwfZyw==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/types': 7.0.0-beta.38 - memoizerific: 1.11.3 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - 
supports-color - dev: true - - /@storybook/addon-controls/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-BFJVYmp9jXfvHerHnipBcQJDIFOyhOUFPmfd5ub9zVZEWkhO21/HVqwXoBdhPARkZW3cYPq1U7kwU0CfJzK8YQ==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/blocks': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/core-common': 7.0.0-beta.38 - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/node-logger': 7.0.0-beta.38 - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/types': 7.0.0-beta.38 - lodash: 4.17.21 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/addon-docs/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-2zHthuKB22aLdD7pMWgTKjDyrp8IlsQaY2a3fXwFWKPUVgxlfe9fTCcQ6fFnZz7p5Zyoy+aIR1GSsM/quWp6pA==} - peerDependencies: - '@storybook/mdx1-csf': '>=1.0.0-0' - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - '@storybook/mdx1-csf': - optional: true - dependencies: - '@babel/core': 7.20.12 - '@babel/plugin-transform-react-jsx': 7.20.7_@babel+core@7.20.12 - '@jest/transform': 29.3.1 - '@mdx-js/react': 2.2.1_react@17.0.2 - '@storybook/blocks': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/csf-plugin': 7.0.0-beta.38 - '@storybook/csf-tools': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - '@storybook/mdx2-csf': 1.0.0-next.5 - '@storybook/node-logger': 7.0.0-beta.38 - 
'@storybook/postinstall': 7.0.0-beta.38 - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/types': 7.0.0-beta.38 - fs-extra: 11.1.0 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - remark-external-links: 8.0.0 - remark-slug: 6.1.0 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/addon-essentials/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-qgt1J4lKO9jJKmihUcYC4lDywPEsDmkpC9fLKIXY6Ub/M1EFhvswBtBHIfi22HpSvdfi5ewpI4J0MmKwTsZ3/w==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@storybook/addon-actions': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-backgrounds': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-controls': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-docs': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-highlight': 7.0.0-beta.38 - '@storybook/addon-measure': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-outline': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-toolbars': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/addon-viewport': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/core-common': 7.0.0-beta.38 - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/node-logger': 7.0.0-beta.38 - '@storybook/preview-api': 7.0.0-beta.38 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - '@storybook/mdx1-csf' - - supports-color - dev: true - - /@storybook/addon-highlight/7.0.0-beta.38: - resolution: {integrity: sha512-mjwOAwFBnw7n/Juu3mDgsbIzTJ38iz2FApVTGMw6I9qSB0T8wUPYqDGpM+gIhC5apYc4VPdOMXvnJfyZBAyR+Q==} - dependencies: - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - '@storybook/preview-api': 7.0.0-beta.38 - transitivePeerDependencies: - - 
supports-color - dev: true - - /@storybook/addon-links/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-s8zjrx34OI+yH0XthJssxYyGi9kFust8kGlsYLLgRcMYf91kL9NQsp7M4Y5TRCo5n8TIyBCdDkSEPNB46n2c3A==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/csf': 0.0.2-next.8 - '@storybook/global': 5.0.0 - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/router': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/types': 7.0.0-beta.38 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/addon-measure/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-BS1I+nHC1DtuMqQCdfQNIf0zKua8SrSk7w2ib7V/hoAKWv1yIpIY3xDm/29YqGgw0dpGl/reOwjYbkNDZrCO5w==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/addon-outline/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-GxZFieMl+svpBIzBYcQKU/MYos9QR7UjKq1bGPFMoc+hK7iNA5H2MTwICLhN3bOhzsxrPoikwfKjSPTjOql12Q==} - peerDependencies: - react: ^16.8.0 || 
^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/addon-toolbars/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-FPW+CiRpccdzQphMzVTI31O8QxiRwlTAiUCrBrJj+fC9drL02IEDrtLJe4kMztLK8PSOTlkPlJ5YXz//lsronQ==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/addon-viewport/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-zDMX2w5kNHN1XwXiCUrmBjAqfkkvKoxZnGmdWOgjS+uXbwyqdBz4jFtlTydMhGnvx3z7R0Icnz2POEszWJNJqg==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - 
'@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - memoizerific: 1.11.3 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/blocks/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-wokrqvYw6dICer4BVWLdmP3KHJ0g+uEbJQvCzobVtLBvO0nsjWx+JWAgs7ih+6wNUF8Xw0iui/iL/LSEX/iYcw==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@storybook/channels': 7.0.0-beta.38 - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/components': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/csf': 0.0.2-next.8 - '@storybook/docs-tools': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - '@storybook/manager-api': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/types': 7.0.0-beta.38 - '@types/lodash': 4.14.191 - color-convert: 2.0.1 - dequal: 2.0.3 - lodash: 4.17.21 - markdown-to-jsx: 7.1.8_react@17.0.2 - memoizerific: 1.11.3 - polished: 4.2.2 - react: 17.0.2 - react-colorful: 5.6.1_sfoxds7t5ydpegc3knd667wn6m - react-dom: 17.0.2_react@17.0.2 - ts-dedent: 2.2.0 - util-deprecate: 1.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/builder-manager/7.0.0-beta.38: - resolution: {integrity: sha512-bhQr/9wECvIXekJXvQSsadMkjwuOp3H5dMy8gfmrwaOj90HRdviuP39m746DFI82TJHagyFZswGJ6Cv2qAaQrQ==} - dependencies: - '@fal-works/esbuild-plugin-global-externals': 2.1.2 - '@storybook/core-common': 7.0.0-beta.38 - '@storybook/manager': 7.0.0-beta.38 - '@storybook/node-logger': 7.0.0-beta.38 - '@types/ejs': 3.1.1 - '@types/find-cache-dir': 3.2.1 - '@yarnpkg/esbuild-plugin-pnp': 3.0.0-rc.15_esbuild@0.16.17 - 
browser-assert: 1.2.1 - ejs: 3.1.8 - esbuild: 0.16.17 - esbuild-plugin-alias: 0.2.1 - express: 4.18.2 - find-cache-dir: 3.3.2 - fs-extra: 11.1.0 - process: 0.11.10 - slash: 3.0.0 - util: 0.12.5 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/builder-vite/7.0.0-beta.38_egung5nfepmolqa7uavvqho3gq: - resolution: {integrity: sha512-FfgS3tHLbcF5O+g6IBjqwCR6UBrT+YnF4Ozgp/kCmXNEg/JVJ5W0yzYQXlFhp9Sbbpf8NghZiEPnNUinqbRm1w==} - peerDependencies: - '@preact/preset-vite': '*' - typescript: '>= 4.3.x' - vite: ^3.0.0 || ^4.0.0 - vite-plugin-glimmerx: '*' - peerDependenciesMeta: - '@preact/preset-vite': - optional: true - typescript: - optional: true - vite-plugin-glimmerx: - optional: true - dependencies: - '@storybook/channel-postmessage': 7.0.0-beta.38 - '@storybook/channel-websocket': 7.0.0-beta.38 - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/core-common': 7.0.0-beta.38 - '@storybook/csf-plugin': 7.0.0-beta.38 - '@storybook/mdx2-csf': 1.0.0-next.5 - '@storybook/node-logger': 7.0.0-beta.38 - '@storybook/preview': 7.0.0-beta.38 - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - browser-assert: 1.2.1 - es-module-lexer: 0.9.3 - express: 4.18.2 - fs-extra: 11.1.0 - glob: 7.2.3 - glob-promise: 4.2.2_glob@7.2.3 - magic-string: 0.27.0 - rollup: 3.10.1 - slash: 3.0.0 - typescript: 4.9.5 - vite: 4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/channel-postmessage/7.0.0-beta.38: - resolution: {integrity: sha512-brOLkukbfOxznnW/B8qfzYZGeyTtf9OtSQWrfSTdZv6eqy1fgY4LXSnfUmaKtHPS7K5eWwLGpPbdJI7PPxOklQ==} - dependencies: - '@storybook/channels': 7.0.0-beta.38 - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - qs: 6.11.0 - telejson: 7.0.4 - dev: true - - /@storybook/channel-websocket/7.0.0-beta.38: - resolution: {integrity: 
sha512-XGQ/LjXA55qsdLck9o4HGjKEdM9tEKmiyRJds30Uj/zxfJETz6ODi/g/EEJkPtZ5QeQDqPzieC0AdNinasaB6w==} - dependencies: - '@storybook/channels': 7.0.0-beta.38 - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - telejson: 7.0.4 - dev: true - - /@storybook/channels/7.0.0-beta.38: - resolution: {integrity: sha512-3Wr0jXpwIKppOHaputPLpZBwYYN7gQzqM0MYTQmw1e+lzblhdZkFAI0KaHrMWIEi70NLbsR48ZEVwyZVwrLMRw==} - dev: true - - /@storybook/cli/7.0.0-beta.38: - resolution: {integrity: sha512-0zFbXX59Fh6BvL6kWfDkdxG7F8KBwkqV6pObxq4qmwMhDpjV0HYwQotQ0aYK5Z6OuSq+WMqDY1iXszTWB6meqg==} - hasBin: true - dependencies: - '@babel/core': 7.20.12 - '@babel/preset-env': 7.20.2_@babel+core@7.20.12 - '@storybook/codemod': 7.0.0-beta.38 - '@storybook/core-common': 7.0.0-beta.38 - '@storybook/core-server': 7.0.0-beta.38 - '@storybook/csf-tools': 7.0.0-beta.38 - '@storybook/node-logger': 7.0.0-beta.38 - '@storybook/telemetry': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - '@types/semver': 7.3.13 - boxen: 5.1.2 - chalk: 4.1.2 - commander: 6.2.1 - cross-spawn: 7.0.3 - detect-indent: 6.1.0 - envinfo: 7.8.1 - execa: 5.1.1 - express: 4.18.2 - find-up: 5.0.0 - fs-extra: 11.1.0 - get-port: 5.1.1 - giget: 1.0.0 - globby: 11.1.0 - jscodeshift: 0.13.1_@babel+preset-env@7.20.2 - leven: 3.1.0 - prompts: 2.4.2 - puppeteer-core: 2.1.1 - read-pkg-up: 7.0.1 - semver: 7.3.8 - shelljs: 0.8.5 - simple-update-notifier: 1.1.0 - strip-json-comments: 3.1.1 - tempy: 1.0.1 - ts-dedent: 2.2.0 - util-deprecate: 1.0.2 - transitivePeerDependencies: - - bufferutil - - encoding - - supports-color - - utf-8-validate - dev: true - - /@storybook/client-logger/7.0.0-beta.38: - resolution: {integrity: sha512-B2Q+rmZj+QbPFWpHwqI4Llf842jiZe2JuTNd8zaeuTYf4WS/gWKicOL6VtDxUF3mfCftrQT9gNEdjBwwca1HsQ==} - dependencies: - '@storybook/global': 5.0.0 - dev: true - - /@storybook/codemod/7.0.0-beta.38: - resolution: {integrity: sha512-k+f1a6fTCRq8xSxABlagQiAbjbY/2Yfbsh94gEA0mXjQpARSfDJS2kDro4guwadnlBL8PZy/RtqNX/3RvCx4JQ==} 
- dependencies: - '@babel/core': 7.20.12 - '@babel/preset-env': 7.20.2_@babel+core@7.20.12 - '@babel/types': 7.20.7 - '@storybook/csf': 0.0.2-next.8 - '@storybook/csf-tools': 7.0.0-beta.38 - '@storybook/node-logger': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - cross-spawn: 7.0.3 - globby: 11.1.0 - jscodeshift: 0.13.1_@babel+preset-env@7.20.2 - lodash: 4.17.21 - prettier: 2.8.3 - recast: 0.23.1 - util: 0.12.5 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/components/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-AjxMwkXCcEwn7cq7yI5U2sTFctZzUKyUytUuxOS10+8uo8iA21bExLVht/z39bTQzHfh9ph433ArOR0bd9gWXw==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/csf': 0.0.2-next.8 - '@storybook/global': 5.0.0 - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/types': 7.0.0-beta.38 - memoizerific: 1.11.3 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - util-deprecate: 1.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/core-client/7.0.0-beta.38: - resolution: {integrity: sha512-RrBs8JT6r53dPkkbNR+dQqMdYQbZVxsS+1mhaBkIhj+vHpkcq2hW5iM68ilut7DcmpSuKt+Wr+skmH3ObNhD9g==} - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/preview-api': 7.0.0-beta.38 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/core-common/7.0.0-beta.38: - resolution: {integrity: sha512-ldfhLxS4zHdb5jZp6HoW6efZ0Df+04V0AoioPCZ9DwgHmg1Ra/xB/r8rpOWkzIcW2dFdj1fMxcsRPRFaglASSA==} - dependencies: - '@babel/core': 7.20.12 - '@storybook/node-logger': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - '@types/babel__core': 7.1.20 - '@types/express': 4.17.16 - '@types/node': 16.18.11 - '@types/pretty-hrtime': 1.0.1 - chalk: 4.1.2 - esbuild: 0.16.17 - esbuild-register: 3.4.2_esbuild@0.16.17 - express: 4.18.2 - 
file-system-cache: 2.0.2 - find-up: 5.0.0 - fs-extra: 11.1.0 - glob: 7.2.3 - handlebars: 4.7.7 - lazy-universal-dotenv: 4.0.0 - picomatch: 2.3.1 - pkg-dir: 5.0.0 - pretty-hrtime: 1.0.3 - resolve-from: 5.0.0 - slash: 3.0.0 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/core-events/7.0.0-beta.38: - resolution: {integrity: sha512-VzC+ssutXDheDeoDnnImVdyzzKxO4THANlM2hV8aekcRMzQe5MFSOy4kNu4bu7aA4okkz10f3Pj/TwEbgwkH0A==} - dev: true - - /@storybook/core-server/7.0.0-beta.38: - resolution: {integrity: sha512-16fVtaxaa370Lx+I1k429szUl1NcXXfQ/YonkaQBHMSOmAy4ZLjqzBDQZhpzaXYUIh3e9nuK9BQKZtd6IOC2ZQ==} - dependencies: - '@aw-web-design/x-default-browser': 1.4.88 - '@discoveryjs/json-ext': 0.5.7 - '@storybook/builder-manager': 7.0.0-beta.38 - '@storybook/core-common': 7.0.0-beta.38 - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/csf': 0.0.2-next.8 - '@storybook/csf-tools': 7.0.0-beta.38 - '@storybook/docs-mdx': 0.0.1-next.6 - '@storybook/global': 5.0.0 - '@storybook/node-logger': 7.0.0-beta.38 - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/telemetry': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - '@types/detect-port': 1.3.2 - '@types/node': 16.18.11 - '@types/node-fetch': 2.6.2 - '@types/pretty-hrtime': 1.0.1 - '@types/semver': 7.3.13 - better-opn: 2.1.1 - boxen: 5.1.2 - chalk: 4.1.2 - cli-table3: 0.6.3 - compression: 1.7.4 - detect-port: 1.5.1 - express: 4.18.2 - fs-extra: 11.1.0 - globby: 11.1.0 - ip: 2.0.0 - lodash: 4.17.21 - node-fetch: 2.6.8 - open: 8.4.0 - pretty-hrtime: 1.0.3 - prompts: 2.4.2 - read-pkg-up: 7.0.1 - semver: 7.3.8 - serve-favicon: 2.5.0 - slash: 3.0.0 - telejson: 7.0.4 - ts-dedent: 2.2.0 - util-deprecate: 1.0.2 - watchpack: 2.4.0 - ws: 8.12.0 - transitivePeerDependencies: - - bufferutil - - encoding - - supports-color - - utf-8-validate - dev: true - - /@storybook/csf-plugin/7.0.0-beta.38: - resolution: {integrity: 
sha512-xXRSKNnDHb5W6OTh8Laleo+XbVu+tjlnFYWJVfgi80aU2uZMsUremYiREyXWh/9rmDTslkteuXDDXn8Rpe8daQ==} - dependencies: - '@storybook/csf-tools': 7.0.0-beta.38 - unplugin: 0.10.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/csf-tools/7.0.0-beta.38: - resolution: {integrity: sha512-qx1RIcfx8ofshqadJTGd2SgM12F08NJhNJeNLW4WTU5+rv2rmZ/OfagUJDNBoeS9OOfveoxJSWAcCbE4ks1fOA==} - dependencies: - '@babel/types': 7.20.7 - '@storybook/csf': 0.0.2-next.8 - '@storybook/types': 7.0.0-beta.38 - fs-extra: 11.1.0 - recast: 0.23.1 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/csf/0.0.2-next.8: - resolution: {integrity: sha512-3T6rflW7D9q1iXOR+bidwoNbd9rVUTyjYH/sqsnYjbXhb/aOXsQzGKwNeq9QqZIFVpKfg5BoOF5i7DCMtoGknQ==} - dependencies: - expect-type: 0.14.2 - lodash: 4.17.21 - type-fest: 2.19.0 - dev: true - - /@storybook/docs-mdx/0.0.1-next.6: - resolution: {integrity: sha512-DjoSIXADmLJtdroXAjUotFiZlcZ2usWhqrS7aeOtZs0DVR0Ws5WQjnwtpDUXt8gryTSd+OZJ0cNsDcqg4JDEvQ==} - dev: true - - /@storybook/docs-tools/7.0.0-beta.38: - resolution: {integrity: sha512-7hLwcvZxnvBcdNemmo5eknqNQHxL+EbulQA14vLeqerSchQuaMS9Y/MNboHxPZkaFeKs+vWj4ansfl62yvcIKQ==} - dependencies: - '@babel/core': 7.20.12 - '@storybook/core-common': 7.0.0-beta.38 - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - '@types/doctrine': 0.0.3 - doctrine: 3.0.0 - lodash: 4.17.21 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/global/5.0.0: - resolution: {integrity: sha512-FcOqPAXACP0I3oJ/ws6/rrPT9WGhu915Cg8D02a9YxLo0DE9zI+a9A5gRGvmQ09fiWPukqI8ZAEoQEdWUKMQdQ==} - dev: true - - /@storybook/manager-api/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-B2t6rOSUlPpFA86MOJCzVel11S1qay2lN+HLhw42a1P1ToeMri4uEYdzwnV7F8tcrxAys1tcDRLoQiPpx87DYg==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@storybook/channels': 
7.0.0-beta.38 - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/csf': 0.0.2-next.8 - '@storybook/global': 5.0.0 - '@storybook/router': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/theming': 7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m - '@storybook/types': 7.0.0-beta.38 - dequal: 2.0.3 - lodash: 4.17.21 - memoizerific: 1.11.3 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - semver: 7.3.8 - store2: 2.14.2 - telejson: 7.0.4 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/manager/7.0.0-beta.38: - resolution: {integrity: sha512-qqj/rqrpd4KLDkqYuSVtoC1cPE3v/vDuri48DQwU+mxO07Xf3RN+QXfc1hUlS9r6eDXG8t9pOGZ3ZAz3lqevjw==} - dev: true - - /@storybook/mdx2-csf/1.0.0-next.5: - resolution: {integrity: sha512-02w0sgGZaK1agT050yCVhJ+o4rLHANWvLKWjQjeAsYbjneLC5ITt+3GDB4jRiWwJboZ8dHW1fGSK1Vg5fA34aQ==} - dev: true - - /@storybook/node-logger/7.0.0-beta.38: - resolution: {integrity: sha512-EJzYAFOZnKkM4WEl+vB+SC7GrpojdGh0Gv3MHuhRBh7fnXIL30ocVZkIlK+qt0fLy2yC/9FAFIFPX6akvhT9hQ==} - dependencies: - '@types/npmlog': 4.1.4 - chalk: 4.1.2 - npmlog: 5.0.1 - pretty-hrtime: 1.0.3 - dev: true - - /@storybook/postinstall/7.0.0-beta.38: - resolution: {integrity: sha512-KeWYbiIAW5S6FCNMCLgO8u4qLRsV1hLRQEle/z+I8AaggY4RheqiWa8hCupdXwKfkGdy7iJuSCHRUdNxaLnZ7A==} - dev: true - - /@storybook/preview-api/7.0.0-beta.38: - resolution: {integrity: sha512-ljt+1Uw2qYcQ1p2wTvNEj6OkEYctP9KBXSTo2fmOgFh+ti/jXvG0tjz33JOR2bh7hFrjp2xXUIBSWkKDTqheig==} - dependencies: - '@storybook/channel-postmessage': 7.0.0-beta.38 - '@storybook/channels': 7.0.0-beta.38 - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/core-events': 7.0.0-beta.38 - '@storybook/csf': 0.0.2-next.8 - '@storybook/global': 5.0.0 - '@storybook/types': 7.0.0-beta.38 - '@types/qs': 6.9.7 - dequal: 2.0.3 - lodash: 4.17.21 - memoizerific: 1.11.3 - qs: 6.11.0 - slash: 3.0.0 - synchronous-promise: 2.0.16 - ts-dedent: 2.2.0 - util-deprecate: 1.0.2 - 
transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/preview/7.0.0-beta.38: - resolution: {integrity: sha512-eQ1lQ6WHU8h4ZKDTgCJj45qLawtejggDh84QZJ1rN/tFCOGnzS++aG/dhBBDWOa9FwWS18Mgmde1RrMsqH1/6A==} - dev: true - - /@storybook/react-vite/7.0.0-beta.38_ixkwjuwc6whfuuxbuybnyjw2h4: - resolution: {integrity: sha512-+5ZYku6qVGLq/6M/3xFR0wSSb5omzyuEB395+TxtVCF+Ha8h+rYnpDWaPHoF+rMD7TWdVxXNYwN8UiHJFSFjDQ==} - engines: {node: '>=16'} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - vite: ^3.0.0 || ^4.0.0 - dependencies: - '@joshwooding/vite-plugin-react-docgen-typescript': 0.2.1_egung5nfepmolqa7uavvqho3gq - '@rollup/pluginutils': 4.2.1 - '@storybook/builder-vite': 7.0.0-beta.38_egung5nfepmolqa7uavvqho3gq - '@storybook/react': 7.0.0-beta.38_jgxnvbe4faw3ohf4h6p42qq6oy - '@vitejs/plugin-react': 3.0.1_vite@4.0.4 - ast-types: 0.14.2 - magic-string: 0.27.0 - react: 17.0.2 - react-docgen: 6.0.0-alpha.3 - react-dom: 17.0.2_react@17.0.2 - vite: 4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a - transitivePeerDependencies: - - '@preact/preset-vite' - - supports-color - - typescript - - vite-plugin-glimmerx - dev: true - - /@storybook/react/7.0.0-beta.38_jgxnvbe4faw3ohf4h6p42qq6oy: - resolution: {integrity: sha512-0h7yZjCuBhSMMOB51PGN/RA2gXZgLHIr64gr0JqNfM6TKLPTCsnhA88odwwmtEwExLqvnaB97matNpkeV4mbuw==} - engines: {node: '>=16.0.0'} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/core-client': 7.0.0-beta.38 - '@storybook/docs-tools': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - '@storybook/preview-api': 7.0.0-beta.38 - '@storybook/types': 7.0.0-beta.38 - '@types/escodegen': 0.0.6 - '@types/estree': 0.0.51 - '@types/node': 16.18.11 - acorn: 7.4.1 - acorn-jsx: 5.3.2_acorn@7.4.1 - acorn-walk: 7.2.0 - escodegen: 2.0.0 - html-tags: 
3.2.0 - lodash: 4.17.21 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-element-to-jsx-string: 15.0.0_sfoxds7t5ydpegc3knd667wn6m - ts-dedent: 2.2.0 - type-fest: 2.19.0 - typescript: 4.9.5 - util-deprecate: 1.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/router/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-Cx1/f6mAP8E5fRBsiA3lJksf+6IqYTCDmQ3sA/kbDobXslqy25lZ2OHuwXr9NzLwFQKwiZ3z0MMqMxYR0A7JEw==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - memoizerific: 1.11.3 - qs: 6.11.0 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: true - - /@storybook/telemetry/7.0.0-beta.38: - resolution: {integrity: sha512-hdS7SPOXQC0Yl7ukvuIE4ebFUwrYCTUpHw7FQ3Xl8l3hDap5DAx/Bs/0I7zJb2qlzqwnDuWnS8ZReuUcCEgZMw==} - dependencies: - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/core-common': 7.0.0-beta.38 - chalk: 4.1.2 - detect-package-manager: 2.0.1 - fetch-retry: 5.0.3 - fs-extra: 11.1.0 - isomorphic-unfetch: 3.1.0 - nanoid: 3.3.4 - read-pkg-up: 7.0.1 - transitivePeerDependencies: - - encoding - - supports-color - dev: true - - /@storybook/theming/7.0.0-beta.38_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-ss9N8c8mATZxaWSOAKrouQHUbwQpZvE2wTEIf5T3MR/JN/Kz/DPU8uTH4DUcP6wntkDARoj5kH0200l0w3IbKA==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@emotion/use-insertion-effect-with-fallbacks': 1.0.0_react@17.0.2 - '@storybook/client-logger': 7.0.0-beta.38 - '@storybook/global': 5.0.0 - memoizerific: 1.11.3 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: true - - /@storybook/types/7.0.0-beta.38: - resolution: {integrity: sha512-S4CR/hlp9M5/BzlbUuih/MfqU8wSFwjA0R+Tukiwc+p8qPBx5W4/acGMEwEwBGMM3KXMbqvRZExyWjWyOkQZJQ==} - dependencies: - '@babel/core': 7.20.12 - 
'@storybook/channels': 7.0.0-beta.38 - '@types/babel__core': 7.1.20 - '@types/express': 4.17.16 - express: 4.18.2 - file-system-cache: 2.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@svgr/babel-plugin-add-jsx-attribute/6.5.1_@babel+core@7.20.12: - resolution: {integrity: sha512-9PYGcXrAxitycIjRmZB+Q0JaN07GZIWaTBIGQzfaZv+qr1n8X1XUEJ5rZ/vx6OVD9RRYlrNnXWExQXcmZeD/BQ==} - engines: {node: '>=10'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - dev: true - - /@svgr/babel-plugin-remove-jsx-attribute/5.4.0: - resolution: {integrity: sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg==} - engines: {node: '>=10'} - dev: true - - /@svgr/babel-plugin-remove-jsx-empty-expression/5.0.1: - resolution: {integrity: sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA==} - engines: {node: '>=10'} - dev: true - - /@svgr/babel-plugin-replace-jsx-attribute-value/6.5.1_@babel+core@7.20.12: - resolution: {integrity: sha512-8DPaVVE3fd5JKuIC29dqyMB54sA6mfgki2H2+swh+zNJoynC8pMPzOkidqHOSc6Wj032fhl8Z0TVn1GiPpAiJg==} - engines: {node: '>=10'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - dev: true - - /@svgr/babel-plugin-svg-dynamic-title/6.5.1_@babel+core@7.20.12: - resolution: {integrity: sha512-FwOEi0Il72iAzlkaHrlemVurgSQRDFbk0OC8dSvD5fSBPHltNh7JtLsxmZUhjYBZo2PpcU/RJvvi6Q0l7O7ogw==} - engines: {node: '>=10'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - dev: true - - /@svgr/babel-plugin-svg-em-dimensions/6.5.1_@babel+core@7.20.12: - resolution: {integrity: sha512-gWGsiwjb4tw+ITOJ86ndY/DZZ6cuXMNE/SjcDRg+HLuCmwpcjOktwRF9WgAiycTqJD/QXqL2f8IzE2Rzh7aVXA==} - engines: {node: '>=10'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - dev: true - - /@svgr/babel-plugin-transform-react-native-svg/6.5.1_@babel+core@7.20.12: - 
resolution: {integrity: sha512-2jT3nTayyYP7kI6aGutkyfJ7UMGtuguD72OjeGLwVNyfPRBD8zQthlvL+fAbAKk5n9ZNcvFkp/b1lZ7VsYqVJg==} - engines: {node: '>=10'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - dev: true - - /@svgr/babel-plugin-transform-svg-component/6.5.1_@babel+core@7.20.12: - resolution: {integrity: sha512-a1p6LF5Jt33O3rZoVRBqdxL350oge54iZWHNI6LJB5tQ7EelvD/Mb1mfBiZNAan0dt4i3VArkFRjA4iObuNykQ==} - engines: {node: '>=12'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - dev: true - - /@svgr/babel-preset/6.5.1_@babel+core@7.20.12: - resolution: {integrity: sha512-6127fvO/FF2oi5EzSQOAjo1LE3OtNVh11R+/8FXa+mHx1ptAaS4cknIjnUA7e6j6fwGGJ17NzaTJFUwOV2zwCw==} - engines: {node: '>=10'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@svgr/babel-plugin-add-jsx-attribute': 6.5.1_@babel+core@7.20.12 - '@svgr/babel-plugin-remove-jsx-attribute': 5.4.0 - '@svgr/babel-plugin-remove-jsx-empty-expression': 5.0.1 - '@svgr/babel-plugin-replace-jsx-attribute-value': 6.5.1_@babel+core@7.20.12 - '@svgr/babel-plugin-svg-dynamic-title': 6.5.1_@babel+core@7.20.12 - '@svgr/babel-plugin-svg-em-dimensions': 6.5.1_@babel+core@7.20.12 - '@svgr/babel-plugin-transform-react-native-svg': 6.5.1_@babel+core@7.20.12 - '@svgr/babel-plugin-transform-svg-component': 6.5.1_@babel+core@7.20.12 - dev: true - - /@svgr/core/6.5.1: - resolution: {integrity: sha512-/xdLSWxK5QkqG524ONSjvg3V/FkNyCv538OIBdQqPNaAta3AsXj/Bd2FbvR87yMbXO2hFSWiAe/Q6IkVPDw+mw==} - engines: {node: '>=10'} - dependencies: - '@babel/core': 7.20.12 - '@svgr/babel-preset': 6.5.1_@babel+core@7.20.12 - '@svgr/plugin-jsx': 6.5.1_@svgr+core@6.5.1 - camelcase: 6.3.0 - cosmiconfig: 7.1.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@svgr/hast-util-to-babel-ast/6.5.1: - resolution: {integrity: sha512-1hnUxxjd83EAxbL4a0JDJoD3Dao3hmjvyvyEV8PzWmLK3B9m9NPlW7GKjFyoWE8nM7HnXzPcmmSyOW8yOddSXw==} - engines: 
{node: '>=10'} - dependencies: - '@babel/types': 7.20.7 - entities: 4.4.0 - dev: true - - /@svgr/plugin-jsx/6.5.1_@svgr+core@6.5.1: - resolution: {integrity: sha512-+UdQxI3jgtSjCykNSlEMuy1jSRQlGC7pqBCPvkG/2dATdWo082zHTTK3uhnAju2/6XpE6B5mZ3z4Z8Ns01S8Gw==} - engines: {node: '>=10'} - peerDependencies: - '@svgr/core': ^6.0.0 - dependencies: - '@babel/core': 7.20.12 - '@svgr/babel-preset': 6.5.1_@babel+core@7.20.12 - '@svgr/core': 6.5.1 - '@svgr/hast-util-to-babel-ast': 6.5.1 - svg-parser: 2.0.4 - transitivePeerDependencies: - - supports-color - dev: true - - /@tanstack/react-table/8.7.6_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-/QijmMFeP7wDLBnr0MQ/5MlbXePbIL/1nOtkxBC9zvmBu4gDKJEDBqipUyM7Wc/iBpSd0IFyqBlvZvTPD9FYDA==} - engines: {node: '>=12'} - peerDependencies: - react: '>=16' - react-dom: '>=16' - dependencies: - '@tanstack/table-core': 8.7.6 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /@tanstack/table-core/8.7.6: - resolution: {integrity: sha512-sqiNTMzB6cpyL8DFH6/VqW48SwiflLqxQqYpo2wNock7rdVGvlm0BLNI8vZUJbr1+fmmWmHwBvi5OMgZw8n1DA==} - engines: {node: '>=12'} - dev: false - - /@testing-library/dom/8.20.0: - resolution: {integrity: sha512-d9ULIT+a4EXLX3UU8FBjauG9NnsZHkHztXoIcTsOKoOw030fyjheN9svkTULjJxtYag9DZz5Jz5qkWZDPxTFwA==} - engines: {node: '>=12'} - dependencies: - '@babel/code-frame': 7.18.6 - '@babel/runtime': 7.20.7 - '@types/aria-query': 5.0.1 - aria-query: 5.1.3 - chalk: 4.1.2 - dom-accessibility-api: 0.5.15 - lz-string: 1.4.4 - pretty-format: 27.5.1 - dev: true - - /@testing-library/jest-dom/5.16.5: - resolution: {integrity: sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA==} - engines: {node: '>=8', npm: '>=6', yarn: '>=1'} - dependencies: - '@adobe/css-tools': 4.0.2 - '@babel/runtime': 7.20.7 - '@types/testing-library__jest-dom': 5.14.5 - aria-query: 5.1.3 - chalk: 3.0.0 - css.escape: 1.5.1 - dom-accessibility-api: 0.5.15 - lodash: 4.17.21 - redent: 3.0.0 - dev: true - 
- /@testing-library/react-hooks/7.0.2_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-dYxpz8u9m4q1TuzfcUApqi8iFfR6R0FaMbr2hjZJy1uC8z+bO/K4v8Gs9eogGKYQop7QsrBTFkv/BCF7MzD2Cg==} - engines: {node: '>=12'} - peerDependencies: - react: '>=16.9.0' - react-dom: '>=16.9.0' - react-test-renderer: '>=16.9.0' - peerDependenciesMeta: - react-dom: - optional: true - react-test-renderer: - optional: true - dependencies: - '@babel/runtime': 7.20.7 - '@types/react': 17.0.52 - '@types/react-dom': 17.0.18 - '@types/react-test-renderer': 18.0.0 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-error-boundary: 3.1.4_react@17.0.2 - dev: true - - /@testing-library/react/12.1.5_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg==} - engines: {node: '>=12'} - peerDependencies: - react: <18.0.0 - react-dom: <18.0.0 - dependencies: - '@babel/runtime': 7.20.7 - '@testing-library/dom': 8.20.0 - '@types/react-dom': 17.0.18 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: true - - /@testing-library/user-event/13.5.0: - resolution: {integrity: sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg==} - engines: {node: '>=10', npm: '>=6'} - peerDependencies: - '@testing-library/dom': '>=7.21.4' - dependencies: - '@babel/runtime': 7.20.7 - dev: true - - /@tootallnate/once/1.1.2: - resolution: {integrity: sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==} - engines: {node: '>= 6'} - dev: true - - /@tootallnate/once/2.0.0: - resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} - engines: {node: '>= 10'} - dev: true - - /@tsconfig/node10/1.0.9: - resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} - dev: true - - /@tsconfig/node12/1.0.11: - resolution: 
{integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} - dev: true - - /@tsconfig/node14/1.0.3: - resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} - dev: true - - /@tsconfig/node16/1.0.3: - resolution: {integrity: sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==} - dev: true - - /@types/aria-query/5.0.1: - resolution: {integrity: sha512-XTIieEY+gvJ39ChLcB4If5zHtPxt3Syj5rgZR+e1ctpmK8NjPf0zFqsz4JpLJT0xla9GFDKjy8Cpu331nrmE1Q==} - dev: true - - /@types/babel__core/7.1.20: - resolution: {integrity: sha512-PVb6Bg2QuscZ30FvOU7z4guG6c926D9YRvOxEaelzndpMsvP+YM74Q/dAFASpg2l6+XLalxSGxcq/lrgYWZtyQ==} - dependencies: - '@babel/parser': 7.20.7 - '@babel/types': 7.20.7 - '@types/babel__generator': 7.6.4 - '@types/babel__template': 7.4.1 - '@types/babel__traverse': 7.18.3 - dev: true - - /@types/babel__generator/7.6.4: - resolution: {integrity: sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==} - dependencies: - '@babel/types': 7.20.7 - dev: true - - /@types/babel__template/7.4.1: - resolution: {integrity: sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==} - dependencies: - '@babel/parser': 7.20.7 - '@babel/types': 7.20.7 - dev: true - - /@types/babel__traverse/7.18.3: - resolution: {integrity: sha512-1kbcJ40lLB7MHsj39U4Sh1uTd2E7rLEa79kmDpI6cy+XiXsteB3POdQomoq4FxszMrO3ZYchkhYJw7A2862b3w==} - dependencies: - '@babel/types': 7.20.7 - dev: true - - /@types/body-parser/1.19.2: - resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==} - dependencies: - '@types/connect': 3.4.35 - '@types/node': 17.0.45 - dev: true - - /@types/connect/3.4.35: - resolution: {integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==} 
- dependencies: - '@types/node': 17.0.45 - dev: true - - /@types/d3-array/3.0.4: - resolution: {integrity: sha512-nwvEkG9vYOc0Ic7G7kwgviY4AQlTfYGIZ0fqB7CQHXGyYM6nO7kJh5EguSNA3jfh4rq7Sb7eMVq8isuvg2/miQ==} - dev: false - - /@types/d3-color/3.1.0: - resolution: {integrity: sha512-HKuicPHJuvPgCD+np6Se9MQvS6OCbJmOjGvylzMJRlDwUXjKTTXs6Pwgk79O09Vj/ho3u1ofXnhFOaEWWPrlwA==} - dev: false - - /@types/d3-ease/3.0.0: - resolution: {integrity: sha512-aMo4eaAOijJjA6uU+GIeW018dvy9+oH5Y2VPPzjjfxevvGQ/oRDs+tfYC9b50Q4BygRR8yE2QCLsrT0WtAVseA==} - dev: false - - /@types/d3-interpolate/3.0.1: - resolution: {integrity: sha512-jx5leotSeac3jr0RePOH1KdR9rISG91QIE4Q2PYTu4OymLTZfA3SrnURSLzKH48HmXVUru50b8nje4E79oQSQw==} - dependencies: - '@types/d3-color': 3.1.0 - dev: false - - /@types/d3-path/3.0.0: - resolution: {integrity: sha512-0g/A+mZXgFkQxN3HniRDbXMN79K3CdTpLsevj+PXiTcb2hVyvkZUBg37StmgCQkaD84cUJ4uaDAWq7UJOQy2Tg==} - dev: false - - /@types/d3-scale/4.0.3: - resolution: {integrity: sha512-PATBiMCpvHJSMtZAMEhc2WyL+hnzarKzI6wAHYjhsonjWJYGq5BXTzQjv4l8m2jO183/4wZ90rKvSeT7o72xNQ==} - dependencies: - '@types/d3-time': 3.0.0 - dev: false - - /@types/d3-shape/3.1.1: - resolution: {integrity: sha512-6Uh86YFF7LGg4PQkuO2oG6EMBRLuW9cbavUW46zkIO5kuS2PfTqo2o9SkgtQzguBHbLgNnU90UNsITpsX1My+A==} - dependencies: - '@types/d3-path': 3.0.0 - dev: false - - /@types/d3-time/3.0.0: - resolution: {integrity: sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg==} - dev: false - - /@types/d3-timer/3.0.0: - resolution: {integrity: sha512-HNB/9GHqu7Fo8AQiugyJbv6ZxYz58wef0esl4Mv828w1ZKpAshw/uFWVDUcIB9KKFeFKoxS3cHY07FFgtTRZ1g==} - dev: false - - /@types/debug/4.1.7: - resolution: {integrity: sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==} - dependencies: - '@types/ms': 0.7.31 - dev: false - - /@types/detect-port/1.3.2: - resolution: {integrity: 
sha512-xxgAGA2SAU4111QefXPSp5eGbDm/hW6zhvYl9IeEPZEry9F4d66QAHm5qpUXjb6IsevZV/7emAEx5MhP6O192g==} - dev: true - - /@types/diff/5.0.2: - resolution: {integrity: sha512-uw8eYMIReOwstQ0QKF0sICefSy8cNO/v7gOTiIy9SbwuHyEecJUm7qlgueOO5S1udZ5I/irVydHVwMchgzbKTg==} - dev: false - - /@types/doctrine/0.0.3: - resolution: {integrity: sha512-w5jZ0ee+HaPOaX25X2/2oGR/7rgAQSYII7X7pp0m9KgBfMP7uKfMfTvcpl5Dj+eDBbpxKGiqE+flqDr6XTd2RA==} - dev: true - - /@types/ejs/3.1.1: - resolution: {integrity: sha512-RQul5wEfY7BjWm0sYY86cmUN/pcXWGyVxWX93DFFJvcrxax5zKlieLwA3T77xJGwNcZW0YW6CYG70p1m8xPFmA==} - dev: true - - /@types/es-aggregate-error/1.0.2: - resolution: {integrity: sha512-erqUpFXksaeR2kejKnhnjZjbFxUpGZx4Z7ydNL9ie8tEhXPiZTsLeUDJ6aR1F8j5wWUAtOAQWUqkc7givBJbBA==} - dependencies: - '@types/node': 17.0.45 - dev: true - - /@types/escodegen/0.0.6: - resolution: {integrity: sha512-AjwI4MvWx3HAOaZqYsjKWyEObT9lcVV0Y0V8nXo6cXzN8ZiMxVhf6F3d/UNvXVGKrEzL/Dluc5p+y9GkzlTWig==} - dev: true - - /@types/estree/0.0.39: - resolution: {integrity: sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==} - dev: true - - /@types/estree/0.0.51: - resolution: {integrity: sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==} - dev: true - - /@types/estree/1.0.0: - resolution: {integrity: sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ==} - dev: true - - /@types/express-serve-static-core/4.17.33: - resolution: {integrity: sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA==} - dependencies: - '@types/node': 17.0.45 - '@types/qs': 6.9.7 - '@types/range-parser': 1.2.4 - dev: true - - /@types/express/4.17.16: - resolution: {integrity: sha512-LkKpqRZ7zqXJuvoELakaFYuETHjZkSol8EV6cNnyishutDBCCdv6+dsKPbKkCcIk57qRphOLY5sEgClw1bO3gA==} - dependencies: - '@types/body-parser': 1.19.2 - '@types/express-serve-static-core': 4.17.33 - '@types/qs': 6.9.7 - 
'@types/serve-static': 1.15.0 - dev: true - - /@types/find-cache-dir/3.2.1: - resolution: {integrity: sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw==} - dev: true - - /@types/flat/5.0.2: - resolution: {integrity: sha512-3zsplnP2djeps5P9OyarTxwRpMLoe5Ash8aL9iprw0JxB+FAHjY+ifn4yZUuW4/9hqtnmor6uvjSRzJhiVbrEQ==} - dev: true - - /@types/glob/7.2.0: - resolution: {integrity: sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==} - dependencies: - '@types/minimatch': 5.1.2 - '@types/node': 17.0.45 - dev: true - - /@types/graceful-fs/4.1.6: - resolution: {integrity: sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw==} - dependencies: - '@types/node': 17.0.45 - dev: true - - /@types/hast/2.3.4: - resolution: {integrity: sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==} - dependencies: - '@types/unist': 2.0.6 - dev: false - - /@types/hoist-non-react-statics/3.3.1: - resolution: {integrity: sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA==} - dependencies: - '@types/react': 17.0.52 - hoist-non-react-statics: 3.3.2 - - /@types/istanbul-lib-coverage/2.0.4: - resolution: {integrity: sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==} - dev: true - - /@types/istanbul-lib-report/3.0.0: - resolution: {integrity: sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==} - dependencies: - '@types/istanbul-lib-coverage': 2.0.4 - dev: true - - /@types/istanbul-reports/3.0.1: - resolution: {integrity: sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==} - dependencies: - '@types/istanbul-lib-report': 3.0.0 - dev: true - - /@types/jest/27.5.2: - resolution: {integrity: 
sha512-mpT8LJJ4CMeeahobofYWIjFo0xonRS/HfxnVEPMPFSQdGUt1uHCnoPT7Zhb+sjDU2wz0oKV0OLUR0WzrHNgfeA==} - dependencies: - jest-matcher-utils: 27.5.1 - pretty-format: 27.5.1 - dev: true - - /@types/js-cookie/2.2.7: - resolution: {integrity: sha512-aLkWa0C0vO5b4Sr798E26QgOkss68Un0bLjs7u9qxzPT5CG+8DuNTffWES58YzJs3hrVAOs1wonycqEBqNJubA==} - dev: false - - /@types/js-yaml/4.0.5: - resolution: {integrity: sha512-FhpRzf927MNQdRZP0J5DLIdTXhjLYzeUTmLAu69mnVksLH9CJY3IuSeEgbKUki7GQZm0WqDkGzyxju2EZGD2wA==} - dev: true - - /@types/jsdom/20.0.1: - resolution: {integrity: sha512-d0r18sZPmMQr1eG35u12FZfhIXNrnsPU/g5wvRKCUf/tOGilKKwYMYGqh33BNR6ba+2gkHw1EUiHoN3mn7E5IQ==} - dependencies: - '@types/node': 17.0.45 - '@types/tough-cookie': 4.0.2 - parse5: 7.1.2 - dev: true - - /@types/json-schema/7.0.11: - resolution: {integrity: sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==} - dev: true - - /@types/json5/0.0.29: - resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} - dev: true - - /@types/lodash/4.14.191: - resolution: {integrity: sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ==} - - /@types/long/4.0.2: - resolution: {integrity: sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==} - dev: false - - /@types/mdast/3.0.10: - resolution: {integrity: sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==} - dependencies: - '@types/unist': 2.0.6 - dev: false - - /@types/mdurl/1.0.2: - resolution: {integrity: sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==} - dev: false - - /@types/mdx/2.0.3: - resolution: {integrity: sha512-IgHxcT3RC8LzFLhKwP3gbMPeaK7BM9eBH46OdapPA7yvuIUJ8H6zHZV53J8hGZcTSnt95jANt+rTBNUUc22ACQ==} - dev: true - - /@types/mime-types/2.1.1: - resolution: {integrity: 
sha512-vXOTGVSLR2jMw440moWTC7H19iUyLtP3Z1YTj7cSsubOICinjMxFeb/V57v9QdyyPGbbWolUFSSmSiRSn94tFw==} - dev: true - - /@types/mime/3.0.1: - resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==} - dev: true - - /@types/minimatch/5.1.2: - resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} - dev: true - - /@types/minimist/1.2.2: - resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==} - dev: true - - /@types/ms/0.7.31: - resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} - dev: false - - /@types/node-fetch/2.6.2: - resolution: {integrity: sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==} - dependencies: - '@types/node': 17.0.45 - form-data: 3.0.1 - - /@types/node/16.18.11: - resolution: {integrity: sha512-3oJbGBUWuS6ahSnEq1eN2XrCyf4YsWI8OyCvo7c64zQJNplk3mO84t53o8lfTk+2ji59g5ycfc6qQ3fdHliHuA==} - dev: true - - /@types/node/17.0.45: - resolution: {integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==} - - /@types/normalize-package-data/2.4.1: - resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==} - dev: true - - /@types/npmlog/4.1.4: - resolution: {integrity: sha512-WKG4gTr8przEZBiJ5r3s8ZIAoMXNbOgQ+j/d5O4X3x6kZJRLNvyUJuUK/KoG3+8BaOHPhp2m7WC6JKKeovDSzQ==} - dev: true - - /@types/parse-json/4.0.0: - resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} - - /@types/prettier/2.7.2: - resolution: {integrity: sha512-KufADq8uQqo1pYKVIYzfKbJfBAc0sOeXqGbFaSpv8MRmC/zXgowNZmFcbngndGk922QDmOASEXUZCaY48gs4cg==} - dev: true - - /@types/pretty-hrtime/1.0.1: - resolution: {integrity: 
sha512-VjID5MJb1eGKthz2qUerWT8+R4b9N+CHvGCzg9fn4kWZgaF9AhdYikQio3R7wV8YY1NsQKPaCwKz1Yff+aHNUQ==} - dev: true - - /@types/prop-types/15.7.5: - resolution: {integrity: sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==} - - /@types/qs/6.9.7: - resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==} - dev: true - - /@types/range-parser/1.2.4: - resolution: {integrity: sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==} - dev: true - - /@types/react-datepicker/4.8.0_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-20uzZsIf4moPAjjHDfPvH8UaOHZBxrkiQZoLS3wgKq8Xhp+95gdercLEdoA7/I8nR9R5Jz2qQkdMIM+Lq4AS1A==} - dependencies: - '@popperjs/core': 2.11.6 - '@types/react': 17.0.52 - date-fns: 2.29.3 - react-popper: 2.3.0_vov5yimr6vvxyufd6uigwwkst4 - transitivePeerDependencies: - - react - - react-dom - dev: true - - /@types/react-dom/17.0.18: - resolution: {integrity: sha512-rLVtIfbwyur2iFKykP2w0pl/1unw26b5td16d5xMgp7/yjTHomkyxPYChFoCr/FtEX1lN9wY6lFj1qvKdS5kDw==} - dependencies: - '@types/react': 17.0.52 - dev: true - - /@types/react-helmet/6.1.6: - resolution: {integrity: sha512-ZKcoOdW/Tg+kiUbkFCBtvDw0k3nD4HJ/h/B9yWxN4uDO8OkRksWTO+EL+z/Qu3aHTeTll3Ro0Cc/8UhwBCMG5A==} - dependencies: - '@types/react': 17.0.52 - dev: true - - /@types/react-lazylog/4.5.1: - resolution: {integrity: sha512-g4yeosa1zYhu2BUJmuu2H2o0dsdRj0o8Omw3pBiVHdLHJaeYIyArvyMRR3bI/MxZxG4EaiRl8AOQ6zeM8P46jA==} - dependencies: - '@types/react': 17.0.52 - immutable: 4.2.2 - dev: true - - /@types/react-paginate/7.1.1: - resolution: {integrity: sha512-94cEJHfBaINba2XGQOWFJF/tYe+AImIhOPqMCfqyeFOgk0qUpDZyDOOW76A1rgdtWndR8UwsMPgV/uuTM8vZ7w==} - dependencies: - '@types/react': 17.0.52 - dev: true - - /@types/react-slick/0.23.10: - resolution: {integrity: sha512-ZiqdencANDZy6sWOWJ54LDvebuXFEhDlHtXU9FFipQR2BcYU2QJxZhvJPW6YK7cocibUiNn+YvDTbt1HtCIBVA==} - 
dependencies: - '@types/react': 17.0.52 - dev: true - - /@types/react-table/7.7.14: - resolution: {integrity: sha512-TYrv7onCiakaG1uAu/UpQ9FojNEt/4/ht87EgJQaEGFoWV606ZLWUZAcUHzMxgc3v1mywP1cDyz3qB4ho3hWOw==} - dependencies: - '@types/react': 17.0.52 - dev: true - - /@types/react-test-renderer/18.0.0: - resolution: {integrity: sha512-C7/5FBJ3g3sqUahguGi03O79b8afNeSD6T8/GU50oQrJCU0bVCCGQHaGKUbg2Ce8VQEEqTw8/HiS6lXHHdgkdQ==} - dependencies: - '@types/react': 17.0.52 - dev: true - - /@types/react-transition-group/4.4.5: - resolution: {integrity: sha512-juKD/eiSM3/xZYzjuzH6ZwpP+/lejltmiS3QEzV/vmb/Q8+HfDmxu+Baga8UEMGBqV88Nbg4l2hY/K2DkyaLLA==} - dependencies: - '@types/react': 17.0.52 - dev: false - - /@types/react-widgets/4.4.7: - resolution: {integrity: sha512-cW476/b3MOOmZ7kPTx6ShdFWIn54zbgIpsSzwMJVlmr36uDuKeLMjoxoehtYOpFJgpHX9vv24Bfh91Xi9ZQYCA==} - dependencies: - '@types/react': 17.0.52 - dev: true - - /@types/react/17.0.52: - resolution: {integrity: sha512-vwk8QqVODi0VaZZpDXQCmEmiOuyjEFPY7Ttaw5vjM112LOq37yz1CDJGrRJwA1fYEq4Iitd5rnjd1yWAc/bT+A==} - dependencies: - '@types/prop-types': 15.7.5 - '@types/scheduler': 0.16.2 - csstype: 3.1.1 - - /@types/sanitize-html/2.8.0: - resolution: {integrity: sha512-Uih6caOm3DsBYnVGOYn0A9NoTNe1c4aPStmHC/YA2JrpP9kx//jzaRcIklFvSpvVQEcpl/ZCr4DgISSf/YxTvg==} - dependencies: - htmlparser2: 8.0.1 - dev: true - - /@types/scheduler/0.16.2: - resolution: {integrity: sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==} - - /@types/segment-analytics/0.0.34: - resolution: {integrity: sha512-fiOyEgyqJY2Mv9k72WG4XoY4fVE31byiSUrEFcNh+MgHcH3HuJmoz2J7ktO3YizBrN6/RuaH1tY5J/5I5BJHJQ==} - dev: false - - /@types/semver/7.3.13: - resolution: {integrity: sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==} - dev: true - - /@types/serve-static/1.15.0: - resolution: {integrity: sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==} 
- dependencies: - '@types/mime': 3.0.1 - '@types/node': 17.0.45 - dev: true - - /@types/stack-utils/2.0.1: - resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==} - dev: true - - /@types/styled-components/5.1.26: - resolution: {integrity: sha512-KuKJ9Z6xb93uJiIyxo/+ksS7yLjS1KzG6iv5i78dhVg/X3u5t1H7juRWqVmodIdz6wGVaIApo1u01kmFRdJHVw==} - dependencies: - '@types/hoist-non-react-statics': 3.3.1 - '@types/react': 17.0.52 - csstype: 3.1.1 - dev: true - - /@types/testing-library__jest-dom/5.14.5: - resolution: {integrity: sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ==} - dependencies: - '@types/jest': 27.5.2 - dev: true - - /@types/tough-cookie/4.0.2: - resolution: {integrity: sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw==} - dev: true - - /@types/unist/2.0.6: - resolution: {integrity: sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==} - - /@types/urijs/1.19.19: - resolution: {integrity: sha512-FDJNkyhmKLw7uEvTxx5tSXfPeQpO0iy73Ry+PmYZJvQy0QIWX8a7kJ4kLWRf+EbTPJEPDSgPXHaM7pzr5lmvCg==} - dev: true - - /@types/uuid/9.0.0: - resolution: {integrity: sha512-kr90f+ERiQtKWMz5rP32ltJ/BtULDI5RVO0uavn1HQUOwjx0R1h0rnDYNL0CepF1zL5bSY6FISAfd9tOdDhU5Q==} - dev: false - - /@types/whatwg-streams/0.0.7: - resolution: {integrity: sha512-6sDiSEP6DWcY2ZolsJ2s39ZmsoGQ7KVwBDI3sESQsEm9P2dHTcqnDIHRZFRNtLCzWp7hCFGqYbw5GyfpQnJ01A==} - dev: false - - /@types/yargs-parser/21.0.0: - resolution: {integrity: sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==} - dev: true - - /@types/yargs/17.0.19: - resolution: {integrity: sha512-cAx3qamwaYX9R0fzOIZAlFpo4A+1uBVCxqpKz9D26uTF4srRXaGTTsikQmaotCtNdbhzyUH7ft6p9ktz9s6UNQ==} - dependencies: - '@types/yargs-parser': 21.0.0 - dev: true - - /@typescript-eslint/eslint-plugin/5.48.2_azmbqzqvrlvblbdtiwxwvyvjjy: - 
resolution: {integrity: sha512-sR0Gja9Ky1teIq4qJOl0nC+Tk64/uYdX+mi+5iB//MH8gwyx8e3SOyhEzeLZEFEEfCaLf8KJq+Bd/6je1t+CAg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - '@typescript-eslint/parser': ^5.0.0 - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@typescript-eslint/parser': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - '@typescript-eslint/scope-manager': 5.48.2 - '@typescript-eslint/type-utils': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - '@typescript-eslint/utils': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - debug: 4.3.4 - eslint: 8.32.0 - ignore: 5.2.4 - natural-compare-lite: 1.4.0 - regexpp: 3.2.0 - semver: 7.3.8 - tsutils: 3.21.0_typescript@4.9.5 - typescript: 4.9.5 - transitivePeerDependencies: - - supports-color - dev: true - - /@typescript-eslint/experimental-utils/5.48.2_et5x32uxl7z5ldub3ye5rhlyqm: - resolution: {integrity: sha512-Iwx8De8dwl6qPaPZWIaEfP1feN/YFlA5FlCxF3zUIm+2AG92C5Tefkugj2L9ytOFrmTYkTE/CqvJFZbYoVZQMg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - dependencies: - '@typescript-eslint/utils': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - eslint: 8.32.0 - transitivePeerDependencies: - - supports-color - - typescript - dev: true - - /@typescript-eslint/parser/5.48.2_et5x32uxl7z5ldub3ye5rhlyqm: - resolution: {integrity: sha512-38zMsKsG2sIuM5Oi/olurGwYJXzmtdsHhn5mI/pQogP+BjYVkK5iRazCQ8RGS0V+YLk282uWElN70zAAUmaYHw==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@typescript-eslint/scope-manager': 5.48.2 - '@typescript-eslint/types': 5.48.2 - '@typescript-eslint/typescript-estree': 5.48.2_typescript@4.9.5 - debug: 4.3.4 - eslint: 8.32.0 - typescript: 4.9.5 - transitivePeerDependencies: - - supports-color - dev: true - - 
/@typescript-eslint/scope-manager/5.48.2: - resolution: {integrity: sha512-zEUFfonQid5KRDKoI3O+uP1GnrFd4tIHlvs+sTJXiWuypUWMuDaottkJuR612wQfOkjYbsaskSIURV9xo4f+Fw==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - '@typescript-eslint/types': 5.48.2 - '@typescript-eslint/visitor-keys': 5.48.2 - dev: true - - /@typescript-eslint/type-utils/5.48.2_et5x32uxl7z5ldub3ye5rhlyqm: - resolution: {integrity: sha512-QVWx7J5sPMRiOMJp5dYshPxABRoZV1xbRirqSk8yuIIsu0nvMTZesKErEA3Oix1k+uvsk8Cs8TGJ6kQ0ndAcew==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: '*' - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@typescript-eslint/typescript-estree': 5.48.2_typescript@4.9.5 - '@typescript-eslint/utils': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - debug: 4.3.4 - eslint: 8.32.0 - tsutils: 3.21.0_typescript@4.9.5 - typescript: 4.9.5 - transitivePeerDependencies: - - supports-color - dev: true - - /@typescript-eslint/types/5.48.2: - resolution: {integrity: sha512-hE7dA77xxu7ByBc6KCzikgfRyBCTst6dZQpwaTy25iMYOnbNljDT4hjhrGEJJ0QoMjrfqrx+j1l1B9/LtKeuqA==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dev: true - - /@typescript-eslint/typescript-estree/5.48.2_typescript@4.9.5: - resolution: {integrity: sha512-bibvD3z6ilnoVxUBFEgkO0k0aFvUc4Cttt0dAreEr+nrAHhWzkO83PEVVuieK3DqcgL6VAK5dkzK8XUVja5Zcg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@typescript-eslint/types': 5.48.2 - '@typescript-eslint/visitor-keys': 5.48.2 - debug: 4.3.4 - globby: 11.1.0 - is-glob: 4.0.3 - semver: 7.3.8 - tsutils: 3.21.0_typescript@4.9.5 - typescript: 4.9.5 - transitivePeerDependencies: - - supports-color - dev: true - - /@typescript-eslint/utils/5.48.2_et5x32uxl7z5ldub3ye5rhlyqm: - resolution: {integrity: sha512-2h18c0d7jgkw6tdKTlNaM7wyopbLRBiit8oAxoP89YnuBOzCZ8g8aBCaCqq7h208qUTroL7Whgzam7UY3HVLow==} - 
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - dependencies: - '@types/json-schema': 7.0.11 - '@types/semver': 7.3.13 - '@typescript-eslint/scope-manager': 5.48.2 - '@typescript-eslint/types': 5.48.2 - '@typescript-eslint/typescript-estree': 5.48.2_typescript@4.9.5 - eslint: 8.32.0 - eslint-scope: 5.1.1 - eslint-utils: 3.0.0_eslint@8.32.0 - semver: 7.3.8 - transitivePeerDependencies: - - supports-color - - typescript - dev: true - - /@typescript-eslint/visitor-keys/5.48.2: - resolution: {integrity: sha512-z9njZLSkwmjFWUelGEwEbdf4NwKvfHxvGC0OcGN1Hp/XNDIcJ7D5DpPNPv6x6/mFvc1tQHsaWmpD/a4gOvvCJQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - '@typescript-eslint/types': 5.48.2 - eslint-visitor-keys: 3.3.0 - dev: true - - /@vitejs/plugin-basic-ssl/1.0.1_vite@4.0.4: - resolution: {integrity: sha512-pcub+YbFtFhaGRTo1832FQHQSHvMrlb43974e2eS8EKleR3p1cDdkJFPci1UhwkEf1J9Bz+wKBSzqpKp7nNj2A==} - engines: {node: '>=14.6.0'} - peerDependencies: - vite: ^3.0.0 || ^4.0.0 - dependencies: - vite: 4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a - dev: true - - /@vitejs/plugin-react/3.0.1_vite@4.0.4: - resolution: {integrity: sha512-mx+QvYwIbbpOIJw+hypjnW1lAbKDHtWK5ibkF/V1/oMBu8HU/chb+SnqJDAsLq1+7rGqjktCEomMTM5KShzUKQ==} - engines: {node: ^14.18.0 || >=16.0.0} - peerDependencies: - vite: ^4.0.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/plugin-transform-react-jsx-self': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-react-jsx-source': 7.19.6_@babel+core@7.20.12 - magic-string: 0.27.0 - react-refresh: 0.14.0 - vite: 4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a - transitivePeerDependencies: - - supports-color - dev: true - - /@xobotyi/scrollbar-width/1.9.5: - resolution: {integrity: sha512-N8tkAACJx2ww8vFMneJmaAgmjAG1tnVBZJRLRcx061tmsLRZHSEZSLuGWnwPtunsSLvSqXQ2wfp7Mgqg1I+2dQ==} - dev: false - - /@yarnpkg/esbuild-plugin-pnp/3.0.0-rc.15_esbuild@0.16.17: - resolution: {integrity: 
sha512-kYzDJO5CA9sy+on/s2aIW0411AklfCi8Ck/4QDivOqsMKpStZA2SsR+X27VTggGwpStWaLrjJcDcdDMowtG8MA==} - engines: {node: '>=14.15.0'} - peerDependencies: - esbuild: '>=0.10.0' - dependencies: - esbuild: 0.16.17 - tslib: 2.5.0 - dev: true - - /abab/2.0.6: - resolution: {integrity: sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==} - dev: true - - /abbrev/1.1.1: - resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==} - dev: true - - /abort-controller/3.0.0: - resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} - engines: {node: '>=6.5'} - dependencies: - event-target-shim: 5.0.1 - dev: true - - /accepts/1.3.8: - resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} - engines: {node: '>= 0.6'} - dependencies: - mime-types: 2.1.35 - negotiator: 0.6.3 - dev: true - - /acorn-globals/7.0.1: - resolution: {integrity: sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==} - dependencies: - acorn: 8.8.1 - acorn-walk: 8.2.0 - dev: true - - /acorn-jsx/5.3.2_acorn@7.4.1: - resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} - peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - dependencies: - acorn: 7.4.1 - dev: true - - /acorn-jsx/5.3.2_acorn@8.8.1: - resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} - peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - dependencies: - acorn: 8.8.1 - dev: true - - /acorn-walk/7.2.0: - resolution: {integrity: sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==} - engines: {node: '>=0.4.0'} - dev: true - - /acorn-walk/8.2.0: - resolution: {integrity: 
sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} - engines: {node: '>=0.4.0'} - dev: true - - /acorn/7.4.1: - resolution: {integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==} - engines: {node: '>=0.4.0'} - hasBin: true - dev: true - - /acorn/8.8.1: - resolution: {integrity: sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==} - engines: {node: '>=0.4.0'} - hasBin: true - dev: true - - /acorn/8.8.2: - resolution: {integrity: sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==} - engines: {node: '>=0.4.0'} - hasBin: true - dev: true - - /address/1.2.2: - resolution: {integrity: sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA==} - engines: {node: '>= 10.0.0'} - dev: true - - /agent-base/5.1.1: - resolution: {integrity: sha512-TMeqbNl2fMW0nMjTEPOwe3J/PRFP4vqeoNuQMG0HlMrtm5QxKqdvAkZ1pRBQ/ulIyDD5Yq0nJ7YbdD8ey0TO3g==} - engines: {node: '>= 6.0.0'} - dev: true - - /agent-base/6.0.2: - resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} - engines: {node: '>= 6.0.0'} - dependencies: - debug: 4.3.4 - transitivePeerDependencies: - - supports-color - dev: true - - /aggregate-error/3.1.0: - resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} - engines: {node: '>=8'} - dependencies: - clean-stack: 2.2.0 - indent-string: 4.0.0 - dev: true - - /ajv-draft-04/1.0.0_ajv@8.12.0: - resolution: {integrity: sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==} - peerDependencies: - ajv: ^8.5.0 - peerDependenciesMeta: - ajv: - optional: true - dependencies: - ajv: 8.12.0 - dev: true - - /ajv-errors/3.0.0_ajv@8.12.0: - resolution: {integrity: 
sha512-V3wD15YHfHz6y0KdhYFjyy9vWtEVALT9UrxfN3zqlI6dMioHnJrqOYfyPKol3oqrnCM9uwkcdCwkJ0WUcbLMTQ==} - peerDependencies: - ajv: ^8.0.1 - dependencies: - ajv: 8.12.0 - dev: true - - /ajv-formats/2.1.1_ajv@8.12.0: - resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} - peerDependencies: - ajv: ^8.0.0 - peerDependenciesMeta: - ajv: - optional: true - dependencies: - ajv: 8.12.0 - dev: true - - /ajv/6.12.6: - resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} - dependencies: - fast-deep-equal: 3.1.3 - fast-json-stable-stringify: 2.1.0 - json-schema-traverse: 0.4.1 - uri-js: 4.4.1 - dev: true - - /ajv/8.12.0: - resolution: {integrity: sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==} - dependencies: - fast-deep-equal: 3.1.3 - json-schema-traverse: 1.0.0 - require-from-string: 2.0.2 - uri-js: 4.4.1 - dev: true - - /ansi-align/3.0.1: - resolution: {integrity: sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==} - dependencies: - string-width: 4.2.3 - dev: true - - /ansi-colors/4.1.3: - resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} - engines: {node: '>=6'} - dev: true - - /ansi-escapes/4.3.2: - resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} - engines: {node: '>=8'} - dependencies: - type-fest: 0.21.3 - dev: true - - /ansi-regex/5.0.1: - resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} - engines: {node: '>=8'} - - /ansi-regex/6.0.1: - resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} - engines: {node: '>=12'} - dev: true - - /ansi-styles/3.2.1: - resolution: 
{integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} - engines: {node: '>=4'} - dependencies: - color-convert: 1.9.3 - - /ansi-styles/4.3.0: - resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} - engines: {node: '>=8'} - dependencies: - color-convert: 2.0.1 - - /ansi-styles/5.2.0: - resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} - engines: {node: '>=10'} - dev: true - - /ansi-styles/6.2.1: - resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} - engines: {node: '>=12'} - dev: true - - /anymatch/3.1.3: - resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} - engines: {node: '>= 8'} - dependencies: - normalize-path: 3.0.0 - picomatch: 2.3.1 - - /app-root-dir/1.0.2: - resolution: {integrity: sha512-jlpIfsOoNoafl92Sz//64uQHGSyMrD2vYG5d8o2a4qGvyNCvXur7bzIsWtAC/6flI2RYAp3kv8rsfBtaLm7w0g==} - dev: true - - /aproba/2.0.0: - resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==} - dev: true - - /are-we-there-yet/2.0.0: - resolution: {integrity: sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==} - engines: {node: '>=10'} - dependencies: - delegates: 1.0.0 - readable-stream: 3.6.0 - dev: true - - /arg/4.1.3: - resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} - dev: true - - /argparse/1.0.10: - resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} - dependencies: - sprintf-js: 1.0.3 - dev: true - - /argparse/2.0.1: - resolution: {integrity: 
sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - - /aria-query/5.1.3: - resolution: {integrity: sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==} - dependencies: - deep-equal: 2.2.0 - dev: true - - /arr-diff/4.0.0: - resolution: {integrity: sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA==} - engines: {node: '>=0.10.0'} - dev: true - - /arr-flatten/1.1.0: - resolution: {integrity: sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==} - engines: {node: '>=0.10.0'} - dev: true - - /arr-union/3.1.0: - resolution: {integrity: sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q==} - engines: {node: '>=0.10.0'} - dev: true - - /array-find-index/1.0.2: - resolution: {integrity: sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==} - engines: {node: '>=0.10.0'} - dev: true - - /array-flatten/1.1.1: - resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} - dev: true - - /array-includes/3.1.6: - resolution: {integrity: sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - get-intrinsic: 1.1.3 - is-string: 1.0.7 - dev: true - - /array-union/2.1.0: - resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} - engines: {node: '>=8'} - dev: true - - /array-unique/0.3.2: - resolution: {integrity: sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ==} - engines: {node: '>=0.10.0'} - dev: true - - /array.prototype.flat/1.3.1: - resolution: {integrity: 
sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - es-shim-unscopables: 1.0.0 - dev: true - - /array.prototype.flatmap/1.3.1: - resolution: {integrity: sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - es-shim-unscopables: 1.0.0 - dev: true - - /array.prototype.tosorted/1.1.1: - resolution: {integrity: sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - es-shim-unscopables: 1.0.0 - get-intrinsic: 1.1.3 - dev: true - - /arrify/1.0.1: - resolution: {integrity: sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==} - engines: {node: '>=0.10.0'} - dev: true - - /as-table/1.0.55: - resolution: {integrity: sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==} - dependencies: - printable-characters: 1.0.42 - dev: true - - /asap/2.0.6: - resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} - dev: true - - /assert/2.0.0: - resolution: {integrity: sha512-se5Cd+js9dXJnu6Ag2JFc00t+HmHOen+8Q+L7O9zI0PqQXr20uk2J0XQqMxZEeo5U50o8Nvmmx7dZrl+Ufr35A==} - dependencies: - es6-object-assign: 1.1.0 - is-nan: 1.3.2 - object-is: 1.1.5 - util: 0.12.5 - dev: true - - /assign-symbols/1.0.0: - resolution: {integrity: sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw==} - engines: {node: '>=0.10.0'} - dev: true - - /ast-types-flow/0.0.7: - resolution: {integrity: sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==} 
- dev: true - - /ast-types/0.13.4: - resolution: {integrity: sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==} - engines: {node: '>=4'} - dependencies: - tslib: 2.5.0 - dev: true - - /ast-types/0.14.2: - resolution: {integrity: sha512-O0yuUDnZeQDL+ncNGlJ78BiO4jnYI3bvMsD5prT0/nsgijG/LpNBIr63gTjVTNsiGkgQhiyCShTgxt8oXOrklA==} - engines: {node: '>=4'} - dependencies: - tslib: 2.4.1 - dev: true - - /ast-types/0.16.1: - resolution: {integrity: sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==} - engines: {node: '>=4'} - dependencies: - tslib: 2.5.0 - dev: true - - /astral-regex/2.0.0: - resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} - engines: {node: '>=8'} - dev: true - - /astring/1.8.4: - resolution: {integrity: sha512-97a+l2LBU3Op3bBQEff79i/E4jMD2ZLFD8rHx9B6mXyB2uQwhJQYfiDqUwtfjF4QA1F2qs//N6Cw8LetMbQjcw==} - hasBin: true - dev: true - - /async-limiter/1.0.1: - resolution: {integrity: sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==} - dev: true - - /async/3.2.4: - resolution: {integrity: sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==} - dev: true - - /asynckit/0.4.0: - resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} - - /atob/2.1.2: - resolution: {integrity: sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==} - engines: {node: '>= 4.5.0'} - hasBin: true - dev: true - - /available-typed-arrays/1.0.5: - resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==} - engines: {node: '>= 0.4'} - dev: true - - /axe-core/4.6.2: - resolution: {integrity: 
sha512-b1WlTV8+XKLj9gZy2DZXgQiyDp9xkkoe2a6U6UbYccScq2wgH/YwCeI2/Jq2mgo0HzQxqJOjWZBLeA/mqsk5Mg==} - engines: {node: '>=4'} - dev: true - - /axobject-query/3.1.1: - resolution: {integrity: sha512-goKlv8DZrK9hUh975fnHzhNIO4jUnFCfv/dszV5VwUGDFjI6vQ2VwoyjYjYNEbBE8AH87TduWP5uyDR1D+Iteg==} - dependencies: - deep-equal: 2.2.0 - dev: true - - /babel-core/7.0.0-bridge.0_@babel+core@7.20.12: - resolution: {integrity: sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - dev: true - - /babel-jest/29.3.1_@babel+core@7.20.12: - resolution: {integrity: sha512-aard+xnMoxgjwV70t0L6wkW/3HQQtV+O0PEimxKgzNqCJnbYmroPojdP2tqKSOAt8QAKV/uSZU8851M7B5+fcA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - '@babel/core': ^7.8.0 - dependencies: - '@babel/core': 7.20.12 - '@jest/transform': 29.3.1 - '@types/babel__core': 7.1.20 - babel-plugin-istanbul: 6.1.1 - babel-preset-jest: 29.2.0_@babel+core@7.20.12 - chalk: 4.1.2 - graceful-fs: 4.2.10 - slash: 3.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-istanbul/6.1.1: - resolution: {integrity: sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==} - engines: {node: '>=8'} - dependencies: - '@babel/helper-plugin-utils': 7.20.2 - '@istanbuljs/load-nyc-config': 1.1.0 - '@istanbuljs/schema': 0.1.3 - istanbul-lib-instrument: 5.2.1 - test-exclude: 6.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-jest-hoist/29.2.0: - resolution: {integrity: sha512-TnspP2WNiR3GLfCsUNHqeXw0RoQ2f9U5hQ5L3XFpwuO8htQmSrhh8qsB6vi5Yi8+kuynN1yjDjQsPfkebmB6ZA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/template': 7.20.7 - '@babel/types': 7.20.7 - '@types/babel__core': 7.1.20 - '@types/babel__traverse': 7.18.3 - dev: true - - /babel-plugin-macros/3.1.0: - resolution: {integrity: 
sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==} - engines: {node: '>=10', npm: '>=6'} - dependencies: - '@babel/runtime': 7.20.7 - cosmiconfig: 7.1.0 - resolve: 1.22.1 - - /babel-plugin-polyfill-corejs2/0.3.3_@babel+core@7.20.12: - resolution: {integrity: sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/compat-data': 7.20.10 - '@babel/core': 7.20.12 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.20.12 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-polyfill-corejs3/0.6.0_@babel+core@7.20.12: - resolution: {integrity: sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.20.12 - core-js-compat: 3.27.1 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-polyfill-regenerator/0.4.1_@babel+core@7.20.12: - resolution: {integrity: sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.20.12 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.20.12 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-styled-components/2.0.7_styled-components@5.3.6: - resolution: {integrity: sha512-i7YhvPgVqRKfoQ66toiZ06jPNA3p6ierpfUuEWxNF+fV27Uv5gxBkf8KZLHUCc1nFA9j6+80pYoIpqCeyW3/bA==} - peerDependencies: - styled-components: '>= 2' - dependencies: - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-module-imports': 7.18.6 - babel-plugin-syntax-jsx: 6.18.0 - lodash: 4.17.21 - picomatch: 2.3.1 - styled-components: 5.3.6_sfoxds7t5ydpegc3knd667wn6m - dev: false - - 
/babel-plugin-syntax-jsx/6.18.0: - resolution: {integrity: sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw==} - dev: false - - /babel-plugin-transform-react-remove-prop-types/0.4.24: - resolution: {integrity: sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA==} - dev: true - - /babel-preset-current-node-syntax/1.0.1_@babel+core@7.20.12: - resolution: {integrity: sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.20.12 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.20.12 - '@babel/plugin-syntax-bigint': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-class-properties': 7.12.13_@babel+core@7.20.12 - '@babel/plugin-syntax-import-meta': 7.10.4_@babel+core@7.20.12 - '@babel/plugin-syntax-json-strings': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-logical-assignment-operators': 7.10.4_@babel+core@7.20.12 - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.20.12 - '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-optional-catch-binding': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.20.12 - '@babel/plugin-syntax-top-level-await': 7.14.5_@babel+core@7.20.12 - dev: true - - /babel-preset-jest/29.2.0_@babel+core@7.20.12: - resolution: {integrity: sha512-z9JmMJppMxNv8N7fNRHvhMg9cvIkMxQBXgFkane3yKVEvEOP+kB50lk8DFRvF9PGqbyXxlmebKWhuDORO8RgdA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.20.12 - babel-plugin-jest-hoist: 29.2.0 - babel-preset-current-node-syntax: 1.0.1_@babel+core@7.20.12 - dev: true - - /babel-preset-react-app/10.0.1: - resolution: {integrity: 
sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg==} - dependencies: - '@babel/core': 7.20.12 - '@babel/plugin-proposal-class-properties': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-decorators': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-numeric-separator': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-optional-chaining': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-proposal-private-methods': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-private-property-in-object': 7.20.5_@babel+core@7.20.12 - '@babel/plugin-transform-flow-strip-types': 7.19.0_@babel+core@7.20.12 - '@babel/plugin-transform-react-display-name': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-transform-runtime': 7.19.6_@babel+core@7.20.12 - '@babel/preset-env': 7.20.2_@babel+core@7.20.12 - '@babel/preset-react': 7.18.6_@babel+core@7.20.12 - '@babel/preset-typescript': 7.18.6_@babel+core@7.20.12 - '@babel/runtime': 7.20.7 - babel-plugin-macros: 3.1.0 - babel-plugin-transform-react-remove-prop-types: 0.4.24 - transitivePeerDependencies: - - supports-color - dev: true - - /backslash/0.2.0: - resolution: {integrity: sha512-Avs+8FUZ1HF/VFP4YWwHQZSGzRPm37ukU1JQYQWijuHhtXdOuAzcZ8PcAzfIw898a8PyBzdn+RtnKA6MzW0X2A==} - dev: true - - /bail/2.0.2: - resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==} - dev: false - - /balanced-match/1.0.2: - resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - - /balanced-match/2.0.0: - resolution: {integrity: sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==} - dev: true - - /base/0.11.2: - resolution: {integrity: sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==} - engines: {node: '>=0.10.0'} - 
dependencies: - cache-base: 1.0.1 - class-utils: 0.3.6 - component-emitter: 1.3.0 - define-property: 1.0.0 - isobject: 3.0.1 - mixin-deep: 1.3.2 - pascalcase: 0.1.1 - dev: true - - /base64-js/1.5.1: - resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - dev: false - - /better-opn/2.1.1: - resolution: {integrity: sha512-kIPXZS5qwyKiX/HcRvDYfmBQUa8XP17I0mYZZ0y4UhpYOSvtsLHDYqmomS+Mj20aDvD3knEiQ0ecQy2nhio3yA==} - engines: {node: '>8.0.0'} - dependencies: - open: 7.4.2 - dev: true - - /big-integer/1.6.51: - resolution: {integrity: sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==} - engines: {node: '>=0.6'} - - /binary-extensions/2.2.0: - resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} - engines: {node: '>=8'} - - /body-parser/1.20.1: - resolution: {integrity: sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==} - engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - dependencies: - bytes: 3.1.2 - content-type: 1.0.4 - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - on-finished: 2.4.1 - qs: 6.11.0 - raw-body: 2.5.1 - type-is: 1.6.18 - unpipe: 1.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /boxen/5.1.2: - resolution: {integrity: sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==} - engines: {node: '>=10'} - dependencies: - ansi-align: 3.0.1 - camelcase: 6.3.0 - chalk: 4.1.2 - cli-boxes: 2.2.1 - string-width: 4.2.3 - type-fest: 0.20.2 - widest-line: 3.1.0 - wrap-ansi: 7.0.0 - dev: true - - /bplist-parser/0.2.0: - resolution: {integrity: sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==} - engines: {node: '>= 5.10.0'} - dependencies: - big-integer: 1.6.51 - dev: true - - 
/brace-expansion/1.1.11: - resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} - dependencies: - balanced-match: 1.0.2 - concat-map: 0.0.1 - - /brace-expansion/2.0.1: - resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} - dependencies: - balanced-match: 1.0.2 - dev: true - - /braces/2.3.2: - resolution: {integrity: sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==} - engines: {node: '>=0.10.0'} - dependencies: - arr-flatten: 1.1.0 - array-unique: 0.3.2 - extend-shallow: 2.0.1 - fill-range: 4.0.0 - isobject: 3.0.1 - repeat-element: 1.1.4 - snapdragon: 0.8.2 - snapdragon-node: 2.1.1 - split-string: 3.1.0 - to-regex: 3.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /braces/3.0.2: - resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} - engines: {node: '>=8'} - dependencies: - fill-range: 7.0.1 - - /broadcast-channel/3.7.0: - resolution: {integrity: sha512-cIAKJXAxGJceNZGTZSBzMxzyOn72cVgPnKx4dc6LRjQgbaJUQqhy5rzL3zbMxkMWsGKkv2hSFkPRMEXfoMZ2Mg==} - dependencies: - '@babel/runtime': 7.20.7 - detect-node: 2.1.0 - js-sha3: 0.8.0 - microseconds: 0.2.0 - nano-time: 1.0.0 - oblivious-set: 1.0.0 - rimraf: 3.0.2 - unload: 2.2.0 - dev: false - - /browser-assert/1.2.1: - resolution: {integrity: sha512-nfulgvOR6S4gt9UKCeGJOuSGBPGiFT6oQ/2UBnvTY/5aQ1PnksW72fhZkM30DzoRRv2WpwZf1vHHEr3mtuXIWQ==} - dev: true - - /browserslist/4.21.4: - resolution: {integrity: sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==} - engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} - hasBin: true - dependencies: - caniuse-lite: 1.0.30001445 - electron-to-chromium: 1.4.284 - node-releases: 2.0.8 - update-browserslist-db: 1.0.10_browserslist@4.21.4 - - /bser/2.1.1: - 
resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} - dependencies: - node-int64: 0.4.0 - dev: true - - /buffer-crc32/0.2.13: - resolution: {integrity: sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==} - dev: true - - /buffer-from/1.1.2: - resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} - dev: true - - /builtins/1.0.3: - resolution: {integrity: sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==} - dev: true - - /bytes/3.0.0: - resolution: {integrity: sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==} - engines: {node: '>= 0.8'} - dev: true - - /bytes/3.1.2: - resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} - engines: {node: '>= 0.8'} - dev: true - - /c8/7.12.0: - resolution: {integrity: sha512-CtgQrHOkyxr5koX1wEUmN/5cfDa2ckbHRA4Gy5LAL0zaCFtVWJS5++n+w4/sr2GWGerBxgTjpKeDclk/Qk6W/A==} - engines: {node: '>=10.12.0'} - hasBin: true - dependencies: - '@bcoe/v8-coverage': 0.2.3 - '@istanbuljs/schema': 0.1.3 - find-up: 5.0.0 - foreground-child: 2.0.0 - istanbul-lib-coverage: 3.2.0 - istanbul-lib-report: 3.0.0 - istanbul-reports: 3.1.5 - rimraf: 3.0.2 - test-exclude: 6.0.0 - v8-to-istanbul: 9.0.1 - yargs: 16.2.0 - yargs-parser: 20.2.9 - dev: true - - /cac/6.7.14: - resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} - engines: {node: '>=8'} - dev: true - - /cache-base/1.0.1: - resolution: {integrity: sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==} - engines: {node: '>=0.10.0'} - dependencies: - collection-visit: 1.0.0 - component-emitter: 1.3.0 - get-value: 2.0.6 - has-value: 1.0.0 - isobject: 3.0.1 - set-value: 
2.0.1 - to-object-path: 0.3.0 - union-value: 1.0.1 - unset-value: 1.0.0 - dev: true - - /call-bind/1.0.2: - resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} - dependencies: - function-bind: 1.1.1 - get-intrinsic: 1.1.3 - dev: true - - /call-me-maybe/1.0.2: - resolution: {integrity: sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==} - dev: true - - /callsites/3.1.0: - resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} - engines: {node: '>=6'} - - /camelcase-keys/6.2.2: - resolution: {integrity: sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==} - engines: {node: '>=8'} - dependencies: - camelcase: 5.3.1 - map-obj: 4.3.0 - quick-lru: 4.0.1 - dev: true - - /camelcase/5.3.1: - resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} - engines: {node: '>=6'} - dev: true - - /camelcase/6.3.0: - resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} - engines: {node: '>=10'} - dev: true - - /camelize/1.0.1: - resolution: {integrity: sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==} - dev: false - - /caniuse-lite/1.0.30001445: - resolution: {integrity: sha512-8sdQIdMztYmzfTMO6KfLny878Ln9c2M0fc7EH60IjlP4Dc4PiCy7K2Vl3ITmWgOyPgVQKa5x+UP/KqFsxj4mBg==} - - /ccount/2.0.1: - resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} - dev: false - - /chalk/2.4.2: - resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} - engines: {node: '>=4'} - dependencies: - ansi-styles: 3.2.1 - escape-string-regexp: 1.0.5 - supports-color: 5.5.0 - - /chalk/3.0.0: 
- resolution: {integrity: sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==} - engines: {node: '>=8'} - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - dev: true - - /chalk/4.1.2: - resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} - engines: {node: '>=10'} - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - dev: true - - /char-regex/1.0.2: - resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} - engines: {node: '>=10'} - dev: true - - /character-entities-html4/2.1.0: - resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} - dev: false - - /character-entities-legacy/3.0.0: - resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} - dev: false - - /character-entities/2.0.2: - resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} - dev: false - - /character-reference-invalid/2.0.1: - resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} - dev: false - - /chokidar/3.5.3: - resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} - engines: {node: '>= 8.10.0'} - dependencies: - anymatch: 3.1.3 - braces: 3.0.2 - glob-parent: 5.1.2 - is-binary-path: 2.1.0 - is-glob: 4.0.3 - normalize-path: 3.0.0 - readdirp: 3.6.0 - optionalDependencies: - fsevents: 2.3.2 - - /chownr/2.0.0: - resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} - engines: {node: '>=10'} - dev: true - - /ci-info/3.7.1: - resolution: {integrity: 
sha512-4jYS4MOAaCIStSRwiuxc4B8MYhIe676yO1sYGzARnjXkWpmzZMMYxY6zu8WYWDhSuth5zhrQ1rhNSibyyvv4/w==} - engines: {node: '>=8'} - dev: true - - /cjs-module-lexer/1.2.2: - resolution: {integrity: sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==} - dev: true - - /class-utils/0.3.6: - resolution: {integrity: sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==} - engines: {node: '>=0.10.0'} - dependencies: - arr-union: 3.1.0 - define-property: 0.2.5 - isobject: 3.0.1 - static-extend: 0.1.2 - dev: true - - /classnames/2.3.2: - resolution: {integrity: sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==} - dev: false - - /clean-stack/2.2.0: - resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} - engines: {node: '>=6'} - dev: true - - /cli-boxes/2.2.1: - resolution: {integrity: sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==} - engines: {node: '>=6'} - dev: true - - /cli-cursor/3.1.0: - resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} - engines: {node: '>=8'} - dependencies: - restore-cursor: 3.1.0 - dev: true - - /cli-table3/0.6.3: - resolution: {integrity: sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==} - engines: {node: 10.* || >= 12.*} - dependencies: - string-width: 4.2.3 - optionalDependencies: - '@colors/colors': 1.5.0 - dev: true - - /cli-truncate/2.1.0: - resolution: {integrity: sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==} - engines: {node: '>=8'} - dependencies: - slice-ansi: 3.0.0 - string-width: 4.2.3 - dev: true - - /cli-truncate/3.1.0: - resolution: {integrity: 
sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - slice-ansi: 5.0.0 - string-width: 5.1.2 - dev: true - - /client-only/0.0.1: - resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} - dev: false - - /cliui/7.0.4: - resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} - dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 7.0.0 - - /cliui/8.0.1: - resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} - engines: {node: '>=12'} - dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 7.0.0 - dev: true - - /clone-deep/4.0.1: - resolution: {integrity: sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==} - engines: {node: '>=6'} - dependencies: - is-plain-object: 2.0.4 - kind-of: 6.0.3 - shallow-clone: 3.0.1 - dev: true - - /clone/1.0.4: - resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} - engines: {node: '>=0.8'} - dev: true - - /clsx/1.2.1: - resolution: {integrity: sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==} - engines: {node: '>=6'} - dev: false - - /co/4.6.0: - resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} - engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} - dev: true - - /collect-v8-coverage/1.0.1: - resolution: {integrity: sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==} - dev: true - - /collection-visit/1.0.0: - resolution: {integrity: sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw==} - 
engines: {node: '>=0.10.0'} - dependencies: - map-visit: 1.0.0 - object-visit: 1.0.1 - dev: true - - /color-convert/1.9.3: - resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} - dependencies: - color-name: 1.1.3 - - /color-convert/2.0.1: - resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} - engines: {node: '>=7.0.0'} - dependencies: - color-name: 1.1.4 - - /color-name/1.1.3: - resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} - - /color-name/1.1.4: - resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - - /color-support/1.1.3: - resolution: {integrity: sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==} - hasBin: true - dev: true - - /colord/2.9.3: - resolution: {integrity: sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==} - dev: true - - /colorette/2.0.19: - resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==} - dev: true - - /combined-stream/1.0.8: - resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} - engines: {node: '>= 0.8'} - dependencies: - delayed-stream: 1.0.0 - - /comma-separated-tokens/2.0.3: - resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} - dev: false - - /commander/2.20.3: - resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} - dev: true - - /commander/6.2.1: - resolution: {integrity: sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==} - engines: {node: '>= 6'} - 
dev: true - - /commander/8.3.0: - resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} - engines: {node: '>= 12'} - dev: true - - /commander/9.5.0: - resolution: {integrity: sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==} - engines: {node: ^12.20.0 || >=14} - dev: true - - /commondir/1.0.1: - resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==} - dev: true - - /compare-versions/4.1.4: - resolution: {integrity: sha512-FemMreK9xNyL8gQevsdRMrvO4lFCkQP7qbuktn1q8ndcNk1+0mz7lgE7b/sNvbhVgY4w6tMN1FDp6aADjqw2rw==} - dev: true - - /component-emitter/1.3.0: - resolution: {integrity: sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==} - dev: true - - /compressible/2.0.18: - resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.52.0 - dev: true - - /compression/1.7.4: - resolution: {integrity: sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==} - engines: {node: '>= 0.8.0'} - dependencies: - accepts: 1.3.8 - bytes: 3.0.0 - compressible: 2.0.18 - debug: 2.6.9 - on-headers: 1.0.2 - safe-buffer: 5.1.2 - vary: 1.1.2 - transitivePeerDependencies: - - supports-color - dev: true - - /concat-map/0.0.1: - resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} - - /concat-stream/1.6.2: - resolution: {integrity: sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==} - engines: {'0': node >= 0.8} - dependencies: - buffer-from: 1.1.2 - inherits: 2.0.4 - readable-stream: 2.3.7 - typedarray: 0.0.6 - dev: true - - /confusing-browser-globals/1.0.11: - resolution: {integrity: 
sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==} - dev: true - - /console-control-strings/1.1.0: - resolution: {integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==} - dev: true - - /content-disposition/0.5.4: - resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} - engines: {node: '>= 0.6'} - dependencies: - safe-buffer: 5.2.1 - dev: true - - /content-type/1.0.4: - resolution: {integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==} - engines: {node: '>= 0.6'} - dev: true - - /convert-source-map/1.9.0: - resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==} - - /convert-source-map/2.0.0: - resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} - dev: true - - /cookie-signature/1.0.6: - resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} - dev: true - - /cookie/0.5.0: - resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} - engines: {node: '>= 0.6'} - dev: true - - /copy-anything/2.0.6: - resolution: {integrity: sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw==} - dependencies: - is-what: 3.14.1 - dev: true - - /copy-descriptor/0.1.1: - resolution: {integrity: sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw==} - engines: {node: '>=0.10.0'} - dev: true - - /copy-to-clipboard/3.3.3: - resolution: {integrity: sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==} - dependencies: - toggle-selection: 1.0.6 - dev: false - - /core-js-compat/3.27.1: - 
resolution: {integrity: sha512-Dg91JFeCDA17FKnneN7oCMz4BkQ4TcffkgHP4OWwp9yx3pi7ubqMDXXSacfNak1PQqjc95skyt+YBLHQJnkJwA==} - dependencies: - browserslist: 4.21.4 - dev: true - - /core-util-is/1.0.3: - resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} - dev: true - - /cosmiconfig/7.1.0: - resolution: {integrity: sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==} - engines: {node: '>=10'} - dependencies: - '@types/parse-json': 4.0.0 - import-fresh: 3.3.0 - parse-json: 5.2.0 - path-type: 4.0.0 - yaml: 1.10.2 - - /create-require/1.1.1: - resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - dev: true - - /cross-spawn/7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} - engines: {node: '>= 8'} - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - which: 2.0.2 - dev: true - - /crypto-random-string/2.0.0: - resolution: {integrity: sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==} - engines: {node: '>=8'} - dev: true - - /css-color-keywords/1.0.0: - resolution: {integrity: sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg==} - engines: {node: '>=4'} - dev: false - - /css-functions-list/3.1.0: - resolution: {integrity: sha512-/9lCvYZaUbBGvYUgYGFJ4dcYiyqdhSjG7IPVluoV8A1ILjkF7ilmhp1OGUz8n+nmBcu0RNrQAzgD8B6FJbrt2w==} - engines: {node: '>=12.22'} - dev: true - - /css-in-js-utils/3.1.0: - resolution: {integrity: sha512-fJAcud6B3rRu+KHYk+Bwf+WFL2MDCJJ1XG9x137tJQ0xYxor7XziQtuGFbWNdqrvF4Tk26O3H73nfVqXt/fW1A==} - dependencies: - hyphenate-style-name: 1.0.4 - dev: false - - /css-selector-tokenizer/0.7.3: - resolution: {integrity: sha512-jWQv3oCEL5kMErj4wRnK/OPoBi0D+P1FR2cDCKYPaMeD2eW3/mttav8HT4hT1CKopiJI/psEULjkClhvJo4Lvg==} - 
dependencies: - cssesc: 3.0.0 - fastparse: 1.1.2 - dev: true - - /css-to-react-native/3.1.0: - resolution: {integrity: sha512-AryfkFA29b4I3vG7N4kxFboq15DxwSXzhXM37XNEjwJMgjYIc8BcqfiprpAqX0zadI5PMByEIwAMzXxk5Vcc4g==} - dependencies: - camelize: 1.0.1 - css-color-keywords: 1.0.0 - postcss-value-parser: 4.2.0 - dev: false - - /css-tree/1.1.3: - resolution: {integrity: sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==} - engines: {node: '>=8.0.0'} - dependencies: - mdn-data: 2.0.14 - source-map: 0.6.1 - dev: false - - /css-unit-converter/1.1.2: - resolution: {integrity: sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA==} - dev: false - - /css.escape/1.5.1: - resolution: {integrity: sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==} - dev: true - - /cssesc/3.0.0: - resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} - engines: {node: '>=4'} - hasBin: true - dev: true - - /cssom/0.3.8: - resolution: {integrity: sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==} - dev: true - - /cssom/0.5.0: - resolution: {integrity: sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw==} - dev: true - - /cssstyle/2.3.0: - resolution: {integrity: sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==} - engines: {node: '>=8'} - dependencies: - cssom: 0.3.8 - dev: true - - /csstype/3.1.1: - resolution: {integrity: sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==} - - /cuid/2.1.8: - resolution: {integrity: sha512-xiEMER6E7TlTPnDxrM4eRiC6TRgjNX9xzEZ5U/Se2YJKr7Mq4pJn/2XEHjl3STcSh96GmkHPcBXLES8M29wyyg==} - deprecated: Cuid and other k-sortable and non-cryptographic ids (Ulid, ObjectId, KSUID, all UUIDs) are all 
insecure. Use @paralleldrive/cuid2 instead. - dev: true - - /d3-array/3.2.1: - resolution: {integrity: sha512-gUY/qeHq/yNqqoCKNq4vtpFLdoCdvyNpWoC/KNjhGbhDuQpAM9sIQQKkXSNpXa9h5KySs/gzm7R88WkUutgwWQ==} - engines: {node: '>=12'} - dependencies: - internmap: 2.0.3 - dev: false - - /d3-color/3.1.0: - resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} - engines: {node: '>=12'} - dev: false - - /d3-ease/3.0.1: - resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} - engines: {node: '>=12'} - dev: false - - /d3-format/3.1.0: - resolution: {integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==} - engines: {node: '>=12'} - dev: false - - /d3-interpolate/3.0.1: - resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} - engines: {node: '>=12'} - dependencies: - d3-color: 3.1.0 - dev: false - - /d3-path/3.1.0: - resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} - engines: {node: '>=12'} - dev: false - - /d3-scale/4.0.2: - resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} - engines: {node: '>=12'} - dependencies: - d3-array: 3.2.1 - d3-format: 3.1.0 - d3-interpolate: 3.0.1 - d3-time: 3.1.0 - d3-time-format: 4.1.0 - dev: false - - /d3-shape/3.2.0: - resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} - engines: {node: '>=12'} - dependencies: - d3-path: 3.1.0 - dev: false - - /d3-time-format/4.1.0: - resolution: {integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==} - engines: {node: '>=12'} - dependencies: - d3-time: 3.1.0 - dev: false - - /d3-time/3.1.0: - 
resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==} - engines: {node: '>=12'} - dependencies: - d3-array: 3.2.1 - dev: false - - /d3-timer/3.0.1: - resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} - engines: {node: '>=12'} - dev: false - - /damerau-levenshtein/1.0.8: - resolution: {integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==} - dev: true - - /data-uri-to-buffer/2.0.2: - resolution: {integrity: sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==} - dev: true - - /data-uri-to-buffer/3.0.1: - resolution: {integrity: sha512-WboRycPNsVw3B3TL559F7kuBUM4d8CgMEvk6xEJlOp7OBPjt6G7z8WMWlD2rOFZLk6OYfFIUGsCOWzcQH9K2og==} - engines: {node: '>= 6'} - dev: true - - /data-urls/3.0.2: - resolution: {integrity: sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==} - engines: {node: '>=12'} - dependencies: - abab: 2.0.6 - whatwg-mimetype: 3.0.0 - whatwg-url: 11.0.0 - dev: true - - /date-arithmetic/3.1.0: - resolution: {integrity: sha512-ynlmvduDVuzwDDYW3OF4RHCikdzegg0vWQtzwjiVKPs/RjZ93b/7AxIwhfZKxSQQFA8l9lwhkyeDVQyrzbPUwA==} - dev: false - - /date-fns/2.29.3: - resolution: {integrity: sha512-dDCnyH2WnnKusqvZZ6+jA1O51Ibt8ZMRNkDZdyAyK4YfbDwa/cEmuztzG5pk6hqlp9aSBPYcjOlktquahGwGeA==} - engines: {node: '>=0.11'} - - /dayjs/1.11.7: - resolution: {integrity: sha512-+Yw9U6YO5TQohxLcIkrXBeY73WP3ejHWVvx8XCk3gxvQDCTEmS48ZrSZCKciI7Bhl/uCMyxYtE9UqRILmFphkQ==} - dev: false - - /debug/2.6.9: - resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.0.0 - dev: true - - /debug/3.2.7: - resolution: {integrity: 
sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.3 - dev: true - - /debug/4.3.4: - resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.2 - - /debug/4.3.4_supports-color@5.5.0: - resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.2 - supports-color: 5.5.0 - dev: false - - /debug/4.3.4_supports-color@9.3.1: - resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.2 - supports-color: 9.3.1 - dev: true - - /debuglog/1.0.1: - resolution: {integrity: sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw==} - dev: true - - /decamelize-keys/1.1.1: - resolution: {integrity: sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==} - engines: {node: '>=0.10.0'} - dependencies: - decamelize: 1.2.0 - map-obj: 1.0.1 - dev: true - - /decamelize/1.2.0: - resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} - engines: {node: '>=0.10.0'} - dev: true - - /decimal.js-light/2.5.1: - resolution: {integrity: sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==} - dev: false 
- - /decimal.js/10.4.3: - resolution: {integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==} - dev: true - - /decode-named-character-reference/1.0.2: - resolution: {integrity: sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==} - dependencies: - character-entities: 2.0.2 - dev: false - - /decode-uri-component/0.2.2: - resolution: {integrity: sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==} - engines: {node: '>=0.10'} - - /dedent/0.7.0: - resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==} - dev: true - - /deep-equal/2.2.0: - resolution: {integrity: sha512-RdpzE0Hv4lhowpIUKKMJfeH6C1pXdtT1/it80ubgWqwI3qpuxUBpC1S4hnHg+zjnuOoDkzUtUCEEkG+XG5l3Mw==} - dependencies: - call-bind: 1.0.2 - es-get-iterator: 1.1.3 - get-intrinsic: 1.1.3 - is-arguments: 1.1.1 - is-array-buffer: 3.0.1 - is-date-object: 1.0.5 - is-regex: 1.1.4 - is-shared-array-buffer: 1.0.2 - isarray: 2.0.5 - object-is: 1.1.5 - object-keys: 1.1.1 - object.assign: 4.1.4 - regexp.prototype.flags: 1.4.3 - side-channel: 1.0.4 - which-boxed-primitive: 1.0.2 - which-collection: 1.0.1 - which-typed-array: 1.1.9 - dev: true - - /deep-is/0.1.4: - resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} - dev: true - - /deepmerge/2.2.1: - resolution: {integrity: sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==} - engines: {node: '>=0.10.0'} - - /deepmerge/4.2.2: - resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==} - engines: {node: '>=0.10.0'} - - /default-browser-id/3.0.0: - resolution: {integrity: sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==} - engines: {node: '>=12'} - 
dependencies: - bplist-parser: 0.2.0 - untildify: 4.0.0 - dev: true - - /defaults/1.0.4: - resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} - dependencies: - clone: 1.0.4 - dev: true - - /define-lazy-prop/2.0.0: - resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} - engines: {node: '>=8'} - dev: true - - /define-properties/1.1.4: - resolution: {integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==} - engines: {node: '>= 0.4'} - dependencies: - has-property-descriptors: 1.0.0 - object-keys: 1.1.1 - dev: true - - /define-property/0.2.5: - resolution: {integrity: sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==} - engines: {node: '>=0.10.0'} - dependencies: - is-descriptor: 0.1.6 - dev: true - - /define-property/1.0.0: - resolution: {integrity: sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==} - engines: {node: '>=0.10.0'} - dependencies: - is-descriptor: 1.0.2 - dev: true - - /define-property/2.0.2: - resolution: {integrity: sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==} - engines: {node: '>=0.10.0'} - dependencies: - is-descriptor: 1.0.2 - isobject: 3.0.1 - dev: true - - /defu/6.1.2: - resolution: {integrity: sha512-+uO4+qr7msjNNWKYPHqN/3+Dx3NFkmIzayk2L1MyZQlvgZb/J1A0fo410dpKrN2SnqFjt8n4JL8fDJE0wIgjFQ==} - dev: true - - /degenerator/3.0.2: - resolution: {integrity: sha512-c0mef3SNQo56t6urUU6tdQAs+ThoD0o9B9MJ8HEt7NQcGEILCRFqQb7ZbP9JAv+QF1Ky5plydhMR/IrqWDm+TQ==} - engines: {node: '>= 6'} - dependencies: - ast-types: 0.13.4 - escodegen: 1.14.3 - esprima: 4.0.1 - vm2: 3.9.13 - dev: true - - /del/6.1.1: - resolution: {integrity: sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==} - engines: 
{node: '>=10'} - dependencies: - globby: 11.1.0 - graceful-fs: 4.2.10 - is-glob: 4.0.3 - is-path-cwd: 2.2.0 - is-path-inside: 3.0.3 - p-map: 4.0.0 - rimraf: 3.0.2 - slash: 3.0.0 - dev: true - - /delayed-stream/1.0.0: - resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} - engines: {node: '>=0.4.0'} - - /delegates/1.0.0: - resolution: {integrity: sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==} - dev: true - - /depd/2.0.0: - resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} - engines: {node: '>= 0.8'} - dev: true - - /dependency-graph/0.11.0: - resolution: {integrity: sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==} - engines: {node: '>= 0.6.0'} - dev: true - - /dequal/2.0.3: - resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} - engines: {node: '>=6'} - - /destroy/1.2.0: - resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} - engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - dev: true - - /detect-indent/6.1.0: - resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} - engines: {node: '>=8'} - dev: true - - /detect-newline/3.1.0: - resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} - engines: {node: '>=8'} - dev: true - - /detect-node/2.1.0: - resolution: {integrity: sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==} - dev: false - - /detect-package-manager/2.0.1: - resolution: {integrity: sha512-j/lJHyoLlWi6G1LDdLgvUtz60Zo5GEj+sVYtTVXnYLDPuzgC3llMxonXym9zIwhhUII8vjdw0LXxavpLqTbl1A==} - engines: {node: '>=12'} - 
dependencies: - execa: 5.1.1 - dev: true - - /detect-port/1.5.1: - resolution: {integrity: sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ==} - hasBin: true - dependencies: - address: 1.2.2 - debug: 4.3.4 - transitivePeerDependencies: - - supports-color - dev: true - - /dezalgo/1.0.4: - resolution: {integrity: sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==} - dependencies: - asap: 2.0.6 - wrappy: 1.0.2 - dev: true - - /diff-sequences/27.5.1: - resolution: {integrity: sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - dev: true - - /diff-sequences/29.3.1: - resolution: {integrity: sha512-hlM3QR272NXCi4pq+N4Kok4kOp6EsgOM3ZSpJI7Da3UAs+Ttsi8MRmB6trM/lhyzUxGfOgnpkHtgqm5Q/CTcfQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dev: true - - /diff/4.0.2: - resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} - engines: {node: '>=0.3.1'} - dev: true - - /diff/5.1.0: - resolution: {integrity: sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==} - engines: {node: '>=0.3.1'} - dev: false - - /dir-glob/3.0.1: - resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} - engines: {node: '>=8'} - dependencies: - path-type: 4.0.0 - dev: true - - /doctrine/2.1.0: - resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} - engines: {node: '>=0.10.0'} - dependencies: - esutils: 2.0.3 - dev: true - - /doctrine/3.0.0: - resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} - engines: {node: '>=6.0.0'} - dependencies: - esutils: 2.0.3 - dev: true - - 
/dom-accessibility-api/0.5.15: - resolution: {integrity: sha512-8o+oVqLQZoruQPYy3uAAQtc6YbtSiRq5aPJBhJ82YTJRHvI6ofhYAkC81WmjFTnfUbqg6T3aCglIpU9p/5e7Cw==} - dev: true - - /dom-helpers/3.4.0: - resolution: {integrity: sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA==} - dependencies: - '@babel/runtime': 7.20.7 - dev: false - - /dom-helpers/5.2.1: - resolution: {integrity: sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==} - dependencies: - '@babel/runtime': 7.20.7 - csstype: 3.1.1 - dev: false - - /dom-serializer/2.0.0: - resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} - dependencies: - domelementtype: 2.3.0 - domhandler: 5.0.3 - entities: 4.4.0 - - /domelementtype/2.3.0: - resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} - - /domexception/4.0.0: - resolution: {integrity: sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==} - engines: {node: '>=12'} - dependencies: - webidl-conversions: 7.0.0 - dev: true - - /domhandler/5.0.3: - resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} - engines: {node: '>= 4'} - dependencies: - domelementtype: 2.3.0 - - /domutils/3.0.1: - resolution: {integrity: sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==} - dependencies: - dom-serializer: 2.0.0 - domelementtype: 2.3.0 - domhandler: 5.0.3 - - /dotenv-expand/10.0.0: - resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==} - engines: {node: '>=12'} - dev: true - - /dotenv/16.0.3: - resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==} - engines: {node: '>=12'} - 
dev: true - - /eastasianwidth/0.2.0: - resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - dev: true - - /ee-first/1.1.1: - resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - dev: true - - /ejs/3.1.8: - resolution: {integrity: sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ==} - engines: {node: '>=0.10.0'} - hasBin: true - dependencies: - jake: 10.8.5 - dev: true - - /electron-to-chromium/1.4.284: - resolution: {integrity: sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA==} - - /emittery/0.13.1: - resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==} - engines: {node: '>=12'} - dev: true - - /emoji-regex/8.0.0: - resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - - /emoji-regex/9.2.2: - resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} - dev: true - - /encodeurl/1.0.2: - resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} - engines: {node: '>= 0.8'} - dev: true - - /enquire.js/2.1.6: - resolution: {integrity: sha512-/KujNpO+PT63F7Hlpu4h3pE3TokKRHN26JYmQpPyjkRD/N57R7bPDNojMXdi7uveAKjYB7yQnartCxZnFWr0Xw==} - dev: false - - /enquirer/2.3.6: - resolution: {integrity: sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==} - engines: {node: '>=8.6'} - dependencies: - ansi-colors: 4.1.3 - dev: true - - /entities/4.4.0: - resolution: {integrity: sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==} - engines: {node: '>=0.12'} - - /envinfo/7.8.1: - resolution: {integrity: 
sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==} - engines: {node: '>=4'} - hasBin: true - dev: true - - /eol/0.9.1: - resolution: {integrity: sha512-Ds/TEoZjwggRoz/Q2O7SE3i4Jm66mqTDfmdHdq/7DKVk3bro9Q8h6WdXKdPqFLMoqxrDK5SVRzHVPOS6uuGtrg==} - dev: true - - /errno/0.1.8: - resolution: {integrity: sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==} - hasBin: true - requiresBuild: true - dependencies: - prr: 1.0.1 - dev: true - optional: true - - /error-ex/1.3.2: - resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} - dependencies: - is-arrayish: 0.2.1 - - /error-stack-parser/2.1.4: - resolution: {integrity: sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==} - dependencies: - stackframe: 1.3.4 - dev: false - - /es-abstract/1.21.1: - resolution: {integrity: sha512-QudMsPOz86xYz/1dG1OuGBKOELjCh99IIWHLzy5znUB6j8xG2yMA7bfTV86VSqKF+Y/H08vQPR+9jyXpuC6hfg==} - engines: {node: '>= 0.4'} - dependencies: - available-typed-arrays: 1.0.5 - call-bind: 1.0.2 - es-set-tostringtag: 2.0.1 - es-to-primitive: 1.2.1 - function-bind: 1.1.1 - function.prototype.name: 1.1.5 - get-intrinsic: 1.1.3 - get-symbol-description: 1.0.0 - globalthis: 1.0.3 - gopd: 1.0.1 - has: 1.0.3 - has-property-descriptors: 1.0.0 - has-proto: 1.0.1 - has-symbols: 1.0.3 - internal-slot: 1.0.4 - is-array-buffer: 3.0.1 - is-callable: 1.2.7 - is-negative-zero: 2.0.2 - is-regex: 1.1.4 - is-shared-array-buffer: 1.0.2 - is-string: 1.0.7 - is-typed-array: 1.1.10 - is-weakref: 1.0.2 - object-inspect: 1.12.3 - object-keys: 1.1.1 - object.assign: 4.1.4 - regexp.prototype.flags: 1.4.3 - safe-regex-test: 1.0.0 - string.prototype.trimend: 1.0.6 - string.prototype.trimstart: 1.0.6 - typed-array-length: 1.0.4 - unbox-primitive: 1.0.2 - which-typed-array: 1.1.9 - dev: true - - /es-aggregate-error/1.0.9: - resolution: 
{integrity: sha512-fvnX40sb538wdU6r4s35cq4EY6Lr09Upj40BEVem4LEsuW8XgQep9yD5Q1U2KftokNp1rWODFJ2qwZSsAjFpbg==} - engines: {node: '>= 0.4'} - dependencies: - define-properties: 1.1.4 - es-abstract: 1.21.1 - function-bind: 1.1.1 - functions-have-names: 1.2.3 - get-intrinsic: 1.2.0 - globalthis: 1.0.3 - has-property-descriptors: 1.0.0 - dev: true - - /es-get-iterator/1.1.3: - resolution: {integrity: sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==} - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 - has-symbols: 1.0.3 - is-arguments: 1.1.1 - is-map: 2.0.2 - is-set: 2.0.2 - is-string: 1.0.7 - isarray: 2.0.5 - stop-iteration-iterator: 1.0.0 - dev: true - - /es-module-lexer/0.9.3: - resolution: {integrity: sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==} - dev: true - - /es-set-tostringtag/2.0.1: - resolution: {integrity: sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==} - engines: {node: '>= 0.4'} - dependencies: - get-intrinsic: 1.1.3 - has: 1.0.3 - has-tostringtag: 1.0.0 - dev: true - - /es-shim-unscopables/1.0.0: - resolution: {integrity: sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==} - dependencies: - has: 1.0.3 - dev: true - - /es-to-primitive/1.2.1: - resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==} - engines: {node: '>= 0.4'} - dependencies: - is-callable: 1.2.7 - is-date-object: 1.0.5 - is-symbol: 1.0.4 - dev: true - - /es6-object-assign/1.1.0: - resolution: {integrity: sha512-MEl9uirslVwqQU369iHNWZXsI8yaZYGg/D65aOgZkeyFJwHYSxilf7rQzXKI7DdDuBPrBXbfk3sl9hJhmd5AUw==} - dev: true - - /es6-promise/3.3.1: - resolution: {integrity: sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==} - dev: true - - /esbuild-android-64/0.15.18: - resolution: {integrity: 
sha512-wnpt3OXRhcjfIDSZu9bnzT4/TNTDsOUvip0foZOUBG7QbSt//w3QV4FInVJxNhKc/ErhUxc5z4QjHtMi7/TbgA==} - engines: {node: '>=12'} - cpu: [x64] - os: [android] - requiresBuild: true - dev: true - optional: true - - /esbuild-android-arm64/0.15.18: - resolution: {integrity: sha512-G4xu89B8FCzav9XU8EjsXacCKSG2FT7wW9J6hOc18soEHJdtWu03L3TQDGf0geNxfLTtxENKBzMSq9LlbjS8OQ==} - engines: {node: '>=12'} - cpu: [arm64] - os: [android] - requiresBuild: true - dev: true - optional: true - - /esbuild-darwin-64/0.15.18: - resolution: {integrity: sha512-2WAvs95uPnVJPuYKP0Eqx+Dl/jaYseZEUUT1sjg97TJa4oBtbAKnPnl3b5M9l51/nbx7+QAEtuummJZW0sBEmg==} - engines: {node: '>=12'} - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /esbuild-darwin-arm64/0.15.18: - resolution: {integrity: sha512-tKPSxcTJ5OmNb1btVikATJ8NftlyNlc8BVNtyT/UAr62JFOhwHlnoPrhYWz09akBLHI9nElFVfWSTSRsrZiDUA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /esbuild-freebsd-64/0.15.18: - resolution: {integrity: sha512-TT3uBUxkteAjR1QbsmvSsjpKjOX6UkCstr8nMr+q7zi3NuZ1oIpa8U41Y8I8dJH2fJgdC3Dj3CXO5biLQpfdZA==} - engines: {node: '>=12'} - cpu: [x64] - os: [freebsd] - requiresBuild: true - dev: true - optional: true - - /esbuild-freebsd-arm64/0.15.18: - resolution: {integrity: sha512-R/oVr+X3Tkh+S0+tL41wRMbdWtpWB8hEAMsOXDumSSa6qJR89U0S/PpLXrGF7Wk/JykfpWNokERUpCeHDl47wA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [freebsd] - requiresBuild: true - dev: true - optional: true - - /esbuild-linux-32/0.15.18: - resolution: {integrity: sha512-lphF3HiCSYtaa9p1DtXndiQEeQDKPl9eN/XNoBf2amEghugNuqXNZA/ZovthNE2aa4EN43WroO0B85xVSjYkbg==} - engines: {node: '>=12'} - cpu: [ia32] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /esbuild-linux-64/0.15.18: - resolution: {integrity: sha512-hNSeP97IviD7oxLKFuii5sDPJ+QHeiFTFLoLm7NZQligur8poNOWGIgpQ7Qf8Balb69hptMZzyOBIPtY09GZYw==} - engines: {node: '>=12'} - cpu: [x64] - os: [linux] - 
requiresBuild: true - dev: true - optional: true - - /esbuild-linux-arm/0.15.18: - resolution: {integrity: sha512-UH779gstRblS4aoS2qpMl3wjg7U0j+ygu3GjIeTonCcN79ZvpPee12Qun3vcdxX+37O5LFxz39XeW2I9bybMVA==} - engines: {node: '>=12'} - cpu: [arm] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /esbuild-linux-arm64/0.15.18: - resolution: {integrity: sha512-54qr8kg/6ilcxd+0V3h9rjT4qmjc0CccMVWrjOEM/pEcUzt8X62HfBSeZfT2ECpM7104mk4yfQXkosY8Quptug==} - engines: {node: '>=12'} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /esbuild-linux-mips64le/0.15.18: - resolution: {integrity: sha512-Mk6Ppwzzz3YbMl/ZZL2P0q1tnYqh/trYZ1VfNP47C31yT0K8t9s7Z077QrDA/guU60tGNp2GOwCQnp+DYv7bxQ==} - engines: {node: '>=12'} - cpu: [mips64el] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /esbuild-linux-ppc64le/0.15.18: - resolution: {integrity: sha512-b0XkN4pL9WUulPTa/VKHx2wLCgvIAbgwABGnKMY19WhKZPT+8BxhZdqz6EgkqCLld7X5qiCY2F/bfpUUlnFZ9w==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /esbuild-linux-riscv64/0.15.18: - resolution: {integrity: sha512-ba2COaoF5wL6VLZWn04k+ACZjZ6NYniMSQStodFKH/Pu6RxzQqzsmjR1t9QC89VYJxBeyVPTaHuBMCejl3O/xg==} - engines: {node: '>=12'} - cpu: [riscv64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /esbuild-linux-s390x/0.15.18: - resolution: {integrity: sha512-VbpGuXEl5FCs1wDVp93O8UIzl3ZrglgnSQ+Hu79g7hZu6te6/YHgVJxCM2SqfIila0J3k0csfnf8VD2W7u2kzQ==} - engines: {node: '>=12'} - cpu: [s390x] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /esbuild-netbsd-64/0.15.18: - resolution: {integrity: sha512-98ukeCdvdX7wr1vUYQzKo4kQ0N2p27H7I11maINv73fVEXt2kyh4K4m9f35U1K43Xc2QGXlzAw0K9yoU7JUjOg==} - engines: {node: '>=12'} - cpu: [x64] - os: [netbsd] - requiresBuild: true - dev: true - optional: true - - /esbuild-openbsd-64/0.15.18: - resolution: {integrity: 
sha512-yK5NCcH31Uae076AyQAXeJzt/vxIo9+omZRKj1pauhk3ITuADzuOx5N2fdHrAKPxN+zH3w96uFKlY7yIn490xQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [openbsd] - requiresBuild: true - dev: true - optional: true - - /esbuild-plugin-alias/0.2.1: - resolution: {integrity: sha512-jyfL/pwPqaFXyKnj8lP8iLk6Z0m099uXR45aSN8Av1XD4vhvQutxxPzgA2bTcAwQpa1zCXDcWOlhFgyP3GKqhQ==} - dev: true - - /esbuild-register/3.4.2_esbuild@0.16.17: - resolution: {integrity: sha512-kG/XyTDyz6+YDuyfB9ZoSIOOmgyFCH+xPRtsCa8W85HLRV5Csp+o3jWVbOSHgSLfyLc5DmP+KFDNwty4mEjC+Q==} - peerDependencies: - esbuild: '>=0.12 <1' - dependencies: - debug: 4.3.4 - esbuild: 0.16.17 - transitivePeerDependencies: - - supports-color - dev: true - - /esbuild-sunos-64/0.15.18: - resolution: {integrity: sha512-On22LLFlBeLNj/YF3FT+cXcyKPEI263nflYlAhz5crxtp3yRG1Ugfr7ITyxmCmjm4vbN/dGrb/B7w7U8yJR9yw==} - engines: {node: '>=12'} - cpu: [x64] - os: [sunos] - requiresBuild: true - dev: true - optional: true - - /esbuild-windows-32/0.15.18: - resolution: {integrity: sha512-o+eyLu2MjVny/nt+E0uPnBxYuJHBvho8vWsC2lV61A7wwTWC3jkN2w36jtA+yv1UgYkHRihPuQsL23hsCYGcOQ==} - engines: {node: '>=12'} - cpu: [ia32] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /esbuild-windows-64/0.15.18: - resolution: {integrity: sha512-qinug1iTTaIIrCorAUjR0fcBk24fjzEedFYhhispP8Oc7SFvs+XeW3YpAKiKp8dRpizl4YYAhxMjlftAMJiaUw==} - engines: {node: '>=12'} - cpu: [x64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /esbuild-windows-arm64/0.15.18: - resolution: {integrity: sha512-q9bsYzegpZcLziq0zgUi5KqGVtfhjxGbnksaBFYmWLxeV/S1fK4OLdq2DFYnXcLMjlZw2L0jLsk1eGoB522WXQ==} - engines: {node: '>=12'} - cpu: [arm64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /esbuild/0.15.18: - resolution: {integrity: sha512-x/R72SmW3sSFRm5zrrIjAhCeQSAWoni3CmHEqfQrZIQTM3lVCdehdwuIqaOtfC2slvpdlLa62GYoN8SxT23m6Q==} - engines: {node: '>=12'} - hasBin: true - requiresBuild: true - optionalDependencies: - '@esbuild/android-arm': 
0.15.18 - '@esbuild/linux-loong64': 0.15.18 - esbuild-android-64: 0.15.18 - esbuild-android-arm64: 0.15.18 - esbuild-darwin-64: 0.15.18 - esbuild-darwin-arm64: 0.15.18 - esbuild-freebsd-64: 0.15.18 - esbuild-freebsd-arm64: 0.15.18 - esbuild-linux-32: 0.15.18 - esbuild-linux-64: 0.15.18 - esbuild-linux-arm: 0.15.18 - esbuild-linux-arm64: 0.15.18 - esbuild-linux-mips64le: 0.15.18 - esbuild-linux-ppc64le: 0.15.18 - esbuild-linux-riscv64: 0.15.18 - esbuild-linux-s390x: 0.15.18 - esbuild-netbsd-64: 0.15.18 - esbuild-openbsd-64: 0.15.18 - esbuild-sunos-64: 0.15.18 - esbuild-windows-32: 0.15.18 - esbuild-windows-64: 0.15.18 - esbuild-windows-arm64: 0.15.18 - dev: true - - /esbuild/0.16.17: - resolution: {integrity: sha512-G8LEkV0XzDMNwXKgM0Jwu3nY3lSTwSGY6XbxM9cr9+s0T/qSV1q1JVPBGzm3dcjhCic9+emZDmMffkwgPeOeLg==} - engines: {node: '>=12'} - hasBin: true - requiresBuild: true - optionalDependencies: - '@esbuild/android-arm': 0.16.17 - '@esbuild/android-arm64': 0.16.17 - '@esbuild/android-x64': 0.16.17 - '@esbuild/darwin-arm64': 0.16.17 - '@esbuild/darwin-x64': 0.16.17 - '@esbuild/freebsd-arm64': 0.16.17 - '@esbuild/freebsd-x64': 0.16.17 - '@esbuild/linux-arm': 0.16.17 - '@esbuild/linux-arm64': 0.16.17 - '@esbuild/linux-ia32': 0.16.17 - '@esbuild/linux-loong64': 0.16.17 - '@esbuild/linux-mips64el': 0.16.17 - '@esbuild/linux-ppc64': 0.16.17 - '@esbuild/linux-riscv64': 0.16.17 - '@esbuild/linux-s390x': 0.16.17 - '@esbuild/linux-x64': 0.16.17 - '@esbuild/netbsd-x64': 0.16.17 - '@esbuild/openbsd-x64': 0.16.17 - '@esbuild/sunos-x64': 0.16.17 - '@esbuild/win32-arm64': 0.16.17 - '@esbuild/win32-ia32': 0.16.17 - '@esbuild/win32-x64': 0.16.17 - dev: true - - /escalade/3.1.1: - resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} - engines: {node: '>=6'} - - /escape-html/1.0.3: - resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} - dev: true - - 
/escape-string-regexp/1.0.5: - resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} - engines: {node: '>=0.8.0'} - - /escape-string-regexp/2.0.0: - resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} - engines: {node: '>=8'} - dev: true - - /escape-string-regexp/4.0.0: - resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} - engines: {node: '>=10'} - - /escape-string-regexp/5.0.0: - resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} - engines: {node: '>=12'} - dev: false - - /escodegen/1.14.3: - resolution: {integrity: sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==} - engines: {node: '>=4.0'} - hasBin: true - dependencies: - esprima: 4.0.1 - estraverse: 4.3.0 - esutils: 2.0.3 - optionator: 0.8.3 - optionalDependencies: - source-map: 0.6.1 - dev: true - - /escodegen/2.0.0: - resolution: {integrity: sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==} - engines: {node: '>=6.0'} - hasBin: true - dependencies: - esprima: 4.0.1 - estraverse: 5.3.0 - esutils: 2.0.3 - optionator: 0.8.3 - optionalDependencies: - source-map: 0.6.1 - dev: true - - /eslint-config-prettier/8.6.0_eslint@8.32.0: - resolution: {integrity: sha512-bAF0eLpLVqP5oEVUFKpMA+NnRFICwn9X8B5jrR9FcqnYBuPbqWEjTEspPWMj5ye6czoSLDweCzSo3Ko7gGrZaA==} - hasBin: true - peerDependencies: - eslint: '>=7.0.0' - dependencies: - eslint: 8.32.0 - dev: true - - /eslint-config-react-app/7.0.1_f3p7cdzsbkhsmnshbzhbbdgmve: - resolution: {integrity: sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA==} - engines: {node: '>=14.0.0'} - peerDependencies: - eslint: ^8.0.0 - typescript: '*' - peerDependenciesMeta: - typescript: 
- optional: true - dependencies: - '@babel/core': 7.20.12 - '@babel/eslint-parser': 7.19.1_2je5tsgpdnpnp4f5qs5fqust6m - '@rushstack/eslint-patch': 1.2.0 - '@typescript-eslint/eslint-plugin': 5.48.2_azmbqzqvrlvblbdtiwxwvyvjjy - '@typescript-eslint/parser': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - babel-preset-react-app: 10.0.1 - confusing-browser-globals: 1.0.11 - eslint: 8.32.0 - eslint-plugin-flowtype: 8.0.3_eslint@8.32.0 - eslint-plugin-import: 2.27.5_2l6piu6guil2f63lj3qmhzbnn4 - eslint-plugin-jest: 25.7.0_i5clxtuiaceouxhg5syqkw5wwi - eslint-plugin-jsx-a11y: 6.7.1_eslint@8.32.0 - eslint-plugin-react: 7.32.1_eslint@8.32.0 - eslint-plugin-react-hooks: 4.6.0_eslint@8.32.0 - eslint-plugin-testing-library: 5.9.1_et5x32uxl7z5ldub3ye5rhlyqm - typescript: 4.9.5 - transitivePeerDependencies: - - '@babel/plugin-syntax-flow' - - '@babel/plugin-transform-react-jsx' - - eslint-import-resolver-typescript - - eslint-import-resolver-webpack - - jest - - supports-color - dev: true - - /eslint-import-resolver-node/0.3.7: - resolution: {integrity: sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==} - dependencies: - debug: 3.2.7 - is-core-module: 2.11.0 - resolve: 1.22.1 - transitivePeerDependencies: - - supports-color - dev: true - - /eslint-module-utils/2.7.4_kvyj4idustix6trhy5lyssy2sq: - resolution: {integrity: sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==} - engines: {node: '>=4'} - peerDependencies: - '@typescript-eslint/parser': '*' - eslint: '*' - eslint-import-resolver-node: '*' - eslint-import-resolver-typescript: '*' - eslint-import-resolver-webpack: '*' - peerDependenciesMeta: - '@typescript-eslint/parser': - optional: true - eslint: - optional: true - eslint-import-resolver-node: - optional: true - eslint-import-resolver-typescript: - optional: true - eslint-import-resolver-webpack: - optional: true - dependencies: - '@typescript-eslint/parser': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - 
debug: 3.2.7 - eslint: 8.32.0 - eslint-import-resolver-node: 0.3.7 - transitivePeerDependencies: - - supports-color - dev: true - - /eslint-plugin-css-modules/2.11.0_eslint@8.32.0: - resolution: {integrity: sha512-CLvQvJOMlCywZzaI4HVu7QH/ltgNXvCg7giJGiE+sA9wh5zQ+AqTgftAzrERV22wHe1p688wrU/Zwxt1Ry922w==} - engines: {node: '>=4.0.0'} - peerDependencies: - eslint: '>=2.0.0' - dependencies: - eslint: 8.32.0 - gonzales-pe: 4.3.0 - lodash: 4.17.21 - dev: true - - /eslint-plugin-flowtype/8.0.3_eslint@8.32.0: - resolution: {integrity: sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ==} - engines: {node: '>=12.0.0'} - peerDependencies: - '@babel/plugin-syntax-flow': ^7.14.5 - '@babel/plugin-transform-react-jsx': ^7.14.9 - eslint: ^8.1.0 - dependencies: - eslint: 8.32.0 - lodash: 4.17.21 - string-natural-compare: 3.0.1 - dev: true - - /eslint-plugin-import/2.27.5_2l6piu6guil2f63lj3qmhzbnn4: - resolution: {integrity: sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow==} - engines: {node: '>=4'} - peerDependencies: - '@typescript-eslint/parser': '*' - eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 - peerDependenciesMeta: - '@typescript-eslint/parser': - optional: true - dependencies: - '@typescript-eslint/parser': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - array-includes: 3.1.6 - array.prototype.flat: 1.3.1 - array.prototype.flatmap: 1.3.1 - debug: 3.2.7 - doctrine: 2.1.0 - eslint: 8.32.0 - eslint-import-resolver-node: 0.3.7 - eslint-module-utils: 2.7.4_kvyj4idustix6trhy5lyssy2sq - has: 1.0.3 - is-core-module: 2.11.0 - is-glob: 4.0.3 - minimatch: 3.1.2 - object.values: 1.1.6 - resolve: 1.22.1 - semver: 6.3.0 - tsconfig-paths: 3.14.1 - transitivePeerDependencies: - - eslint-import-resolver-typescript - - eslint-import-resolver-webpack - - supports-color - dev: true - - /eslint-plugin-jest/25.7.0_i5clxtuiaceouxhg5syqkw5wwi: - resolution: {integrity: 
sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - peerDependencies: - '@typescript-eslint/eslint-plugin': ^4.0.0 || ^5.0.0 - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - jest: '*' - peerDependenciesMeta: - '@typescript-eslint/eslint-plugin': - optional: true - jest: - optional: true - dependencies: - '@typescript-eslint/eslint-plugin': 5.48.2_azmbqzqvrlvblbdtiwxwvyvjjy - '@typescript-eslint/experimental-utils': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - eslint: 8.32.0 - jest: 29.3.1_2263m44mchjafa7bz7l52hbcpa - transitivePeerDependencies: - - supports-color - - typescript - dev: true - - /eslint-plugin-jest/26.9.0_i5clxtuiaceouxhg5syqkw5wwi: - resolution: {integrity: sha512-TWJxWGp1J628gxh2KhaH1H1paEdgE2J61BBF1I59c6xWeL5+D1BzMxGDN/nXAfX+aSkR5u80K+XhskK6Gwq9ng==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - '@typescript-eslint/eslint-plugin': ^5.0.0 - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - jest: '*' - peerDependenciesMeta: - '@typescript-eslint/eslint-plugin': - optional: true - jest: - optional: true - dependencies: - '@typescript-eslint/eslint-plugin': 5.48.2_azmbqzqvrlvblbdtiwxwvyvjjy - '@typescript-eslint/utils': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm - eslint: 8.32.0 - jest: 29.3.1_2263m44mchjafa7bz7l52hbcpa - transitivePeerDependencies: - - supports-color - - typescript - dev: true - - /eslint-plugin-jsx-a11y/6.7.1_eslint@8.32.0: - resolution: {integrity: sha512-63Bog4iIethyo8smBklORknVjB0T2dwB8Mr/hIC+fBS0uyHdYYpzM/Ed+YC8VxTjlXHEWFOdmgwcDn1U2L9VCA==} - engines: {node: '>=4.0'} - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - dependencies: - '@babel/runtime': 7.20.7 - aria-query: 5.1.3 - array-includes: 3.1.6 - array.prototype.flatmap: 1.3.1 - ast-types-flow: 0.0.7 - axe-core: 4.6.2 - axobject-query: 3.1.1 - damerau-levenshtein: 1.0.8 - emoji-regex: 9.2.2 - eslint: 8.32.0 - has: 1.0.3 - jsx-ast-utils: 3.3.3 - language-tags: 1.0.5 - 
minimatch: 3.1.2 - object.entries: 1.1.6 - object.fromentries: 2.0.6 - semver: 6.3.0 - dev: true - - /eslint-plugin-prettier/4.2.1_cn4lalcyadplruoxa5mhp7j3dq: - resolution: {integrity: sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==} - engines: {node: '>=12.0.0'} - peerDependencies: - eslint: '>=7.28.0' - eslint-config-prettier: '*' - prettier: '>=2.0.0' - peerDependenciesMeta: - eslint-config-prettier: - optional: true - dependencies: - eslint: 8.32.0 - eslint-config-prettier: 8.6.0_eslint@8.32.0 - prettier: 2.8.3 - prettier-linter-helpers: 1.0.0 - dev: true - - /eslint-plugin-react-hooks/4.6.0_eslint@8.32.0: - resolution: {integrity: sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==} - engines: {node: '>=10'} - peerDependencies: - eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 - dependencies: - eslint: 8.32.0 - dev: true - - /eslint-plugin-react/7.32.1_eslint@8.32.0: - resolution: {integrity: sha512-vOjdgyd0ZHBXNsmvU+785xY8Bfe57EFbTYYk8XrROzWpr9QBvpjITvAXt9xqcE6+8cjR/g1+mfumPToxsl1www==} - engines: {node: '>=4'} - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - dependencies: - array-includes: 3.1.6 - array.prototype.flatmap: 1.3.1 - array.prototype.tosorted: 1.1.1 - doctrine: 2.1.0 - eslint: 8.32.0 - estraverse: 5.3.0 - jsx-ast-utils: 3.3.3 - minimatch: 3.1.2 - object.entries: 1.1.6 - object.fromentries: 2.0.6 - object.hasown: 1.1.2 - object.values: 1.1.6 - prop-types: 15.8.1 - resolve: 2.0.0-next.4 - semver: 6.3.0 - string.prototype.matchall: 4.0.8 - dev: true - - /eslint-plugin-testing-library/5.9.1_et5x32uxl7z5ldub3ye5rhlyqm: - resolution: {integrity: sha512-6BQp3tmb79jLLasPHJmy8DnxREe+2Pgf7L+7o09TSWPfdqqtQfRZmZNetr5mOs3yqZk/MRNxpN3RUpJe0wB4LQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0, npm: '>=6'} - peerDependencies: - eslint: ^7.5.0 || ^8.0.0 - dependencies: - '@typescript-eslint/utils': 5.48.2_et5x32uxl7z5ldub3ye5rhlyqm 
- eslint: 8.32.0 - transitivePeerDependencies: - - supports-color - - typescript - dev: true - - /eslint-plugin-unused-imports/2.0.0_virssgr5omih4ylyae2gddvmxu: - resolution: {integrity: sha512-3APeS/tQlTrFa167ThtP0Zm0vctjr4M44HMpeg1P4bK6wItarumq0Ma82xorMKdFsWpphQBlRPzw/pxiVELX1A==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - '@typescript-eslint/eslint-plugin': ^5.0.0 - eslint: ^8.0.0 - peerDependenciesMeta: - '@typescript-eslint/eslint-plugin': - optional: true - dependencies: - '@typescript-eslint/eslint-plugin': 5.48.2_azmbqzqvrlvblbdtiwxwvyvjjy - eslint: 8.32.0 - eslint-rule-composer: 0.3.0 - dev: true - - /eslint-rule-composer/0.3.0: - resolution: {integrity: sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==} - engines: {node: '>=4.0.0'} - dev: true - - /eslint-scope/5.1.1: - resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} - engines: {node: '>=8.0.0'} - dependencies: - esrecurse: 4.3.0 - estraverse: 4.3.0 - dev: true - - /eslint-scope/7.1.1: - resolution: {integrity: sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - esrecurse: 4.3.0 - estraverse: 5.3.0 - dev: true - - /eslint-utils/3.0.0_eslint@8.32.0: - resolution: {integrity: sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==} - engines: {node: ^10.0.0 || ^12.0.0 || >= 14.0.0} - peerDependencies: - eslint: '>=5' - dependencies: - eslint: 8.32.0 - eslint-visitor-keys: 2.1.0 - dev: true - - /eslint-visitor-keys/2.1.0: - resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==} - engines: {node: '>=10'} - dev: true - - /eslint-visitor-keys/3.3.0: - resolution: {integrity: 
sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dev: true - - /eslint/8.32.0: - resolution: {integrity: sha512-nETVXpnthqKPFyuY2FNjz/bEd6nbosRgKbkgS/y1C7LJop96gYHWpiguLecMHQ2XCPxn77DS0P+68WzG6vkZSQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - hasBin: true - dependencies: - '@eslint/eslintrc': 1.4.1 - '@humanwhocodes/config-array': 0.11.8 - '@humanwhocodes/module-importer': 1.0.1 - '@nodelib/fs.walk': 1.2.8 - ajv: 6.12.6 - chalk: 4.1.2 - cross-spawn: 7.0.3 - debug: 4.3.4 - doctrine: 3.0.0 - escape-string-regexp: 4.0.0 - eslint-scope: 7.1.1 - eslint-utils: 3.0.0_eslint@8.32.0 - eslint-visitor-keys: 3.3.0 - espree: 9.4.1 - esquery: 1.4.0 - esutils: 2.0.3 - fast-deep-equal: 3.1.3 - file-entry-cache: 6.0.1 - find-up: 5.0.0 - glob-parent: 6.0.2 - globals: 13.19.0 - grapheme-splitter: 1.0.4 - ignore: 5.2.4 - import-fresh: 3.3.0 - imurmurhash: 0.1.4 - is-glob: 4.0.3 - is-path-inside: 3.0.3 - js-sdsl: 4.3.0 - js-yaml: 4.1.0 - json-stable-stringify-without-jsonify: 1.0.1 - levn: 0.4.1 - lodash.merge: 4.6.2 - minimatch: 3.1.2 - natural-compare: 1.4.0 - optionator: 0.9.1 - regexpp: 3.2.0 - strip-ansi: 6.0.1 - strip-json-comments: 3.1.1 - text-table: 0.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /espree/9.4.1: - resolution: {integrity: sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - acorn: 8.8.1 - acorn-jsx: 5.3.2_acorn@8.8.1 - eslint-visitor-keys: 3.3.0 - dev: true - - /esprima/4.0.1: - resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} - engines: {node: '>=4'} - hasBin: true - dev: true - - /esquery/1.4.0: - resolution: {integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==} - engines: {node: 
'>=0.10'} - dependencies: - estraverse: 5.3.0 - dev: true - - /esrecurse/4.3.0: - resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} - engines: {node: '>=4.0'} - dependencies: - estraverse: 5.3.0 - dev: true - - /estraverse/4.3.0: - resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} - engines: {node: '>=4.0'} - dev: true - - /estraverse/5.3.0: - resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} - engines: {node: '>=4.0'} - dev: true - - /estree-to-babel/3.2.1: - resolution: {integrity: sha512-YNF+mZ/Wu2FU/gvmzuWtYc8rloubL7wfXCTgouFrnjGVXPA/EeYYA7pupXWrb3Iv1cTBeSSxxJIbK23l4MRNqg==} - engines: {node: '>=8.3.0'} - dependencies: - '@babel/traverse': 7.20.12 - '@babel/types': 7.20.7 - c8: 7.12.0 - transitivePeerDependencies: - - supports-color - dev: true - - /estree-walker/1.0.1: - resolution: {integrity: sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==} - dev: true - - /estree-walker/2.0.2: - resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} - dev: true - - /esutils/2.0.3: - resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} - engines: {node: '>=0.10.0'} - dev: true - - /etag/1.8.1: - resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} - engines: {node: '>= 0.6'} - dev: true - - /event-target-shim/5.0.1: - resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} - engines: {node: '>=6'} - dev: true - - /eventemitter3/4.0.7: - resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} - 
dev: false - - /execa/5.1.1: - resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} - engines: {node: '>=10'} - dependencies: - cross-spawn: 7.0.3 - get-stream: 6.0.1 - human-signals: 2.1.0 - is-stream: 2.0.1 - merge-stream: 2.0.0 - npm-run-path: 4.0.1 - onetime: 5.1.2 - signal-exit: 3.0.7 - strip-final-newline: 2.0.0 - dev: true - - /exit/0.1.2: - resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} - engines: {node: '>= 0.8.0'} - dev: true - - /expand-brackets/2.1.4: - resolution: {integrity: sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA==} - engines: {node: '>=0.10.0'} - dependencies: - debug: 2.6.9 - define-property: 0.2.5 - extend-shallow: 2.0.1 - posix-character-classes: 0.1.1 - regex-not: 1.0.2 - snapdragon: 0.8.2 - to-regex: 3.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /expect-type/0.14.2: - resolution: {integrity: sha512-ed3+tr5ujbIYXZ8Pl/VgIphwJQ0q5tBLGGdn7Zvwt1WyPBRX83xjT5pT77P/GkuQbctx0K2ZNSSan7eruJqTCQ==} - dev: true - - /expect/29.3.1: - resolution: {integrity: sha512-gGb1yTgU30Q0O/tQq+z30KBWv24ApkMgFUpvKBkyLUBL68Wv8dHdJxTBZFl/iT8K/bqDHvUYRH6IIN3rToopPA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/expect-utils': 29.3.1 - jest-get-type: 29.2.0 - jest-matcher-utils: 29.3.1 - jest-message-util: 29.3.1 - jest-util: 29.3.1 - dev: true - - /express/4.18.2: - resolution: {integrity: sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==} - engines: {node: '>= 0.10.0'} - dependencies: - accepts: 1.3.8 - array-flatten: 1.1.1 - body-parser: 1.20.1 - content-disposition: 0.5.4 - content-type: 1.0.4 - cookie: 0.5.0 - cookie-signature: 1.0.6 - debug: 2.6.9 - depd: 2.0.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - finalhandler: 1.2.0 - fresh: 0.5.2 - http-errors: 2.0.0 - 
merge-descriptors: 1.0.1 - methods: 1.1.2 - on-finished: 2.4.1 - parseurl: 1.3.3 - path-to-regexp: 0.1.7 - proxy-addr: 2.0.7 - qs: 6.11.0 - range-parser: 1.2.1 - safe-buffer: 5.2.1 - send: 0.18.0 - serve-static: 1.15.0 - setprototypeof: 1.2.0 - statuses: 2.0.1 - type-is: 1.6.18 - utils-merge: 1.0.1 - vary: 1.1.2 - transitivePeerDependencies: - - supports-color - dev: true - - /extend-shallow/2.0.1: - resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} - engines: {node: '>=0.10.0'} - dependencies: - is-extendable: 0.1.1 - dev: true - - /extend-shallow/3.0.2: - resolution: {integrity: sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==} - engines: {node: '>=0.10.0'} - dependencies: - assign-symbols: 1.0.0 - is-extendable: 1.0.1 - dev: true - - /extend/3.0.2: - resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} - - /extglob/2.0.4: - resolution: {integrity: sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==} - engines: {node: '>=0.10.0'} - dependencies: - array-unique: 0.3.2 - define-property: 1.0.0 - expand-brackets: 2.1.4 - extend-shallow: 2.0.1 - fragment-cache: 0.2.1 - regex-not: 1.0.2 - snapdragon: 0.8.2 - to-regex: 3.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /extract-zip/1.7.0: - resolution: {integrity: sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA==} - hasBin: true - dependencies: - concat-stream: 1.6.2 - debug: 2.6.9 - mkdirp: 0.5.6 - yauzl: 2.10.0 - transitivePeerDependencies: - - supports-color - dev: true - - /fast-deep-equal/2.0.1: - resolution: {integrity: sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==} - dev: false - - /fast-deep-equal/3.1.3: - resolution: {integrity: 
sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} - - /fast-diff/1.2.0: - resolution: {integrity: sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==} - dev: true - - /fast-equals/2.0.4: - resolution: {integrity: sha512-caj/ZmjHljPrZtbzJ3kfH5ia/k4mTJe/qSiXAGzxZWRZgsgDV0cvNaQULqUX8t0/JVlzzEdYOwCN5DmzTxoD4w==} - dev: false - - /fast-glob/3.2.12: - resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==} - engines: {node: '>=8.6.0'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - '@nodelib/fs.walk': 1.2.8 - glob-parent: 5.1.2 - merge2: 1.4.1 - micromatch: 4.0.5 - dev: true - - /fast-glob/3.2.7: - resolution: {integrity: sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q==} - engines: {node: '>=8'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - '@nodelib/fs.walk': 1.2.8 - glob-parent: 5.1.2 - merge2: 1.4.1 - micromatch: 4.0.5 - dev: true - - /fast-json-stable-stringify/2.1.0: - resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} - dev: true - - /fast-levenshtein/2.0.6: - resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} - dev: true - - /fast-loops/1.1.3: - resolution: {integrity: sha512-8EZzEP0eKkEEVX+drtd9mtuQ+/QrlfW/5MlwcwK5Nds6EkZ/tRzEexkzUY2mIssnAyVLT+TKHuRXmFNNXYUd6g==} - dev: false - - /fast-memoize/2.5.2: - resolution: {integrity: sha512-Ue0LwpDYErFbmNnZSF0UH6eImUwDmogUO1jyE+JbN2gsQz/jICm1Ve7t9QT0rNSsfJt+Hs4/S3GnsDVjL4HVrw==} - dev: true - - /fast-safe-stringify/2.1.1: - resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} - dev: true - - /fast-shallow-equal/1.0.0: - resolution: {integrity: 
sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw==} - dev: false - - /fastest-levenshtein/1.0.16: - resolution: {integrity: sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==} - engines: {node: '>= 4.9.1'} - dev: true - - /fastest-stable-stringify/2.0.2: - resolution: {integrity: sha512-bijHueCGd0LqqNK9b5oCMHc0MluJAx0cwqASgbWMvkO01lCYgIhacVRLcaDz3QnyYIRNJRDwMb41VuT6pHJ91Q==} - dev: false - - /fastparse/1.1.2: - resolution: {integrity: sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==} - dev: true - - /fastq/1.15.0: - resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} - dependencies: - reusify: 1.0.4 - dev: true - - /fault/2.0.1: - resolution: {integrity: sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==} - dependencies: - format: 0.2.2 - dev: false - - /faye-websocket/0.11.4: - resolution: {integrity: sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==} - engines: {node: '>=0.8.0'} - dependencies: - websocket-driver: 0.7.4 - dev: false - - /fb-watchman/2.0.2: - resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} - dependencies: - bser: 2.1.1 - dev: true - - /fd-slicer/1.1.0: - resolution: {integrity: sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==} - dependencies: - pend: 1.2.0 - dev: true - - /fetch-readablestream/0.2.0: - resolution: {integrity: sha512-qu4mXWf4wus4idBIN/kVH+XSer8IZ9CwHP+Pd7DL7TuKNC1hP7ykon4kkBjwJF3EMX2WsFp4hH7gU7CyL7ucXw==} - dev: false - - /fetch-retry/5.0.3: - resolution: {integrity: sha512-uJQyMrX5IJZkhoEUBQ3EjxkeiZkppBd5jS/fMTJmfZxLSiaQjv2zD0kTvuvkSH89uFvgSlB6ueGpjD3HWN7Bxw==} - dev: true - - /file-entry-cache/6.0.1: - resolution: 
{integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} - engines: {node: ^10.12.0 || >=12.0.0} - dependencies: - flat-cache: 3.0.4 - dev: true - - /file-system-cache/2.0.2: - resolution: {integrity: sha512-lp4BHO4CWqvRyx88Tt3quZic9ZMf4cJyquYq7UI8sH42Bm2ArlBBjKQAalZOo+UfaBassb7X123Lik5qZ/tSAA==} - dependencies: - fs-extra: 11.1.0 - ramda: 0.28.0 - dev: true - - /file-uri-to-path/2.0.0: - resolution: {integrity: sha512-hjPFI8oE/2iQPVe4gbrJ73Pp+Xfub2+WI2LlXDbsaJBwT5wuMh35WNWVYYTpnz895shtwfyutMFLFywpQAFdLg==} - engines: {node: '>= 6'} - dev: true - - /filelist/1.0.4: - resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} - dependencies: - minimatch: 5.1.6 - dev: true - - /fill-range/4.0.0: - resolution: {integrity: sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==} - engines: {node: '>=0.10.0'} - dependencies: - extend-shallow: 2.0.1 - is-number: 3.0.0 - repeat-string: 1.6.1 - to-regex-range: 2.1.1 - dev: true - - /fill-range/7.0.1: - resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} - engines: {node: '>=8'} - dependencies: - to-regex-range: 5.0.1 - - /filter-obj/1.1.0: - resolution: {integrity: sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==} - engines: {node: '>=0.10.0'} - dev: false - - /finalhandler/1.2.0: - resolution: {integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==} - engines: {node: '>= 0.8'} - dependencies: - debug: 2.6.9 - encodeurl: 1.0.2 - escape-html: 1.0.3 - on-finished: 2.4.1 - parseurl: 1.3.3 - statuses: 2.0.1 - unpipe: 1.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /find-cache-dir/2.1.0: - resolution: {integrity: 
sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==} - engines: {node: '>=6'} - dependencies: - commondir: 1.0.1 - make-dir: 2.1.0 - pkg-dir: 3.0.0 - dev: true - - /find-cache-dir/3.3.2: - resolution: {integrity: sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==} - engines: {node: '>=8'} - dependencies: - commondir: 1.0.1 - make-dir: 3.1.0 - pkg-dir: 4.2.0 - dev: true - - /find-root/1.1.0: - resolution: {integrity: sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==} - dev: false - - /find-up/3.0.0: - resolution: {integrity: sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==} - engines: {node: '>=6'} - dependencies: - locate-path: 3.0.0 - dev: true - - /find-up/4.1.0: - resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} - engines: {node: '>=8'} - dependencies: - locate-path: 5.0.0 - path-exists: 4.0.0 - dev: true - - /find-up/5.0.0: - resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} - engines: {node: '>=10'} - dependencies: - locate-path: 6.0.0 - path-exists: 4.0.0 - dev: true - - /firebase/9.15.0: - resolution: {integrity: sha512-Fa8qFahDY/pMYMzwPGcfpUkAS3Q55qJ0QKD+5xnXjSX/jVHsJqoXtxapmyDCfAKktiLhXIcRElW1VDVd9xGwQQ==} - dependencies: - '@firebase/analytics': 0.9.0_@firebase+app@0.9.0 - '@firebase/analytics-compat': 0.2.0_5z7svkifsmkn6ro3hru7lnxwrq - '@firebase/app': 0.9.0 - '@firebase/app-check': 0.6.0_@firebase+app@0.9.0 - '@firebase/app-check-compat': 0.3.0_5z7svkifsmkn6ro3hru7lnxwrq - '@firebase/app-compat': 0.2.0 - '@firebase/app-types': 0.9.0 - '@firebase/auth': 0.21.0_@firebase+app@0.9.0 - '@firebase/auth-compat': 0.3.0_z6klzwxqggigirvqix3ggnu6f4 - '@firebase/database': 0.14.0_@firebase+app-types@0.9.0 - '@firebase/database-compat': 
0.3.0_@firebase+app-types@0.9.0 - '@firebase/firestore': 3.8.0_@firebase+app@0.9.0 - '@firebase/firestore-compat': 0.3.0_z6klzwxqggigirvqix3ggnu6f4 - '@firebase/functions': 0.9.0_mw76ib4woycgbhoj6pqh7xkrde - '@firebase/functions-compat': 0.3.0_z6klzwxqggigirvqix3ggnu6f4 - '@firebase/installations': 0.6.0_@firebase+app@0.9.0 - '@firebase/installations-compat': 0.2.0_z6klzwxqggigirvqix3ggnu6f4 - '@firebase/messaging': 0.12.0_@firebase+app@0.9.0 - '@firebase/messaging-compat': 0.2.0_5z7svkifsmkn6ro3hru7lnxwrq - '@firebase/performance': 0.6.0_@firebase+app@0.9.0 - '@firebase/performance-compat': 0.2.0_5z7svkifsmkn6ro3hru7lnxwrq - '@firebase/remote-config': 0.4.0_@firebase+app@0.9.0 - '@firebase/remote-config-compat': 0.2.0_5z7svkifsmkn6ro3hru7lnxwrq - '@firebase/storage': 0.10.0_@firebase+app@0.9.0 - '@firebase/storage-compat': 0.2.0_z6klzwxqggigirvqix3ggnu6f4 - '@firebase/util': 1.8.0 - transitivePeerDependencies: - - encoding - dev: false - - /flat-cache/3.0.4: - resolution: {integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==} - engines: {node: ^10.12.0 || >=12.0.0} - dependencies: - flatted: 3.2.7 - rimraf: 3.0.2 - dev: true - - /flat/5.0.2: - resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==} - hasBin: true - dev: false - - /flatted/3.2.7: - resolution: {integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==} - dev: true - - /flow-parser/0.198.2: - resolution: {integrity: sha512-tCQzqXbRAz0ZadIhAXGwdp/xsusADo8IK9idgc/2qCK5RmazbKDGedyykfRtzWgy7Klt4f4NZxq0o/wFUg6plQ==} - engines: {node: '>=0.4.0'} - dev: true - - /for-each/0.3.3: - resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} - dependencies: - is-callable: 1.2.7 - dev: true - - /for-in/1.0.2: - resolution: {integrity: 
sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ==} - engines: {node: '>=0.10.0'} - dev: true - - /foreground-child/2.0.0: - resolution: {integrity: sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==} - engines: {node: '>=8.0.0'} - dependencies: - cross-spawn: 7.0.3 - signal-exit: 3.0.7 - dev: true - - /form-data/3.0.1: - resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==} - engines: {node: '>= 6'} - dependencies: - asynckit: 0.4.0 - combined-stream: 1.0.8 - mime-types: 2.1.35 - - /form-data/4.0.0: - resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} - engines: {node: '>= 6'} - dependencies: - asynckit: 0.4.0 - combined-stream: 1.0.8 - mime-types: 2.1.35 - dev: true - - /format-util/1.0.5: - resolution: {integrity: sha512-varLbTj0e0yVyRpqQhuWV+8hlePAgaoFRhNFj50BNjEIrw1/DphHSObtqwskVCPWNgzwPoQrZAbfa/SBiicNeg==} - dev: true - - /format/0.2.2: - resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} - engines: {node: '>=0.4.x'} - dev: false - - /formik/2.2.9_react@17.0.2: - resolution: {integrity: sha512-LQLcISMmf1r5at4/gyJigGn0gOwFbeEAlji+N9InZF6LIMXnFNkO42sCI8Jt84YZggpD4cPWObAZaxpEFtSzNA==} - peerDependencies: - react: '>=16.8.0' - dependencies: - deepmerge: 2.2.1 - hoist-non-react-statics: 3.3.2 - lodash: 4.17.21 - lodash-es: 4.17.21 - react: 17.0.2 - react-fast-compare: 2.0.4 - tiny-warning: 1.0.3 - tslib: 1.14.1 - dev: false - - /forwarded/0.2.0: - resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} - engines: {node: '>= 0.6'} - dev: true - - /fragment-cache/0.2.1: - resolution: {integrity: sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA==} - engines: {node: 
'>=0.10.0'} - dependencies: - map-cache: 0.2.2 - dev: true - - /framer-motion/6.5.1_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-o1BGqqposwi7cgDrtg0dNONhkmPsUFDaLcKXigzuTFC5x58mE8iyTazxSudFzmT6MEyJKfjjU8ItoMe3W+3fiw==} - peerDependencies: - react: '>=16.8 || ^17.0.0 || ^18.0.0' - react-dom: '>=16.8 || ^17.0.0 || ^18.0.0' - dependencies: - '@motionone/dom': 10.12.0 - framesync: 6.0.1 - hey-listen: 1.0.8 - popmotion: 11.0.3 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - style-value-types: 5.0.0 - tslib: 2.4.1 - optionalDependencies: - '@emotion/is-prop-valid': 0.8.8 - dev: false - - /framesync/6.0.1: - resolution: {integrity: sha512-fUY88kXvGiIItgNC7wcTOl0SNRCVXMKSWW2Yzfmn7EKNc+MpCzcz9DhdHcdjbrtN3c6R4H5dTY2jiCpPdysEjA==} - dependencies: - tslib: 2.4.1 - dev: false - - /fresh/0.5.2: - resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} - engines: {node: '>= 0.6'} - dev: true - - /fs-extra/10.1.0: - resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} - engines: {node: '>=12'} - dependencies: - graceful-fs: 4.2.10 - jsonfile: 6.1.0 - universalify: 2.0.0 - dev: true - - /fs-extra/11.1.0: - resolution: {integrity: sha512-0rcTq621PD5jM/e0a3EJoGC/1TC5ZBCERW82LQuwfGnCa1V8w7dpYH1yNu+SLb6E5dkeCBzKEyLGlFrnr+dUyw==} - engines: {node: '>=14.14'} - dependencies: - graceful-fs: 4.2.10 - jsonfile: 6.1.0 - universalify: 2.0.0 - dev: true - - /fs-extra/8.1.0: - resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} - engines: {node: '>=6 <7 || >=8'} - dependencies: - graceful-fs: 4.2.10 - jsonfile: 4.0.0 - universalify: 0.1.2 - dev: true - - /fs-minipass/2.1.0: - resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} - engines: {node: '>= 8'} - dependencies: - minipass: 3.3.6 - dev: true - - 
/fs.realpath/1.0.0: - resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} - - /fsevents/2.3.2: - resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - requiresBuild: true - optional: true - - /ftp/0.3.10: - resolution: {integrity: sha512-faFVML1aBx2UoDStmLwv2Wptt4vw5x03xxX172nhA5Y5HBshW5JweqQ2W4xL4dezQTG8inJsuYcpPHHU3X5OTQ==} - engines: {node: '>=0.8.0'} - dependencies: - readable-stream: 1.1.14 - xregexp: 2.0.0 - dev: true - - /function-bind/1.1.1: - resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} - - /function.prototype.name/1.1.5: - resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - functions-have-names: 1.2.3 - dev: true - - /functions-have-names/1.2.3: - resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} - dev: true - - /gauge/3.0.2: - resolution: {integrity: sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==} - engines: {node: '>=10'} - dependencies: - aproba: 2.0.0 - color-support: 1.1.3 - console-control-strings: 1.1.0 - has-unicode: 2.0.1 - object-assign: 4.1.1 - signal-exit: 3.0.7 - string-width: 4.2.3 - strip-ansi: 6.0.1 - wide-align: 1.1.5 - dev: true - - /generic-names/1.0.3: - resolution: {integrity: sha512-b6OHfQuKasIKM9b6YPkX+KUj/TLBTx3B/1aT1T5F12FEuEqyFMdr59OMS53aoaSw8eVtapdqieX6lbg5opaOhA==} - dependencies: - loader-utils: 3.2.1 - dev: true - - /gensync/1.0.0-beta.2: - resolution: {integrity: 
sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} - engines: {node: '>=6.9.0'} - - /get-caller-file/2.0.5: - resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} - engines: {node: 6.* || 8.* || >= 10.*} - - /get-intrinsic/1.1.3: - resolution: {integrity: sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==} - dependencies: - function-bind: 1.1.1 - has: 1.0.3 - has-symbols: 1.0.3 - dev: true - - /get-intrinsic/1.2.0: - resolution: {integrity: sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==} - dependencies: - function-bind: 1.1.1 - has: 1.0.3 - has-symbols: 1.0.3 - dev: true - - /get-node-dimensions/1.2.1: - resolution: {integrity: sha512-2MSPMu7S1iOTL+BOa6K1S62hB2zUAYNF/lV0gSVlOaacd087lc6nR1H1r0e3B1CerTo+RceOmi1iJW+vp21xcQ==} - dev: false - - /get-package-type/0.1.0: - resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} - engines: {node: '>=8.0.0'} - dev: true - - /get-port/5.1.1: - resolution: {integrity: sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==} - engines: {node: '>=8'} - dev: true - - /get-source/2.0.12: - resolution: {integrity: sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w==} - dependencies: - data-uri-to-buffer: 2.0.2 - source-map: 0.6.1 - dev: true - - /get-stream/6.0.1: - resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} - engines: {node: '>=10'} - dev: true - - /get-symbol-description/1.0.0: - resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 - dev: true - - 
/get-uri/3.0.2: - resolution: {integrity: sha512-+5s0SJbGoyiJTZZ2JTpFPLMPSch72KEqGOTvQsBqg0RBWvwhWUSYZFAtz3TPW0GXJuLBJPts1E241iHg+VRfhg==} - engines: {node: '>= 6'} - dependencies: - '@tootallnate/once': 1.1.2 - data-uri-to-buffer: 3.0.1 - debug: 4.3.4 - file-uri-to-path: 2.0.0 - fs-extra: 8.1.0 - ftp: 0.3.10 - transitivePeerDependencies: - - supports-color - dev: true - - /get-value/2.0.6: - resolution: {integrity: sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA==} - engines: {node: '>=0.10.0'} - dev: true - - /giget/1.0.0: - resolution: {integrity: sha512-KWELZn3Nxq5+0So485poHrFriK9Bn3V/x9y+wgqrHkbmnGbjfLmZ685/SVA/ovW+ewoqW0gVI47pI4yW/VNobQ==} - hasBin: true - dependencies: - colorette: 2.0.19 - defu: 6.1.2 - https-proxy-agent: 5.0.1 - mri: 1.2.0 - node-fetch-native: 1.0.1 - pathe: 1.1.0 - tar: 6.1.13 - transitivePeerDependencies: - - supports-color - dev: true - - /github-slugger/1.5.0: - resolution: {integrity: sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==} - dev: true - - /github-slugger/2.0.0: - resolution: {integrity: sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==} - dev: false - - /glob-parent/5.1.2: - resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} - engines: {node: '>= 6'} - dependencies: - is-glob: 4.0.3 - - /glob-parent/6.0.2: - resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} - engines: {node: '>=10.13.0'} - dependencies: - is-glob: 4.0.3 - dev: true - - /glob-promise/4.2.2_glob@7.2.3: - resolution: {integrity: sha512-xcUzJ8NWN5bktoTIX7eOclO1Npxd/dyVqUJxlLIDasT4C7KZyqlPIwkdJ0Ypiy3p2ZKahTjK4M9uC3sNSfNMzw==} - engines: {node: '>=12'} - peerDependencies: - glob: ^7.1.6 - dependencies: - '@types/glob': 7.2.0 - glob: 7.2.3 - dev: true - - /glob-to-regexp/0.4.1: - 
resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} - dev: true - - /glob/7.2.3: - resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 3.1.2 - once: 1.4.0 - path-is-absolute: 1.0.1 - - /global-modules/2.0.0: - resolution: {integrity: sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==} - engines: {node: '>=6'} - dependencies: - global-prefix: 3.0.0 - dev: true - - /global-prefix/3.0.0: - resolution: {integrity: sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==} - engines: {node: '>=6'} - dependencies: - ini: 1.3.8 - kind-of: 6.0.3 - which: 1.3.1 - dev: true - - /globals/11.12.0: - resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} - engines: {node: '>=4'} - - /globals/13.19.0: - resolution: {integrity: sha512-dkQ957uSRWHw7CFXLUtUHQI3g3aWApYhfNR2O6jn/907riyTYKVBmxYVROkBcY614FSSeSJh7Xm7SrUWCxvJMQ==} - engines: {node: '>=8'} - dependencies: - type-fest: 0.20.2 - dev: true - - /globalthis/1.0.3: - resolution: {integrity: sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==} - engines: {node: '>= 0.4'} - dependencies: - define-properties: 1.1.4 - dev: true - - /globby/11.1.0: - resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} - engines: {node: '>=10'} - dependencies: - array-union: 2.1.0 - dir-glob: 3.0.1 - fast-glob: 3.2.12 - ignore: 5.2.4 - merge2: 1.4.1 - slash: 3.0.0 - dev: true - - /globjoin/0.1.4: - resolution: {integrity: sha512-xYfnw62CKG8nLkZBfWbhWwDw02CHty86jfPcc2cr3ZfeuK9ysoVPPEUxf21bAD/rWAgk52SuBrLJlefNy8mvFg==} - dev: true - - /globrex/0.1.2: - resolution: {integrity: 
sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==} - dev: true - - /gonzales-pe/4.3.0: - resolution: {integrity: sha512-otgSPpUmdWJ43VXyiNgEYE4luzHCL2pz4wQ0OnDluC6Eg4Ko3Vexy/SrSynglw/eR+OhkzmqFCZa/OFa/RgAOQ==} - engines: {node: '>=0.6.0'} - hasBin: true - dependencies: - minimist: 1.2.7 - dev: true - - /gopd/1.0.1: - resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} - dependencies: - get-intrinsic: 1.1.3 - dev: true - - /graceful-fs/4.2.10: - resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} - dev: true - - /grapheme-splitter/1.0.4: - resolution: {integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==} - dev: true - - /handlebars/4.7.7: - resolution: {integrity: sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==} - engines: {node: '>=0.4.7'} - hasBin: true - dependencies: - minimist: 1.2.7 - neo-async: 2.6.2 - source-map: 0.6.1 - wordwrap: 1.0.0 - optionalDependencies: - uglify-js: 3.17.4 - dev: true - - /hard-rejection/2.1.0: - resolution: {integrity: sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==} - engines: {node: '>=6'} - dev: true - - /has-bigints/1.0.2: - resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} - dev: true - - /has-flag/3.0.0: - resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} - engines: {node: '>=4'} - - /has-flag/4.0.0: - resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} - engines: {node: '>=8'} - dev: true - - /has-property-descriptors/1.0.0: - resolution: {integrity: 
sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} - dependencies: - get-intrinsic: 1.1.3 - dev: true - - /has-proto/1.0.1: - resolution: {integrity: sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==} - engines: {node: '>= 0.4'} - dev: true - - /has-symbols/1.0.3: - resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} - engines: {node: '>= 0.4'} - dev: true - - /has-tostringtag/1.0.0: - resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} - engines: {node: '>= 0.4'} - dependencies: - has-symbols: 1.0.3 - dev: true - - /has-unicode/2.0.1: - resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==} - dev: true - - /has-value/0.3.1: - resolution: {integrity: sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q==} - engines: {node: '>=0.10.0'} - dependencies: - get-value: 2.0.6 - has-values: 0.1.4 - isobject: 2.1.0 - dev: true - - /has-value/1.0.0: - resolution: {integrity: sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw==} - engines: {node: '>=0.10.0'} - dependencies: - get-value: 2.0.6 - has-values: 1.0.0 - isobject: 3.0.1 - dev: true - - /has-values/0.1.4: - resolution: {integrity: sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ==} - engines: {node: '>=0.10.0'} - dev: true - - /has-values/1.0.0: - resolution: {integrity: sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ==} - engines: {node: '>=0.10.0'} - dependencies: - is-number: 3.0.0 - kind-of: 4.0.0 - dev: true - - /has/1.0.3: - resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} - 
engines: {node: '>= 0.4.0'} - dependencies: - function-bind: 1.1.1 - - /hast-util-has-property/1.0.4: - resolution: {integrity: sha512-ghHup2voGfgFoHMGnaLHOjbYFACKrRh9KFttdCzMCbFoBMJXiNi2+XTrPP8+q6cDJM/RSqlCfVWrjp1H201rZg==} - dev: false - - /hast-util-has-property/2.0.1: - resolution: {integrity: sha512-X2+RwZIMTMKpXUzlotatPzWj8bspCymtXH3cfG3iQKV+wPF53Vgaqxi/eLqGck0wKq1kS9nvoB1wchbCPEL8sg==} - dev: false - - /hast-util-heading-rank/2.1.1: - resolution: {integrity: sha512-iAuRp+ESgJoRFJbSyaqsfvJDY6zzmFoEnL1gtz1+U8gKtGGj1p0CVlysuUAUjq95qlZESHINLThwJzNGmgGZxA==} - dependencies: - '@types/hast': 2.3.4 - dev: false - - /hast-util-to-string/2.0.0: - resolution: {integrity: sha512-02AQ3vLhuH3FisaMM+i/9sm4OXGSq1UhOOCpTLLQtHdL3tZt7qil69r8M8iDkZYyC0HCFylcYoP+8IO7ddta1A==} - dependencies: - '@types/hast': 2.3.4 - dev: false - - /hast-util-whitespace/2.0.1: - resolution: {integrity: sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng==} - dev: false - - /hey-listen/1.0.8: - resolution: {integrity: sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==} - dev: false - - /history/5.3.0: - resolution: {integrity: sha512-ZqaKwjjrAYUYfLG+htGaIIZ4nioX2L70ZUMIFysS3xvBsSG4x/n1V6TXV3N8ZYNuFGlDirFg32T7B6WOUPDYcQ==} - dependencies: - '@babel/runtime': 7.20.7 - - /hoist-non-react-statics/3.3.2: - resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==} - dependencies: - react-is: 16.13.1 - - /hosted-git-info/2.8.9: - resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} - dev: true - - /hosted-git-info/4.1.0: - resolution: {integrity: sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==} - engines: {node: '>=10'} - dependencies: - lru-cache: 6.0.0 - dev: true - - /html-encoding-sniffer/3.0.0: - resolution: {integrity: 
sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==} - engines: {node: '>=12'} - dependencies: - whatwg-encoding: 2.0.0 - dev: true - - /html-escaper/2.0.2: - resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} - dev: true - - /html-tags/3.2.0: - resolution: {integrity: sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg==} - engines: {node: '>=8'} - dev: true - - /htmlparser2/8.0.1: - resolution: {integrity: sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==} - dependencies: - domelementtype: 2.3.0 - domhandler: 5.0.3 - domutils: 3.0.1 - entities: 4.4.0 - - /http-errors/2.0.0: - resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} - engines: {node: '>= 0.8'} - dependencies: - depd: 2.0.0 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: 2.0.1 - toidentifier: 1.0.1 - dev: true - - /http-parser-js/0.5.8: - resolution: {integrity: sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==} - dev: false - - /http-proxy-agent/4.0.1: - resolution: {integrity: sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==} - engines: {node: '>= 6'} - dependencies: - '@tootallnate/once': 1.1.2 - agent-base: 6.0.2 - debug: 4.3.4 - transitivePeerDependencies: - - supports-color - dev: true - - /http-proxy-agent/5.0.0: - resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} - engines: {node: '>= 6'} - dependencies: - '@tootallnate/once': 2.0.0 - agent-base: 6.0.2 - debug: 4.3.4 - transitivePeerDependencies: - - supports-color - dev: true - - /http2-client/1.3.5: - resolution: {integrity: 
sha512-EC2utToWl4RKfs5zd36Mxq7nzHHBuomZboI0yYL6Y0RmBgT7Sgkq4rQ0ezFTYoIsSs7Tm9SJe+o2FcAg6GBhGA==} - dev: true - - /https-proxy-agent/4.0.0: - resolution: {integrity: sha512-zoDhWrkR3of1l9QAL8/scJZyLu8j/gBkcwcaQOZh7Gyh/+uJQzGVETdgT30akuwkpL8HTRfssqI3BZuV18teDg==} - engines: {node: '>= 6.0.0'} - dependencies: - agent-base: 5.1.1 - debug: 4.3.4 - transitivePeerDependencies: - - supports-color - dev: true - - /https-proxy-agent/5.0.1: - resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} - engines: {node: '>= 6'} - dependencies: - agent-base: 6.0.2 - debug: 4.3.4 - transitivePeerDependencies: - - supports-color - dev: true - - /human-signals/2.1.0: - resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} - engines: {node: '>=10.17.0'} - dev: true - - /husky/8.0.3: - resolution: {integrity: sha512-+dQSyqPh4x1hlO1swXBiNb2HzTDN1I2IGLQx1GrBuiqFJfoMrnZWwVmatvSiO+Iz8fBUnf+lekwNo4c2LlXItg==} - engines: {node: '>=14'} - hasBin: true - dev: true - - /hyphenate-style-name/1.0.4: - resolution: {integrity: sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==} - dev: false - - /ibm-openapi-validator/0.88.3: - resolution: {integrity: sha512-WHkkO5TXWSS12P8VybB04Stq+yFloMlHy2aVzcLAZo425PYIVMuIWhsH7zN9vwcZcOB/qAnWQ4T3PKn6wrcT+Q==} - engines: {node: '>=12.0.0'} - hasBin: true - dependencies: - '@ibm-cloud/openapi-ruleset': 0.37.3 - '@stoplight/spectral-cli': 6.6.0 - '@stoplight/spectral-core': 1.16.0 - '@stoplight/spectral-parsers': 1.0.2 - chalk: 4.1.2 - commander: 2.20.3 - deepmerge: 2.2.1 - find-up: 3.0.0 - globby: 11.1.0 - js-yaml: 3.14.1 - json-dup-key-validator: 1.0.3 - json-schema-ref-parser: 5.1.3 - jsonschema: 1.4.1 - lodash: 4.17.21 - matcher: 1.1.1 - pad: 2.3.0 - require-all: 3.0.0 - semver: 5.7.1 - validator: 13.7.0 - yaml-js: 0.2.3 - transitivePeerDependencies: - - encoding - - supports-color - dev: 
true - - /iconv-lite/0.4.24: - resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} - engines: {node: '>=0.10.0'} - dependencies: - safer-buffer: 2.1.2 - dev: true - - /iconv-lite/0.6.3: - resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} - engines: {node: '>=0.10.0'} - dependencies: - safer-buffer: 2.1.2 - dev: true - - /icss-utils/3.0.1: - resolution: {integrity: sha512-ANhVLoEfe0KoC9+z4yiTaXOneB49K6JIXdS+yAgH0NERELpdIT7kkj2XxUPuHafeHnn8umXnECSpsfk1RTaUew==} - dependencies: - postcss: 8.4.21 - dev: true - - /icss-utils/5.1.0_postcss@8.4.21: - resolution: {integrity: sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==} - engines: {node: ^10 || ^12 || >= 14} - peerDependencies: - postcss: ^8.1.0 - dependencies: - postcss: 8.4.21 - dev: true - - /idb/7.0.1: - resolution: {integrity: sha512-UUxlE7vGWK5RfB/fDwEGgRf84DY/ieqNha6msMV99UsEMQhJ1RwbCd8AYBj3QMgnE3VZnfQvm4oKVCJTYlqIgg==} - dev: false - - /ignore/5.2.4: - resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==} - engines: {node: '>= 4'} - dev: true - - /image-size/0.5.5: - resolution: {integrity: sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==} - engines: {node: '>=0.10.0'} - hasBin: true - requiresBuild: true - dev: true - optional: true - - /immer/9.0.19: - resolution: {integrity: sha512-eY+Y0qcsB4TZKwgQzLaE/lqYMlKhv5J9dyd2RhhtGhNo2njPXDqU9XPfcNfa3MIDsdtZt5KlkIsirlo4dHsWdQ==} - dev: true - - /immutable/3.8.2: - resolution: {integrity: sha512-15gZoQ38eYjEjxkorfbcgBKBL6R7T459OuK+CpcWt7O3KF4uPCx2tD0uFETlUDIyo+1789crbMhTvQBSR5yBMg==} - engines: {node: '>=0.10.0'} - dev: false - - /immutable/4.2.2: - resolution: {integrity: sha512-fTMKDwtbvO5tldky9QZ2fMX7slR0mYpY5nbnFWYp0fOzDhHqhgIw9KoYgxLWsoNTS9ZHGauHj18DTyEw6BK3Og==} - - 
/import-fresh/3.3.0: - resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} - engines: {node: '>=6'} - dependencies: - parent-module: 1.0.1 - resolve-from: 4.0.0 - - /import-lazy/4.0.0: - resolution: {integrity: sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==} - engines: {node: '>=8'} - dev: true - - /import-local/3.1.0: - resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} - engines: {node: '>=8'} - hasBin: true - dependencies: - pkg-dir: 4.2.0 - resolve-cwd: 3.0.0 - dev: true - - /imurmurhash/0.1.4: - resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} - engines: {node: '>=0.8.19'} - dev: true - - /indent-string/4.0.0: - resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} - engines: {node: '>=8'} - dev: true - - /inflight/1.0.6: - resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} - dependencies: - once: 1.4.0 - wrappy: 1.0.2 - - /inherits/2.0.4: - resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - - /ini/1.3.8: - resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} - dev: true - - /inline-style-parser/0.1.1: - resolution: {integrity: sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==} - dev: false - - /inline-style-prefixer/6.0.4: - resolution: {integrity: sha512-FwXmZC2zbeeS7NzGjJ6pAiqRhXR0ugUShSNb6GApMl6da0/XGc4MOJsoWAywia52EEWbXNSy0pzkwz/+Y+swSg==} - dependencies: - css-in-js-utils: 3.1.0 - fast-loops: 1.1.3 - dev: false - - /internal-slot/1.0.4: - resolution: {integrity: 
sha512-tA8URYccNzMo94s5MQZgH8NB/XTa6HsOo0MLfXTKKEnHVVdegzaQoFZ7Jp44bdvLvY2waT5dc+j5ICEswhi7UQ==} - engines: {node: '>= 0.4'} - dependencies: - get-intrinsic: 1.1.3 - has: 1.0.3 - side-channel: 1.0.4 - dev: true - - /internmap/2.0.3: - resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} - engines: {node: '>=12'} - dev: false - - /interpret/1.4.0: - resolution: {integrity: sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==} - engines: {node: '>= 0.10'} - dev: true - - /intl-messageformat/10.2.5: - resolution: {integrity: sha512-AievYMN6WLLHwBeCTv4aRKG+w3ZNyZtkObwgsKk3Q7GNTq8zDRvDbJSBQkb2OPeVCcAKcIXvak9FF/bRNavoww==} - dependencies: - '@formatjs/ecma402-abstract': 1.14.3 - '@formatjs/fast-memoize': 1.2.7 - '@formatjs/icu-messageformat-parser': 2.1.14 - tslib: 2.4.1 - dev: false - - /invariant/2.2.4: - resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} - dependencies: - loose-envify: 1.4.0 - dev: false - - /ip/1.1.8: - resolution: {integrity: sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg==} - dev: true - - /ip/2.0.0: - resolution: {integrity: sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==} - dev: true - - /ipaddr.js/1.9.1: - resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} - engines: {node: '>= 0.10'} - dev: true - - /is-absolute-url/3.0.3: - resolution: {integrity: sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==} - engines: {node: '>=8'} - dev: true - - /is-accessor-descriptor/0.1.6: - resolution: {integrity: sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A==} - engines: {node: '>=0.10.0'} - dependencies: - kind-of: 3.2.2 - dev: 
true - - /is-accessor-descriptor/1.0.0: - resolution: {integrity: sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==} - engines: {node: '>=0.10.0'} - dependencies: - kind-of: 6.0.3 - dev: true - - /is-alphabetical/2.0.1: - resolution: {integrity: sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==} - dev: false - - /is-alphanumerical/2.0.1: - resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==} - dependencies: - is-alphabetical: 2.0.1 - is-decimal: 2.0.1 - dev: false - - /is-arguments/1.1.1: - resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - has-tostringtag: 1.0.0 - dev: true - - /is-array-buffer/3.0.1: - resolution: {integrity: sha512-ASfLknmY8Xa2XtB4wmbz13Wu202baeA18cJBCeCy0wXUHZF0IPyVEXqKEcd+t2fNSLLL1vC6k7lxZEojNbISXQ==} - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 - is-typed-array: 1.1.10 - dev: true - - /is-arrayish/0.2.1: - resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} - - /is-arrayish/0.3.2: - resolution: {integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==} - dev: false - - /is-bigint/1.0.4: - resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} - dependencies: - has-bigints: 1.0.2 - dev: true - - /is-binary-path/2.1.0: - resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} - engines: {node: '>=8'} - dependencies: - binary-extensions: 2.2.0 - - /is-boolean-object/1.1.2: - resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} - 
engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - has-tostringtag: 1.0.0 - dev: true - - /is-buffer/1.1.6: - resolution: {integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==} - dev: true - - /is-buffer/2.0.5: - resolution: {integrity: sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==} - engines: {node: '>=4'} - dev: false - - /is-callable/1.2.7: - resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} - engines: {node: '>= 0.4'} - dev: true - - /is-core-module/2.11.0: - resolution: {integrity: sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==} - dependencies: - has: 1.0.3 - - /is-data-descriptor/0.1.4: - resolution: {integrity: sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==} - engines: {node: '>=0.10.0'} - dependencies: - kind-of: 3.2.2 - dev: true - - /is-data-descriptor/1.0.0: - resolution: {integrity: sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==} - engines: {node: '>=0.10.0'} - dependencies: - kind-of: 6.0.3 - dev: true - - /is-date-object/1.0.5: - resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} - engines: {node: '>= 0.4'} - dependencies: - has-tostringtag: 1.0.0 - dev: true - - /is-decimal/2.0.1: - resolution: {integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==} - dev: false - - /is-descriptor/0.1.6: - resolution: {integrity: sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==} - engines: {node: '>=0.10.0'} - dependencies: - is-accessor-descriptor: 0.1.6 - is-data-descriptor: 0.1.4 - kind-of: 5.1.0 - dev: true - - /is-descriptor/1.0.2: - resolution: {integrity: 
sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==} - engines: {node: '>=0.10.0'} - dependencies: - is-accessor-descriptor: 1.0.0 - is-data-descriptor: 1.0.0 - kind-of: 6.0.3 - dev: true - - /is-docker/2.2.1: - resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} - engines: {node: '>=8'} - hasBin: true - dev: true - - /is-extendable/0.1.1: - resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} - engines: {node: '>=0.10.0'} - dev: true - - /is-extendable/1.0.1: - resolution: {integrity: sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==} - engines: {node: '>=0.10.0'} - dependencies: - is-plain-object: 2.0.4 - dev: true - - /is-extglob/2.1.1: - resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} - engines: {node: '>=0.10.0'} - - /is-fullwidth-code-point/3.0.0: - resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} - engines: {node: '>=8'} - - /is-fullwidth-code-point/4.0.0: - resolution: {integrity: sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==} - engines: {node: '>=12'} - dev: true - - /is-generator-fn/2.1.0: - resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} - engines: {node: '>=6'} - dev: true - - /is-generator-function/1.0.10: - resolution: {integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==} - engines: {node: '>= 0.4'} - dependencies: - has-tostringtag: 1.0.0 - dev: true - - /is-glob/4.0.3: - resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} - engines: {node: 
'>=0.10.0'} - dependencies: - is-extglob: 2.1.1 - - /is-hexadecimal/2.0.1: - resolution: {integrity: sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==} - dev: false - - /is-map/2.0.2: - resolution: {integrity: sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==} - dev: true - - /is-nan/1.3.2: - resolution: {integrity: sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - dev: true - - /is-negative-zero/2.0.2: - resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==} - engines: {node: '>= 0.4'} - dev: true - - /is-number-object/1.0.7: - resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} - engines: {node: '>= 0.4'} - dependencies: - has-tostringtag: 1.0.0 - dev: true - - /is-number/3.0.0: - resolution: {integrity: sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==} - engines: {node: '>=0.10.0'} - dependencies: - kind-of: 3.2.2 - dev: true - - /is-number/7.0.0: - resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} - engines: {node: '>=0.12.0'} - - /is-path-cwd/2.2.0: - resolution: {integrity: sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==} - engines: {node: '>=6'} - dev: true - - /is-path-inside/3.0.3: - resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} - engines: {node: '>=8'} - dev: true - - /is-plain-obj/1.1.0: - resolution: {integrity: sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==} - engines: {node: '>=0.10.0'} - dev: true - - 
/is-plain-obj/4.1.0: - resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} - engines: {node: '>=12'} - dev: false - - /is-plain-object/2.0.4: - resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==} - engines: {node: '>=0.10.0'} - dependencies: - isobject: 3.0.1 - dev: true - - /is-plain-object/5.0.0: - resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} - engines: {node: '>=0.10.0'} - - /is-potential-custom-element-name/1.0.1: - resolution: {integrity: sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==} - dev: true - - /is-reference/1.2.1: - resolution: {integrity: sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==} - dependencies: - '@types/estree': 1.0.0 - dev: true - - /is-regex/1.1.4: - resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - has-tostringtag: 1.0.0 - dev: true - - /is-set/2.0.2: - resolution: {integrity: sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==} - dev: true - - /is-shared-array-buffer/1.0.2: - resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==} - dependencies: - call-bind: 1.0.2 - dev: true - - /is-stream/2.0.1: - resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} - engines: {node: '>=8'} - dev: true - - /is-string/1.0.7: - resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} - engines: {node: '>= 0.4'} - dependencies: - has-tostringtag: 1.0.0 - dev: true - - 
/is-symbol/1.0.4: - resolution: {integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==} - engines: {node: '>= 0.4'} - dependencies: - has-symbols: 1.0.3 - dev: true - - /is-typed-array/1.1.10: - resolution: {integrity: sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==} - engines: {node: '>= 0.4'} - dependencies: - available-typed-arrays: 1.0.5 - call-bind: 1.0.2 - for-each: 0.3.3 - gopd: 1.0.1 - has-tostringtag: 1.0.0 - dev: true - - /is-weakmap/2.0.1: - resolution: {integrity: sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==} - dev: true - - /is-weakref/1.0.2: - resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} - dependencies: - call-bind: 1.0.2 - dev: true - - /is-weakset/2.0.2: - resolution: {integrity: sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==} - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 - dev: true - - /is-what/3.14.1: - resolution: {integrity: sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==} - dev: true - - /is-windows/1.0.2: - resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} - engines: {node: '>=0.10.0'} - dev: true - - /is-wsl/2.2.0: - resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} - engines: {node: '>=8'} - dependencies: - is-docker: 2.2.1 - dev: true - - /isarray/0.0.1: - resolution: {integrity: sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==} - dev: true - - /isarray/1.0.0: - resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} - dev: true - - /isarray/2.0.5: - 
resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==} - dev: true - - /isexe/2.0.0: - resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - dev: true - - /isobject/2.1.0: - resolution: {integrity: sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==} - engines: {node: '>=0.10.0'} - dependencies: - isarray: 1.0.0 - dev: true - - /isobject/3.0.1: - resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==} - engines: {node: '>=0.10.0'} - dev: true - - /isomorphic-unfetch/3.1.0: - resolution: {integrity: sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q==} - dependencies: - node-fetch: 2.6.8 - unfetch: 4.2.0 - transitivePeerDependencies: - - encoding - dev: true - - /istanbul-lib-coverage/3.2.0: - resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==} - engines: {node: '>=8'} - dev: true - - /istanbul-lib-instrument/5.2.1: - resolution: {integrity: sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==} - engines: {node: '>=8'} - dependencies: - '@babel/core': 7.20.12 - '@babel/parser': 7.20.7 - '@istanbuljs/schema': 0.1.3 - istanbul-lib-coverage: 3.2.0 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /istanbul-lib-report/3.0.0: - resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} - engines: {node: '>=8'} - dependencies: - istanbul-lib-coverage: 3.2.0 - make-dir: 3.1.0 - supports-color: 7.2.0 - dev: true - - /istanbul-lib-source-maps/4.0.1: - resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==} - engines: {node: 
'>=10'} - dependencies: - debug: 4.3.4 - istanbul-lib-coverage: 3.2.0 - source-map: 0.6.1 - transitivePeerDependencies: - - supports-color - dev: true - - /istanbul-reports/3.1.5: - resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} - engines: {node: '>=8'} - dependencies: - html-escaper: 2.0.2 - istanbul-lib-report: 3.0.0 - dev: true - - /jake/10.8.5: - resolution: {integrity: sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==} - engines: {node: '>=10'} - hasBin: true - dependencies: - async: 3.2.4 - chalk: 4.1.2 - filelist: 1.0.4 - minimatch: 3.1.2 - dev: true - - /jest-changed-files/29.2.0: - resolution: {integrity: sha512-qPVmLLyBmvF5HJrY7krDisx6Voi8DmlV3GZYX0aFNbaQsZeoz1hfxcCMbqDGuQCxU1dJy9eYc2xscE8QrCCYaA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - execa: 5.1.1 - p-limit: 3.1.0 - dev: true - - /jest-circus/29.3.1: - resolution: {integrity: sha512-wpr26sEvwb3qQQbdlmei+gzp6yoSSoSL6GsLPxnuayZSMrSd5Ka7IjAvatpIernBvT2+Ic6RLTg+jSebScmasg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.3.1 - '@jest/expect': 29.3.1 - '@jest/test-result': 29.3.1 - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - chalk: 4.1.2 - co: 4.6.0 - dedent: 0.7.0 - is-generator-fn: 2.1.0 - jest-each: 29.3.1 - jest-matcher-utils: 29.3.1 - jest-message-util: 29.3.1 - jest-runtime: 29.3.1 - jest-snapshot: 29.3.1 - jest-util: 29.3.1 - p-limit: 3.1.0 - pretty-format: 29.3.1 - slash: 3.0.0 - stack-utils: 2.0.6 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-cli/29.3.1_2263m44mchjafa7bz7l52hbcpa: - resolution: {integrity: sha512-TO/ewvwyvPOiBBuWZ0gm04z3WWP8TIK8acgPzE4IxgsLKQgb377NYGrQLc3Wl/7ndWzIH2CDNNsUjGxwLL43VQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - 
optional: true - dependencies: - '@jest/core': 29.3.1_ts-node@10.9.1 - '@jest/test-result': 29.3.1 - '@jest/types': 29.3.1 - chalk: 4.1.2 - exit: 0.1.2 - graceful-fs: 4.2.10 - import-local: 3.1.0 - jest-config: 29.3.1_2263m44mchjafa7bz7l52hbcpa - jest-util: 29.3.1 - jest-validate: 29.3.1 - prompts: 2.4.2 - yargs: 17.6.2 - transitivePeerDependencies: - - '@types/node' - - supports-color - - ts-node - dev: true - - /jest-config/29.3.1_2263m44mchjafa7bz7l52hbcpa: - resolution: {integrity: sha512-y0tFHdj2WnTEhxmGUK1T7fgLen7YK4RtfvpLFBXfQkh2eMJAQq24Vx9472lvn5wg0MAO6B+iPfJfzdR9hJYalg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - '@types/node': '*' - ts-node: '>=9.0.0' - peerDependenciesMeta: - '@types/node': - optional: true - ts-node: - optional: true - dependencies: - '@babel/core': 7.20.12 - '@jest/test-sequencer': 29.3.1 - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - babel-jest: 29.3.1_@babel+core@7.20.12 - chalk: 4.1.2 - ci-info: 3.7.1 - deepmerge: 4.2.2 - glob: 7.2.3 - graceful-fs: 4.2.10 - jest-circus: 29.3.1 - jest-environment-node: 29.3.1 - jest-get-type: 29.2.0 - jest-regex-util: 29.2.0 - jest-resolve: 29.3.1 - jest-runner: 29.3.1 - jest-util: 29.3.1 - jest-validate: 29.3.1 - micromatch: 4.0.5 - parse-json: 5.2.0 - pretty-format: 29.3.1 - slash: 3.0.0 - strip-json-comments: 3.1.1 - ts-node: 10.9.1_cin3sed6ohfsopbmt6orxeb4o4 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-diff/27.5.1: - resolution: {integrity: sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - dependencies: - chalk: 4.1.2 - diff-sequences: 27.5.1 - jest-get-type: 27.5.1 - pretty-format: 27.5.1 - dev: true - - /jest-diff/29.3.1: - resolution: {integrity: sha512-vU8vyiO7568tmin2lA3r2DP8oRvzhvRcD4DjpXc6uGveQodyk7CKLhQlCSiwgx3g0pFaE88/KLZ0yaTWMc4Uiw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 
4.1.2 - diff-sequences: 29.3.1 - jest-get-type: 29.2.0 - pretty-format: 29.3.1 - dev: true - - /jest-docblock/29.2.0: - resolution: {integrity: sha512-bkxUsxTgWQGbXV5IENmfiIuqZhJcyvF7tU4zJ/7ioTutdz4ToB5Yx6JOFBpgI+TphRY4lhOyCWGNH/QFQh5T6A==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - detect-newline: 3.1.0 - dev: true - - /jest-each/29.3.1: - resolution: {integrity: sha512-qrZH7PmFB9rEzCSl00BWjZYuS1BSOH8lLuC0azQE9lQrAx3PWGKHTDudQiOSwIy5dGAJh7KA0ScYlCP7JxvFYA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.3.1 - chalk: 4.1.2 - jest-get-type: 29.2.0 - jest-util: 29.3.1 - pretty-format: 29.3.1 - dev: true - - /jest-environment-jsdom/29.3.1: - resolution: {integrity: sha512-G46nKgiez2Gy4zvYNhayfMEAFlVHhWfncqvqS6yCd0i+a4NsSUD2WtrKSaYQrYiLQaupHXxCRi8xxVL2M9PbhA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - canvas: ^2.5.0 - peerDependenciesMeta: - canvas: - optional: true - dependencies: - '@jest/environment': 29.3.1 - '@jest/fake-timers': 29.3.1 - '@jest/types': 29.3.1 - '@types/jsdom': 20.0.1 - '@types/node': 17.0.45 - jest-mock: 29.3.1 - jest-util: 29.3.1 - jsdom: 20.0.3 - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - dev: true - - /jest-environment-node/29.3.1: - resolution: {integrity: sha512-xm2THL18Xf5sIHoU7OThBPtuH6Lerd+Y1NLYiZJlkE3hbE+7N7r8uvHIl/FkZ5ymKXJe/11SQuf3fv4v6rUMag==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.3.1 - '@jest/fake-timers': 29.3.1 - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - jest-mock: 29.3.1 - jest-util: 29.3.1 - dev: true - - /jest-get-type/27.5.1: - resolution: {integrity: sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - dev: true - - /jest-get-type/29.2.0: - resolution: {integrity: 
sha512-uXNJlg8hKFEnDgFsrCjznB+sTxdkuqiCL6zMgA75qEbAJjJYTs9XPrvDctrEig2GDow22T/LvHgO57iJhXB/UA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dev: true - - /jest-haste-map/29.3.1: - resolution: {integrity: sha512-/FFtvoG1xjbbPXQLFef+WSU4yrc0fc0Dds6aRPBojUid7qlPqZvxdUBA03HW0fnVHXVCnCdkuoghYItKNzc/0A==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.3.1 - '@types/graceful-fs': 4.1.6 - '@types/node': 17.0.45 - anymatch: 3.1.3 - fb-watchman: 2.0.2 - graceful-fs: 4.2.10 - jest-regex-util: 29.2.0 - jest-util: 29.3.1 - jest-worker: 29.3.1 - micromatch: 4.0.5 - walker: 1.0.8 - optionalDependencies: - fsevents: 2.3.2 - dev: true - - /jest-leak-detector/29.3.1: - resolution: {integrity: sha512-3DA/VVXj4zFOPagGkuqHnSQf1GZBmmlagpguxEERO6Pla2g84Q1MaVIB3YMxgUaFIaYag8ZnTyQgiZ35YEqAQA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - jest-get-type: 29.2.0 - pretty-format: 29.3.1 - dev: true - - /jest-matcher-utils/27.5.1: - resolution: {integrity: sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - dependencies: - chalk: 4.1.2 - jest-diff: 27.5.1 - jest-get-type: 27.5.1 - pretty-format: 27.5.1 - dev: true - - /jest-matcher-utils/29.3.1: - resolution: {integrity: sha512-fkRMZUAScup3txIKfMe3AIZZmPEjWEdsPJFK3AIy5qRohWqQFg1qrmKfYXR9qEkNc7OdAu2N4KPHibEmy4HPeQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 4.1.2 - jest-diff: 29.3.1 - jest-get-type: 29.2.0 - pretty-format: 29.3.1 - dev: true - - /jest-message-util/29.3.1: - resolution: {integrity: sha512-lMJTbgNcDm5z+6KDxWtqOFWlGQxD6XaYwBqHR8kmpkP+WWWG90I35kdtQHY67Ay5CSuydkTBbJG+tH9JShFCyA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/code-frame': 7.18.6 - '@jest/types': 29.3.1 - '@types/stack-utils': 2.0.1 - chalk: 4.1.2 - graceful-fs: 4.2.10 - micromatch: 4.0.5 - pretty-format: 29.3.1 - 
slash: 3.0.0 - stack-utils: 2.0.6 - dev: true - - /jest-mock/29.3.1: - resolution: {integrity: sha512-H8/qFDtDVMFvFP4X8NuOT3XRDzOUTz+FeACjufHzsOIBAxivLqkB1PoLCaJx9iPPQ8dZThHPp/G3WRWyMgA3JA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - jest-util: 29.3.1 - dev: true - - /jest-pnp-resolver/1.2.3_jest-resolve@29.3.1: - resolution: {integrity: sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==} - engines: {node: '>=6'} - peerDependencies: - jest-resolve: '*' - peerDependenciesMeta: - jest-resolve: - optional: true - dependencies: - jest-resolve: 29.3.1 - dev: true - - /jest-regex-util/29.2.0: - resolution: {integrity: sha512-6yXn0kg2JXzH30cr2NlThF+70iuO/3irbaB4mh5WyqNIvLLP+B6sFdluO1/1RJmslyh/f9osnefECflHvTbwVA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dev: true - - /jest-resolve-dependencies/29.3.1: - resolution: {integrity: sha512-Vk0cYq0byRw2WluNmNWGqPeRnZ3p3hHmjJMp2dyyZeYIfiBskwq4rpiuGFR6QGAdbj58WC7HN4hQHjf2mpvrLA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - jest-regex-util: 29.2.0 - jest-snapshot: 29.3.1 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-resolve/29.3.1: - resolution: {integrity: sha512-amXJgH/Ng712w3Uz5gqzFBBjxV8WFLSmNjoreBGMqxgCz5cH7swmBZzgBaCIOsvb0NbpJ0vgaSFdJqMdT+rADw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 4.1.2 - graceful-fs: 4.2.10 - jest-haste-map: 29.3.1 - jest-pnp-resolver: 1.2.3_jest-resolve@29.3.1 - jest-util: 29.3.1 - jest-validate: 29.3.1 - resolve: 1.22.1 - resolve.exports: 1.1.1 - slash: 3.0.0 - dev: true - - /jest-runner/29.3.1: - resolution: {integrity: sha512-oFvcwRNrKMtE6u9+AQPMATxFcTySyKfLhvso7Sdk/rNpbhg4g2GAGCopiInk1OP4q6gz3n6MajW4+fnHWlU3bA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/console': 29.3.1 - '@jest/environment': 29.3.1 - '@jest/test-result': 29.3.1 - 
'@jest/transform': 29.3.1 - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - chalk: 4.1.2 - emittery: 0.13.1 - graceful-fs: 4.2.10 - jest-docblock: 29.2.0 - jest-environment-node: 29.3.1 - jest-haste-map: 29.3.1 - jest-leak-detector: 29.3.1 - jest-message-util: 29.3.1 - jest-resolve: 29.3.1 - jest-runtime: 29.3.1 - jest-util: 29.3.1 - jest-watcher: 29.3.1 - jest-worker: 29.3.1 - p-limit: 3.1.0 - source-map-support: 0.5.13 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-runtime/29.3.1: - resolution: {integrity: sha512-jLzkIxIqXwBEOZx7wx9OO9sxoZmgT2NhmQKzHQm1xwR1kNW/dn0OjxR424VwHHf1SPN6Qwlb5pp1oGCeFTQ62A==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.3.1 - '@jest/fake-timers': 29.3.1 - '@jest/globals': 29.3.1 - '@jest/source-map': 29.2.0 - '@jest/test-result': 29.3.1 - '@jest/transform': 29.3.1 - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - chalk: 4.1.2 - cjs-module-lexer: 1.2.2 - collect-v8-coverage: 1.0.1 - glob: 7.2.3 - graceful-fs: 4.2.10 - jest-haste-map: 29.3.1 - jest-message-util: 29.3.1 - jest-mock: 29.3.1 - jest-regex-util: 29.2.0 - jest-resolve: 29.3.1 - jest-snapshot: 29.3.1 - jest-util: 29.3.1 - slash: 3.0.0 - strip-bom: 4.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-snapshot/29.3.1: - resolution: {integrity: sha512-+3JOc+s28upYLI2OJM4PWRGK9AgpsMs/ekNryUV0yMBClT9B1DF2u2qay8YxcQd338PPYSFNb0lsar1B49sLDA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/core': 7.20.12 - '@babel/generator': 7.20.7 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-syntax-typescript': 7.20.0_@babel+core@7.20.12 - '@babel/traverse': 7.20.12 - '@babel/types': 7.20.7 - '@jest/expect-utils': 29.3.1 - '@jest/transform': 29.3.1 - '@jest/types': 29.3.1 - '@types/babel__traverse': 7.18.3 - '@types/prettier': 2.7.2 - babel-preset-current-node-syntax: 1.0.1_@babel+core@7.20.12 - chalk: 4.1.2 - expect: 29.3.1 - graceful-fs: 
4.2.10 - jest-diff: 29.3.1 - jest-get-type: 29.2.0 - jest-haste-map: 29.3.1 - jest-matcher-utils: 29.3.1 - jest-message-util: 29.3.1 - jest-util: 29.3.1 - natural-compare: 1.4.0 - pretty-format: 29.3.1 - semver: 7.3.8 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-util/29.3.1: - resolution: {integrity: sha512-7YOVZaiX7RJLv76ZfHt4nbNEzzTRiMW/IiOG7ZOKmTXmoGBxUDefgMAxQubu6WPVqP5zSzAdZG0FfLcC7HOIFQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - chalk: 4.1.2 - ci-info: 3.7.1 - graceful-fs: 4.2.10 - picomatch: 2.3.1 - dev: true - - /jest-validate/29.3.1: - resolution: {integrity: sha512-N9Lr3oYR2Mpzuelp1F8negJR3YE+L1ebk1rYA5qYo9TTY3f9OWdptLoNSPP9itOCBIRBqjt/S5XHlzYglLN67g==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.3.1 - camelcase: 6.3.0 - chalk: 4.1.2 - jest-get-type: 29.2.0 - leven: 3.1.0 - pretty-format: 29.3.1 - dev: true - - /jest-watcher/29.3.1: - resolution: {integrity: sha512-RspXG2BQFDsZSRKGCT/NiNa8RkQ1iKAjrO0//soTMWx/QUt+OcxMqMSBxz23PYGqUuWm2+m2mNNsmj0eIoOaFg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/test-result': 29.3.1 - '@jest/types': 29.3.1 - '@types/node': 17.0.45 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - emittery: 0.13.1 - jest-util: 29.3.1 - string-length: 4.0.2 - dev: true - - /jest-worker/29.3.1: - resolution: {integrity: sha512-lY4AnnmsEWeiXirAIA0c9SDPbuCBq8IYuDVL8PMm0MZ2PEs2yPvRA/J64QBXuZp7CYKrDM/rmNrc9/i3KJQncw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@types/node': 17.0.45 - jest-util: 29.3.1 - merge-stream: 2.0.0 - supports-color: 8.1.1 - dev: true - - /jest/29.3.1_2263m44mchjafa7bz7l52hbcpa: - resolution: {integrity: sha512-6iWfL5DTT0Np6UYs/y5Niu7WIfNv/wRTtN5RSXt2DIEft3dx3zPuw/3WJQBCJfmEzvDiEKwoqMbGD9n49+qLSA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || 
^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@jest/core': 29.3.1_ts-node@10.9.1 - '@jest/types': 29.3.1 - import-local: 3.1.0 - jest-cli: 29.3.1_2263m44mchjafa7bz7l52hbcpa - transitivePeerDependencies: - - '@types/node' - - supports-color - - ts-node - dev: true - - /js-cookie/2.2.1: - resolution: {integrity: sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==} - dev: false - - /js-sdsl/4.3.0: - resolution: {integrity: sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==} - dev: true - - /js-sha3/0.8.0: - resolution: {integrity: sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==} - dev: false - - /js-tokens/4.0.0: - resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - - /js-yaml/3.14.1: - resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} - hasBin: true - dependencies: - argparse: 1.0.10 - esprima: 4.0.1 - dev: true - - /js-yaml/4.1.0: - resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} - hasBin: true - dependencies: - argparse: 2.0.1 - - /jscodeshift/0.13.1_@babel+preset-env@7.20.2: - resolution: {integrity: sha512-lGyiEbGOvmMRKgWk4vf+lUrCWO/8YR8sUR3FKF1Cq5fovjZDlIcw3Hu5ppLHAnEXshVffvaM0eyuY/AbOeYpnQ==} - hasBin: true - peerDependencies: - '@babel/preset-env': ^7.1.6 - dependencies: - '@babel/core': 7.20.12 - '@babel/parser': 7.20.7 - '@babel/plugin-proposal-class-properties': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6_@babel+core@7.20.12 - '@babel/plugin-proposal-optional-chaining': 7.20.7_@babel+core@7.20.12 - '@babel/plugin-transform-modules-commonjs': 7.20.11_@babel+core@7.20.12 - '@babel/preset-env': 7.20.2_@babel+core@7.20.12 - 
'@babel/preset-flow': 7.18.6_@babel+core@7.20.12 - '@babel/preset-typescript': 7.18.6_@babel+core@7.20.12 - '@babel/register': 7.18.9_@babel+core@7.20.12 - babel-core: 7.0.0-bridge.0_@babel+core@7.20.12 - chalk: 4.1.2 - flow-parser: 0.198.2 - graceful-fs: 4.2.10 - micromatch: 3.1.10 - neo-async: 2.6.2 - node-dir: 0.1.17 - recast: 0.20.5 - temp: 0.8.4 - write-file-atomic: 2.4.3 - transitivePeerDependencies: - - supports-color - dev: true - - /jsdom/20.0.3: - resolution: {integrity: sha512-SYhBvTh89tTfCD/CRdSOm13mOBa42iTaTyfyEWBdKcGdPxPtLFBXuHR8XHb33YNYaP+lLbmSvBTsnoesCNJEsQ==} - engines: {node: '>=14'} - peerDependencies: - canvas: ^2.5.0 - peerDependenciesMeta: - canvas: - optional: true - dependencies: - abab: 2.0.6 - acorn: 8.8.1 - acorn-globals: 7.0.1 - cssom: 0.5.0 - cssstyle: 2.3.0 - data-urls: 3.0.2 - decimal.js: 10.4.3 - domexception: 4.0.0 - escodegen: 2.0.0 - form-data: 4.0.0 - html-encoding-sniffer: 3.0.0 - http-proxy-agent: 5.0.0 - https-proxy-agent: 5.0.1 - is-potential-custom-element-name: 1.0.1 - nwsapi: 2.2.2 - parse5: 7.1.2 - saxes: 6.0.0 - symbol-tree: 3.2.4 - tough-cookie: 4.1.2 - w3c-xmlserializer: 4.0.0 - webidl-conversions: 7.0.0 - whatwg-encoding: 2.0.0 - whatwg-mimetype: 3.0.0 - whatwg-url: 11.0.0 - ws: 8.12.0 - xml-name-validator: 4.0.0 - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - dev: true - - /jsep/1.3.8: - resolution: {integrity: sha512-qofGylTGgYj9gZFsHuyWAN4jr35eJ66qJCK4eKDnldohuUoQFbU3iZn2zjvEbd9wOAhP9Wx5DsAAduTyE1PSWQ==} - engines: {node: '>= 10.16.0'} - dev: true - - /jsesc/0.5.0: - resolution: {integrity: sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==} - hasBin: true - dev: true - - /jsesc/2.5.2: - resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} - engines: {node: '>=4'} - hasBin: true - - /json-dup-key-validator/1.0.3: - resolution: {integrity: 
sha512-JvJcV01JSiO7LRz7DY1Fpzn4wX2rJ3dfNTiAfnlvLNdhhnm0Pgdvhi2SGpENrZn7eSg26Ps3TPhOcuD/a4STXQ==} - dependencies: - backslash: 0.2.0 - dev: true - - /json-parse-even-better-errors/2.3.1: - resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - - /json-schema-ref-parser/5.1.3: - resolution: {integrity: sha512-CpDFlBwz/6la78hZxyB9FECVKGYjIIl3Ms3KLqFj99W7IIb7D00/RDgc++IGB4BBALl0QRhh5m4q5WNSopvLtQ==} - deprecated: Please switch to @apidevtools/json-schema-ref-parser - dependencies: - call-me-maybe: 1.0.2 - debug: 3.2.7 - js-yaml: 3.14.1 - ono: 4.0.11 - transitivePeerDependencies: - - supports-color - dev: true - - /json-schema-traverse/0.4.1: - resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} - dev: true - - /json-schema-traverse/1.0.0: - resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} - dev: true - - /json-schema/0.4.0: - resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==} - dev: false - - /json-stable-stringify-without-jsonify/1.0.1: - resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} - dev: true - - /json2mq/0.2.0: - resolution: {integrity: sha512-SzoRg7ux5DWTII9J2qkrZrqV1gt+rTaoufMxEzXbS26Uid0NwaJd123HcoB80TgubEppxxIGdNxCx50fEoEWQA==} - dependencies: - string-convert: 0.2.1 - dev: false - - /json5/1.0.2: - resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==} - hasBin: true - dependencies: - minimist: 1.2.7 - dev: true - - /json5/2.2.3: - resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} - engines: {node: '>=6'} - hasBin: true - - /jsonc-parser/2.2.1: - resolution: {integrity: 
sha512-o6/yDBYccGvTz1+QFevz6l6OBZ2+fMVu2JZ9CIhzsYRX4mjaK5IyX9eldUdCmga16zlgQxyrj5pt9kzuj2C02w==} - dev: true - - /jsonfile/4.0.0: - resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} - optionalDependencies: - graceful-fs: 4.2.10 - dev: true - - /jsonfile/6.1.0: - resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} - dependencies: - universalify: 2.0.0 - optionalDependencies: - graceful-fs: 4.2.10 - dev: true - - /jsonpath-plus/6.0.1: - resolution: {integrity: sha512-EvGovdvau6FyLexFH2OeXfIITlgIbgZoAZe3usiySeaIDm5QS+A10DKNpaPBBqqRSZr2HN6HVNXxtwUAr2apEw==} - engines: {node: '>=10.0.0'} - requiresBuild: true - dev: true - optional: true - - /jsonpath-plus/7.1.0: - resolution: {integrity: sha512-gTaNRsPWO/K2KY6MrqaUFClF9kmuM6MFH5Dhg1VYDODgFbByw1yb7xu3hrViE/sz+dGOeMWgCzwUwQtAnCTE9g==} - engines: {node: '>=12.0.0'} - dev: true - - /jsonpointer/5.0.1: - resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} - engines: {node: '>=0.10.0'} - dev: true - - /jsonschema/1.4.1: - resolution: {integrity: sha512-S6cATIPVv1z0IlxdN+zUk5EPjkGCdnhN4wVSBlvoUO1tOLJootbo9CquNJmbIh4yikWHiUedhRYrNPn1arpEmQ==} - dev: true - - /jsx-ast-utils/3.3.3: - resolution: {integrity: sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==} - engines: {node: '>=4.0'} - dependencies: - array-includes: 3.1.6 - object.assign: 4.1.4 - dev: true - - /kind-of/3.2.2: - resolution: {integrity: sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==} - engines: {node: '>=0.10.0'} - dependencies: - is-buffer: 1.1.6 - dev: true - - /kind-of/4.0.0: - resolution: {integrity: sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw==} - engines: {node: '>=0.10.0'} - dependencies: - is-buffer: 1.1.6 - 
dev: true - - /kind-of/5.1.0: - resolution: {integrity: sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==} - engines: {node: '>=0.10.0'} - dev: true - - /kind-of/6.0.3: - resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} - engines: {node: '>=0.10.0'} - dev: true - - /kleur/3.0.3: - resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} - engines: {node: '>=6'} - dev: true - - /kleur/4.1.5: - resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} - engines: {node: '>=6'} - dev: false - - /known-css-properties/0.26.0: - resolution: {integrity: sha512-5FZRzrZzNTBruuurWpvZnvP9pum+fe0HcK8z/ooo+U+Hmp4vtbyp1/QDsqmufirXy4egGzbaH/y2uCZf+6W5Kg==} - dev: true - - /language-subtag-registry/0.3.22: - resolution: {integrity: sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==} - dev: true - - /language-tags/1.0.5: - resolution: {integrity: sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==} - dependencies: - language-subtag-registry: 0.3.22 - dev: true - - /launchdarkly-js-client-sdk/3.1.0: - resolution: {integrity: sha512-8aM3Wp5ZAS/TCIELOYkEs4CFzih/g7sO+YCHNQ5g0k80WDMTmTcGLMLhCKmrzaQnMK21HiBwbkBpAYnyzr5yUQ==} - dependencies: - escape-string-regexp: 4.0.0 - launchdarkly-js-sdk-common: 5.0.1 - dev: false - - /launchdarkly-js-sdk-common/5.0.1: - resolution: {integrity: sha512-NPPU/9bT4hUCbglCCMAm9XGDWnRKs3aJ1OMhRF8g6xTUWAjOO2FieoGjwVbUVvOYKIiINVVtAWUiuVYbix1lqw==} - dependencies: - base64-js: 1.5.1 - fast-deep-equal: 2.0.1 - uuid: 8.3.2 - dev: false - - /lazy-universal-dotenv/4.0.0: - resolution: {integrity: sha512-aXpZJRnTkpK6gQ/z4nk+ZBLd/Qdp118cvPruLSIQzQNRhKwEcdXCOzXuF55VDqIiuAaY3UGZ10DJtvZzDcvsxg==} - engines: {node: '>=14.0.0'} - dependencies: 
- app-root-dir: 1.0.2 - dotenv: 16.0.3 - dotenv-expand: 10.0.0 - dev: true - - /less/4.1.3: - resolution: {integrity: sha512-w16Xk/Ta9Hhyei0Gpz9m7VS8F28nieJaL/VyShID7cYvP6IL5oHeL6p4TXSDJqZE/lNv0oJ2pGVjJsRkfwm5FA==} - engines: {node: '>=6'} - hasBin: true - dependencies: - copy-anything: 2.0.6 - parse-node-version: 1.0.1 - tslib: 2.5.0 - optionalDependencies: - errno: 0.1.8 - graceful-fs: 4.2.10 - image-size: 0.5.5 - make-dir: 2.1.0 - mime: 1.6.0 - needle: 3.2.0 - source-map: 0.6.1 - transitivePeerDependencies: - - supports-color - dev: true - - /leven/3.1.0: - resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} - engines: {node: '>=6'} - dev: true - - /levn/0.3.0: - resolution: {integrity: sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==} - engines: {node: '>= 0.8.0'} - dependencies: - prelude-ls: 1.1.2 - type-check: 0.3.2 - dev: true - - /levn/0.4.1: - resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} - engines: {node: '>= 0.8.0'} - dependencies: - prelude-ls: 1.2.1 - type-check: 0.4.0 - dev: true - - /license-checker/25.0.1: - resolution: {integrity: sha512-mET5AIwl7MR2IAKYYoVBBpV0OnkKQ1xGj2IMMeEFIs42QAkEVjRtFZGWmQ28WeU7MP779iAgOaOy93Mn44mn6g==} - hasBin: true - dependencies: - chalk: 2.4.2 - debug: 3.2.7 - mkdirp: 0.5.6 - nopt: 4.0.3 - read-installed: 4.0.3 - semver: 5.7.1 - spdx-correct: 3.1.1 - spdx-expression-parse: 3.0.1 - spdx-satisfies: 4.0.1 - treeify: 1.1.0 - transitivePeerDependencies: - - supports-color - dev: true - - /lilconfig/2.0.5: - resolution: {integrity: sha512-xaYmXZtTHPAw5m+xLN8ab9C+3a8YmV3asNSPOATITbtwrfbwaLJj8h66H1WMIpALCkqsIzK3h7oQ+PdX+LQ9Eg==} - engines: {node: '>=10'} - dev: true - - /lines-and-columns/1.2.4: - resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} - - /lint-staged/12.5.0: 
- resolution: {integrity: sha512-BKLUjWDsKquV/JuIcoQW4MSAI3ggwEImF1+sB4zaKvyVx1wBk3FsG7UK9bpnmBTN1pm7EH2BBcMwINJzCRv12g==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - hasBin: true - dependencies: - cli-truncate: 3.1.0 - colorette: 2.0.19 - commander: 9.5.0 - debug: 4.3.4_supports-color@9.3.1 - execa: 5.1.1 - lilconfig: 2.0.5 - listr2: 4.0.5 - micromatch: 4.0.5 - normalize-path: 3.0.0 - object-inspect: 1.12.3 - pidtree: 0.5.0 - string-argv: 0.3.1 - supports-color: 9.3.1 - yaml: 1.10.2 - transitivePeerDependencies: - - enquirer - dev: true - - /listr2/4.0.5: - resolution: {integrity: sha512-juGHV1doQdpNT3GSTs9IUN43QJb7KHdF9uqg7Vufs/tG9VTzpFphqF4pm/ICdAABGQxsyNn9CiYA3StkI6jpwA==} - engines: {node: '>=12'} - peerDependencies: - enquirer: '>= 2.3.0 < 3' - peerDependenciesMeta: - enquirer: - optional: true - dependencies: - cli-truncate: 2.1.0 - colorette: 2.0.19 - log-update: 4.0.0 - p-map: 4.0.0 - rfdc: 1.3.0 - rxjs: 7.8.0 - through: 2.3.8 - wrap-ansi: 7.0.0 - dev: true - - /loader-utils/3.2.1: - resolution: {integrity: sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw==} - engines: {node: '>= 12.13.0'} - dev: true - - /locate-path/3.0.0: - resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} - engines: {node: '>=6'} - dependencies: - p-locate: 3.0.0 - path-exists: 3.0.0 - dev: true - - /locate-path/5.0.0: - resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} - engines: {node: '>=8'} - dependencies: - p-locate: 4.1.0 - dev: true - - /locate-path/6.0.0: - resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} - engines: {node: '>=10'} - dependencies: - p-locate: 5.0.0 - dev: true - - /lodash-es/4.17.21: - resolution: {integrity: 
sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} - dev: false - - /lodash.camelcase/4.3.0: - resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} - - /lodash.debounce/4.0.8: - resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} - - /lodash.get/4.4.2: - resolution: {integrity: sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==} - dev: true - - /lodash.isempty/4.4.0: - resolution: {integrity: sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==} - dev: true - - /lodash.merge/4.6.2: - resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} - dev: true - - /lodash.omit/4.5.0: - resolution: {integrity: sha512-XeqSp49hNGmlkj2EJlfrQFIzQ6lXdNro9sddtQzcJY8QaoC2GO0DT7xaIokHeyM+mIT0mPMlPvkYzg2xCuHdZg==} - dev: true - - /lodash.omitby/4.6.0: - resolution: {integrity: sha512-5OrRcIVR75M288p4nbI2WLAf3ndw2GD9fyNv3Bc15+WCxJDdZ4lYndSxGd7hnG6PVjiJTeJE2dHEGhIuKGicIQ==} - dev: true - - /lodash.pick/4.4.0: - resolution: {integrity: sha512-hXt6Ul/5yWjfklSGvLQl8vM//l3FtyHZeuelpzK6mm99pNvN9yTDruNZPEJZD1oWrqo+izBmB7oUfWgcCX7s4Q==} - dev: true - - /lodash.throttle/4.1.1: - resolution: {integrity: sha512-wIkUCfVKpVsWo3JSZlc+8MB5it+2AN5W8J7YVMST30UrvcQNZ1Okbj+rbVniijTWE6FGYy4XJq/rHkas8qJMLQ==} - dev: false - - /lodash.topath/4.5.2: - resolution: {integrity: sha512-1/W4dM+35DwvE/iEd1M9ekewOSTlpFekhw9mhAtrwjVqUr83/ilQiyAvmg4tVX7Unkcfl1KC+i9WdaT4B6aQcg==} - dev: true - - /lodash.truncate/4.4.2: - resolution: {integrity: sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==} - dev: true - - /lodash.uniq/4.5.0: - resolution: {integrity: 
sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==} - dev: true - - /lodash.uniqby/4.7.0: - resolution: {integrity: sha512-e/zcLx6CSbmaEgFHCA7BnoQKyCtKMxnuWrJygbwPs/AIn+IMKl66L8/s+wBUn5LRw2pZx3bUHibiV1b6aTWIww==} - dev: true - - /lodash.uniqwith/4.5.0: - resolution: {integrity: sha512-7lYL8bLopMoy4CTICbxygAUq6CdRJ36vFc80DucPueUee+d5NBRxz3FdT9Pes/HEx5mPoT9jwnsEJWz1N7uq7Q==} - dev: true - - /lodash/4.17.21: - resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} - - /log-update/4.0.0: - resolution: {integrity: sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==} - engines: {node: '>=10'} - dependencies: - ansi-escapes: 4.3.2 - cli-cursor: 3.1.0 - slice-ansi: 4.0.0 - wrap-ansi: 6.2.0 - dev: true - - /long/4.0.0: - resolution: {integrity: sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==} - dev: false - - /long/5.2.1: - resolution: {integrity: sha512-GKSNGeNAtw8IryjjkhZxuKB3JzlcLTwjtiQCHKvqQet81I93kXslhDQruGI/QsddO83mcDToBVy7GqGS/zYf/A==} - dev: false - - /longest-streak/3.1.0: - resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} - dev: false - - /loose-envify/1.4.0: - resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} - hasBin: true - dependencies: - js-tokens: 4.0.0 - - /lru-cache/5.1.1: - resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} - dependencies: - yallist: 3.1.1 - - /lru-cache/6.0.0: - resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} - engines: {node: '>=10'} - dependencies: - yallist: 4.0.0 - dev: true - - /lz-string/1.4.4: - resolution: {integrity: 
sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ==} - hasBin: true - dev: true - - /magic-string/0.25.9: - resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==} - dependencies: - sourcemap-codec: 1.4.8 - dev: true - - /magic-string/0.27.0: - resolution: {integrity: sha512-8UnnX2PeRAPZuN12svgR9j7M1uWMovg/CEnIwIG0LFkXSJJe4PdfUGiTGl8V9bsBHFUtfVINcSyYxd7q+kx9fA==} - engines: {node: '>=12'} - dependencies: - '@jridgewell/sourcemap-codec': 1.4.14 - dev: true - - /make-dir/2.1.0: - resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==} - engines: {node: '>=6'} - dependencies: - pify: 4.0.1 - semver: 5.7.1 - dev: true - - /make-dir/3.1.0: - resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} - engines: {node: '>=8'} - dependencies: - semver: 6.3.0 - dev: true - - /make-error/1.3.6: - resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - dev: true - - /makeerror/1.0.12: - resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} - dependencies: - tmpl: 1.0.5 - dev: true - - /map-cache/0.2.2: - resolution: {integrity: sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==} - engines: {node: '>=0.10.0'} - dev: true - - /map-obj/1.0.1: - resolution: {integrity: sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==} - engines: {node: '>=0.10.0'} - dev: true - - /map-obj/4.3.0: - resolution: {integrity: sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==} - engines: {node: '>=8'} - dev: true - - /map-or-similar/1.5.0: - resolution: {integrity: 
sha512-0aF7ZmVon1igznGI4VS30yugpduQW3y3GkcgGJOp7d8x8QrizhigUxjI/m2UojsXXto+jLAH3KSz+xOJTiORjg==} - dev: true - - /map-visit/1.0.0: - resolution: {integrity: sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w==} - engines: {node: '>=0.10.0'} - dependencies: - object-visit: 1.0.1 - dev: true - - /markdown-table/3.0.3: - resolution: {integrity: sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==} - dev: false - - /markdown-to-jsx/7.1.8_react@17.0.2: - resolution: {integrity: sha512-rRSa1aFmFnpDRFAhv5vIkWM4nPaoB9vnzIjuIKa1wGupfn2hdCNeaQHKpu4/muoc8n8J7yowjTP2oncA4/Rbgg==} - engines: {node: '>= 10'} - peerDependencies: - react: '>= 0.14.0' - dependencies: - react: 17.0.2 - dev: true - - /match-sorter/6.3.1: - resolution: {integrity: sha512-mxybbo3pPNuA+ZuCUhm5bwNkXrJTbsk5VWbR5wiwz/GC6LIiegBGn2w3O08UG/jdbYLinw51fSQ5xNU1U3MgBw==} - dependencies: - '@babel/runtime': 7.20.7 - remove-accents: 0.4.2 - dev: false - - /matcher/1.1.1: - resolution: {integrity: sha512-+BmqxWIubKTRKNWx/ahnCkk3mG8m7OturVlqq6HiojGJTd5hVYbgZm6WzcYPCoB+KBT4Vd6R7WSRG2OADNaCjg==} - engines: {node: '>=4'} - dependencies: - escape-string-regexp: 1.0.5 - dev: true - - /mathml-tag-names/2.1.3: - resolution: {integrity: sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg==} - dev: true - - /mdast-util-definitions/4.0.0: - resolution: {integrity: sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==} - dependencies: - unist-util-visit: 2.0.3 - dev: true - - /mdast-util-definitions/5.1.1: - resolution: {integrity: sha512-rQ+Gv7mHttxHOBx2dkF4HWTg+EE+UR78ptQWDylzPKaQuVGdG4HIoY3SrS/pCp80nZ04greFvXbVFHT+uf0JVQ==} - dependencies: - '@types/mdast': 3.0.10 - '@types/unist': 2.0.6 - unist-util-visit: 4.1.1 - dev: false - - /mdast-util-directive/2.2.2: - resolution: {integrity: 
sha512-6BuW4dFkCbTIf9peVMXdtWylI6ovMidVjnHyJpx7IDhwk3GosIgUs87Rl3x6T6kP5iAf1qIE3lMn6CgWw40d+g==} - dependencies: - '@types/mdast': 3.0.10 - '@types/unist': 2.0.6 - mdast-util-to-markdown: 1.5.0 - parse-entities: 4.0.0 - stringify-entities: 4.0.3 - unist-util-visit-parents: 5.1.1 - dev: false - - /mdast-util-find-and-replace/2.2.1: - resolution: {integrity: sha512-SobxkQXFAdd4b5WmEakmkVoh18icjQRxGy5OWTCzgsLRm1Fu/KCtwD1HIQSsmq5ZRjVH0Ehwg6/Fn3xIUk+nKw==} - dependencies: - escape-string-regexp: 5.0.0 - unist-util-is: 5.1.1 - unist-util-visit-parents: 5.1.1 - dev: false - - /mdast-util-from-markdown/1.2.0: - resolution: {integrity: sha512-iZJyyvKD1+K7QX1b5jXdE7Sc5dtoTry1vzV28UZZe8Z1xVnB/czKntJ7ZAkG0tANqRnBF6p3p7GpU1y19DTf2Q==} - dependencies: - '@types/mdast': 3.0.10 - '@types/unist': 2.0.6 - decode-named-character-reference: 1.0.2 - mdast-util-to-string: 3.1.0 - micromark: 3.1.0 - micromark-util-decode-numeric-character-reference: 1.0.0 - micromark-util-decode-string: 1.0.2 - micromark-util-normalize-identifier: 1.0.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - unist-util-stringify-position: 3.0.2 - uvu: 0.5.6 - transitivePeerDependencies: - - supports-color - dev: false - - /mdast-util-frontmatter/1.0.0: - resolution: {integrity: sha512-7itKvp0arEVNpCktOET/eLFAYaZ+0cNjVtFtIPxgQ5tV+3i+D4SDDTjTzPWl44LT59PC+xdx+glNTawBdF98Mw==} - dependencies: - micromark-extension-frontmatter: 1.0.0 - dev: false - - /mdast-util-gfm-autolink-literal/1.0.2: - resolution: {integrity: sha512-FzopkOd4xTTBeGXhXSBU0OCDDh5lUj2rd+HQqG92Ld+jL4lpUfgX2AT2OHAVP9aEeDKp7G92fuooSZcYJA3cRg==} - dependencies: - '@types/mdast': 3.0.10 - ccount: 2.0.1 - mdast-util-find-and-replace: 2.2.1 - micromark-util-character: 1.1.0 - dev: false - - /mdast-util-gfm-footnote/1.0.1: - resolution: {integrity: sha512-p+PrYlkw9DeCRkTVw1duWqPRHX6Ywh2BNKJQcZbCwAuP/59B0Lk9kakuAd7KbQprVO4GzdW8eS5++A9PUSqIyw==} - dependencies: - '@types/mdast': 3.0.10 - mdast-util-to-markdown: 1.5.0 - 
micromark-util-normalize-identifier: 1.0.0 - dev: false - - /mdast-util-gfm-strikethrough/1.0.2: - resolution: {integrity: sha512-T/4DVHXcujH6jx1yqpcAYYwd+z5lAYMw4Ls6yhTfbMMtCt0PHY4gEfhW9+lKsLBtyhUGKRIzcUA2FATVqnvPDA==} - dependencies: - '@types/mdast': 3.0.10 - mdast-util-to-markdown: 1.5.0 - dev: false - - /mdast-util-gfm-table/1.0.6: - resolution: {integrity: sha512-uHR+fqFq3IvB3Rd4+kzXW8dmpxUhvgCQZep6KdjsLK4O6meK5dYZEayLtIxNus1XO3gfjfcIFe8a7L0HZRGgag==} - dependencies: - '@types/mdast': 3.0.10 - markdown-table: 3.0.3 - mdast-util-from-markdown: 1.2.0 - mdast-util-to-markdown: 1.5.0 - transitivePeerDependencies: - - supports-color - dev: false - - /mdast-util-gfm-task-list-item/1.0.1: - resolution: {integrity: sha512-KZ4KLmPdABXOsfnM6JHUIjxEvcx2ulk656Z/4Balw071/5qgnhz+H1uGtf2zIGnrnvDC8xR4Fj9uKbjAFGNIeA==} - dependencies: - '@types/mdast': 3.0.10 - mdast-util-to-markdown: 1.5.0 - dev: false - - /mdast-util-gfm/2.0.1: - resolution: {integrity: sha512-42yHBbfWIFisaAfV1eixlabbsa6q7vHeSPY+cg+BBjX51M8xhgMacqH9g6TftB/9+YkcI0ooV4ncfrJslzm/RQ==} - dependencies: - mdast-util-from-markdown: 1.2.0 - mdast-util-gfm-autolink-literal: 1.0.2 - mdast-util-gfm-footnote: 1.0.1 - mdast-util-gfm-strikethrough: 1.0.2 - mdast-util-gfm-table: 1.0.6 - mdast-util-gfm-task-list-item: 1.0.1 - mdast-util-to-markdown: 1.5.0 - transitivePeerDependencies: - - supports-color - dev: false - - /mdast-util-phrasing/3.0.0: - resolution: {integrity: sha512-S+QYsDRLkGi8U7o5JF1agKa/sdP+CNGXXLqC17pdTVL8FHHgQEiwFGa9yE5aYtUxNiFGYoaDy9V1kC85Sz86Gg==} - dependencies: - '@types/mdast': 3.0.10 - unist-util-is: 5.1.1 - dev: false - - /mdast-util-to-hast/11.3.0: - resolution: {integrity: sha512-4o3Cli3hXPmm1LhB+6rqhfsIUBjnKFlIUZvudaermXB+4/KONdd/W4saWWkC+LBLbPMqhFSSTSRgafHsT5fVJw==} - dependencies: - '@types/hast': 2.3.4 - '@types/mdast': 3.0.10 - '@types/mdurl': 1.0.2 - mdast-util-definitions: 5.1.1 - mdurl: 1.0.1 - unist-builder: 3.0.0 - unist-util-generated: 2.0.0 - unist-util-position: 4.0.3 - 
unist-util-visit: 4.1.1 - dev: false - - /mdast-util-to-markdown/1.5.0: - resolution: {integrity: sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==} - dependencies: - '@types/mdast': 3.0.10 - '@types/unist': 2.0.6 - longest-streak: 3.1.0 - mdast-util-phrasing: 3.0.0 - mdast-util-to-string: 3.1.0 - micromark-util-decode-string: 1.0.2 - unist-util-visit: 4.1.1 - zwitch: 2.0.4 - dev: false - - /mdast-util-to-string/1.1.0: - resolution: {integrity: sha512-jVU0Nr2B9X3MU4tSK7JP1CMkSvOj7X5l/GboG1tKRw52lLF1x2Ju92Ms9tNetCcbfX3hzlM73zYo2NKkWSfF/A==} - dev: true - - /mdast-util-to-string/3.1.0: - resolution: {integrity: sha512-n4Vypz/DZgwo0iMHLQL49dJzlp7YtAJP+N07MZHpjPf/5XJuHUWstviF4Mn2jEiR/GNmtnRRqnwsXExk3igfFA==} - dev: false - - /mdn-data/2.0.14: - resolution: {integrity: sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==} - dev: false - - /mdurl/1.0.1: - resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==} - dev: false - - /media-typer/0.3.0: - resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} - engines: {node: '>= 0.6'} - dev: true - - /memoize-one/6.0.0: - resolution: {integrity: sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==} - dev: false - - /memoizerific/1.11.3: - resolution: {integrity: sha512-/EuHYwAPdLtXwAwSZkh/Gutery6pD2KYd44oQLhAvQp/50mpyduZh8Q7PYHXTCJ+wuXxt7oij2LXyIJOOYFPog==} - dependencies: - map-or-similar: 1.5.0 - dev: true - - /meow/9.0.0: - resolution: {integrity: sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==} - engines: {node: '>=10'} - dependencies: - '@types/minimist': 1.2.2 - camelcase-keys: 6.2.2 - decamelize: 1.2.0 - decamelize-keys: 1.1.1 - hard-rejection: 2.1.0 - minimist-options: 4.1.0 - normalize-package-data: 3.0.3 - 
read-pkg-up: 7.0.1 - redent: 3.0.0 - trim-newlines: 3.0.1 - type-fest: 0.18.1 - yargs-parser: 20.2.9 - dev: true - - /merge-descriptors/1.0.1: - resolution: {integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==} - dev: true - - /merge-stream/2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - dev: true - - /merge2/1.4.1: - resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} - engines: {node: '>= 8'} - dev: true - - /methods/1.1.2: - resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} - engines: {node: '>= 0.6'} - dev: true - - /micromark-core-commonmark/1.0.6: - resolution: {integrity: sha512-K+PkJTxqjFfSNkfAhp4GB+cZPfQd6dxtTXnf+RjZOV7T4EEXnvgzOcnp+eSTmpGk9d1S9sL6/lqrgSNn/s0HZA==} - dependencies: - decode-named-character-reference: 1.0.2 - micromark-factory-destination: 1.0.0 - micromark-factory-label: 1.0.2 - micromark-factory-space: 1.0.0 - micromark-factory-title: 1.0.2 - micromark-factory-whitespace: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-chunked: 1.0.0 - micromark-util-classify-character: 1.0.0 - micromark-util-html-tag-name: 1.1.0 - micromark-util-normalize-identifier: 1.0.0 - micromark-util-resolve-all: 1.0.0 - micromark-util-subtokenize: 1.0.2 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-extension-directive/2.1.2: - resolution: {integrity: sha512-brqLEztt14/73snVXYsq9Cv6ng67O+Sy69ZuM0s8ZhN/GFI9rnyXyj0Y0DaCwi648vCImv7/U1H5TzR7wMv5jw==} - dependencies: - micromark-factory-space: 1.0.0 - micromark-factory-whitespace: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - parse-entities: 4.0.0 - uvu: 0.5.6 - dev: false - - /micromark-extension-frontmatter/1.0.0: - 
resolution: {integrity: sha512-EXjmRnupoX6yYuUJSQhrQ9ggK0iQtQlpi6xeJzVD5xscyAI+giqco5fdymayZhJMbIFecjnE2yz85S9NzIgQpg==} - dependencies: - fault: 2.0.1 - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - dev: false - - /micromark-extension-gfm-autolink-literal/1.0.3: - resolution: {integrity: sha512-i3dmvU0htawfWED8aHMMAzAVp/F0Z+0bPh3YrbTPPL1v4YAlCZpy5rBO5p0LPYiZo0zFVkoYh7vDU7yQSiCMjg==} - dependencies: - micromark-util-character: 1.1.0 - micromark-util-sanitize-uri: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-extension-gfm-footnote/1.0.4: - resolution: {integrity: sha512-E/fmPmDqLiMUP8mLJ8NbJWJ4bTw6tS+FEQS8CcuDtZpILuOb2kjLqPEeAePF1djXROHXChM/wPJw0iS4kHCcIg==} - dependencies: - micromark-core-commonmark: 1.0.6 - micromark-factory-space: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-normalize-identifier: 1.0.0 - micromark-util-sanitize-uri: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-extension-gfm-strikethrough/1.0.4: - resolution: {integrity: sha512-/vjHU/lalmjZCT5xt7CcHVJGq8sYRm80z24qAKXzaHzem/xsDYb2yLL+NNVbYvmpLx3O7SYPuGL5pzusL9CLIQ==} - dependencies: - micromark-util-chunked: 1.0.0 - micromark-util-classify-character: 1.0.0 - micromark-util-resolve-all: 1.0.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-extension-gfm-table/1.0.5: - resolution: {integrity: sha512-xAZ8J1X9W9K3JTJTUL7G6wSKhp2ZYHrFk5qJgY/4B33scJzE2kpfRL6oiw/veJTbt7jiM/1rngLlOKPWr1G+vg==} - dependencies: - micromark-factory-space: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-extension-gfm-tagfilter/1.0.1: - resolution: {integrity: sha512-Ty6psLAcAjboRa/UKUbbUcwjVAv5plxmpUTy2XC/3nJFL37eHej8jrHrRzkqcpipJliuBH30DTs7+3wqNcQUVA==} - dependencies: - micromark-util-types: 1.0.2 - dev: false - - 
/micromark-extension-gfm-task-list-item/1.0.3: - resolution: {integrity: sha512-PpysK2S1Q/5VXi72IIapbi/jliaiOFzv7THH4amwXeYXLq3l1uo8/2Be0Ac1rEwK20MQEsGH2ltAZLNY2KI/0Q==} - dependencies: - micromark-factory-space: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-extension-gfm/2.0.1: - resolution: {integrity: sha512-p2sGjajLa0iYiGQdT0oelahRYtMWvLjy8J9LOCxzIQsllMCGLbsLW+Nc+N4vi02jcRJvedVJ68cjelKIO6bpDA==} - dependencies: - micromark-extension-gfm-autolink-literal: 1.0.3 - micromark-extension-gfm-footnote: 1.0.4 - micromark-extension-gfm-strikethrough: 1.0.4 - micromark-extension-gfm-table: 1.0.5 - micromark-extension-gfm-tagfilter: 1.0.1 - micromark-extension-gfm-task-list-item: 1.0.3 - micromark-util-combine-extensions: 1.0.0 - micromark-util-types: 1.0.2 - dev: false - - /micromark-factory-destination/1.0.0: - resolution: {integrity: sha512-eUBA7Rs1/xtTVun9TmV3gjfPz2wEwgK5R5xcbIM5ZYAtvGF6JkyaDsj0agx8urXnO31tEO6Ug83iVH3tdedLnw==} - dependencies: - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - dev: false - - /micromark-factory-label/1.0.2: - resolution: {integrity: sha512-CTIwxlOnU7dEshXDQ+dsr2n+yxpP0+fn271pu0bwDIS8uqfFcumXpj5mLn3hSC8iw2MUr6Gx8EcKng1dD7i6hg==} - dependencies: - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-factory-space/1.0.0: - resolution: {integrity: sha512-qUmqs4kj9a5yBnk3JMLyjtWYN6Mzfcx8uJfi5XAveBniDevmZasdGBba5b4QsvRcAkmvGo5ACmSUmyGiKTLZew==} - dependencies: - micromark-util-character: 1.1.0 - micromark-util-types: 1.0.2 - dev: false - - /micromark-factory-title/1.0.2: - resolution: {integrity: sha512-zily+Nr4yFqgMGRKLpTVsNl5L4PMu485fGFDOQJQBl2NFpjGte1e86zC0da93wf97jrc4+2G2GQudFMHn3IX+A==} - dependencies: - micromark-factory-space: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - 
micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-factory-whitespace/1.0.0: - resolution: {integrity: sha512-Qx7uEyahU1lt1RnsECBiuEbfr9INjQTGa6Err+gF3g0Tx4YEviPbqqGKNv/NrBaE7dVHdn1bVZKM/n5I/Bak7A==} - dependencies: - micromark-factory-space: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - dev: false - - /micromark-util-character/1.1.0: - resolution: {integrity: sha512-agJ5B3unGNJ9rJvADMJ5ZiYjBRyDpzKAOk01Kpi1TKhlT1APx3XZk6eN7RtSz1erbWHC2L8T3xLZ81wdtGRZzg==} - dependencies: - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - dev: false - - /micromark-util-chunked/1.0.0: - resolution: {integrity: sha512-5e8xTis5tEZKgesfbQMKRCyzvffRRUX+lK/y+DvsMFdabAicPkkZV6gO+FEWi9RfuKKoxxPwNL+dFF0SMImc1g==} - dependencies: - micromark-util-symbol: 1.0.1 - dev: false - - /micromark-util-classify-character/1.0.0: - resolution: {integrity: sha512-F8oW2KKrQRb3vS5ud5HIqBVkCqQi224Nm55o5wYLzY/9PwHGXC01tr3d7+TqHHz6zrKQ72Okwtvm/xQm6OVNZA==} - dependencies: - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - dev: false - - /micromark-util-combine-extensions/1.0.0: - resolution: {integrity: sha512-J8H058vFBdo/6+AsjHp2NF7AJ02SZtWaVUjsayNFeAiydTxUwViQPxN0Hf8dp4FmCQi0UUFovFsEyRSUmFH3MA==} - dependencies: - micromark-util-chunked: 1.0.0 - micromark-util-types: 1.0.2 - dev: false - - /micromark-util-decode-numeric-character-reference/1.0.0: - resolution: {integrity: sha512-OzO9AI5VUtrTD7KSdagf4MWgHMtET17Ua1fIpXTpuhclCqD8egFWo85GxSGvxgkGS74bEahvtM0WP0HjvV0e4w==} - dependencies: - micromark-util-symbol: 1.0.1 - dev: false - - /micromark-util-decode-string/1.0.2: - resolution: {integrity: sha512-DLT5Ho02qr6QWVNYbRZ3RYOSSWWFuH3tJexd3dgN1odEuPNxCngTCXJum7+ViRAd9BbdxCvMToPOD/IvVhzG6Q==} - dependencies: - decode-named-character-reference: 1.0.2 - micromark-util-character: 1.1.0 - micromark-util-decode-numeric-character-reference: 1.0.0 - micromark-util-symbol: 1.0.1 - dev: false 
- - /micromark-util-encode/1.0.1: - resolution: {integrity: sha512-U2s5YdnAYexjKDel31SVMPbfi+eF8y1U4pfiRW/Y8EFVCy/vgxk/2wWTxzcqE71LHtCuCzlBDRU2a5CQ5j+mQA==} - dev: false - - /micromark-util-html-tag-name/1.1.0: - resolution: {integrity: sha512-BKlClMmYROy9UiV03SwNmckkjn8QHVaWkqoAqzivabvdGcwNGMMMH/5szAnywmsTBUzDsU57/mFi0sp4BQO6dA==} - dev: false - - /micromark-util-normalize-identifier/1.0.0: - resolution: {integrity: sha512-yg+zrL14bBTFrQ7n35CmByWUTFsgst5JhA4gJYoty4Dqzj4Z4Fr/DHekSS5aLfH9bdlfnSvKAWsAgJhIbogyBg==} - dependencies: - micromark-util-symbol: 1.0.1 - dev: false - - /micromark-util-resolve-all/1.0.0: - resolution: {integrity: sha512-CB/AGk98u50k42kvgaMM94wzBqozSzDDaonKU7P7jwQIuH2RU0TeBqGYJz2WY1UdihhjweivStrJ2JdkdEmcfw==} - dependencies: - micromark-util-types: 1.0.2 - dev: false - - /micromark-util-sanitize-uri/1.1.0: - resolution: {integrity: sha512-RoxtuSCX6sUNtxhbmsEFQfWzs8VN7cTctmBPvYivo98xb/kDEoTCtJQX5wyzIYEmk/lvNFTat4hL8oW0KndFpg==} - dependencies: - micromark-util-character: 1.1.0 - micromark-util-encode: 1.0.1 - micromark-util-symbol: 1.0.1 - dev: false - - /micromark-util-subtokenize/1.0.2: - resolution: {integrity: sha512-d90uqCnXp/cy4G881Ub4psE57Sf8YD0pim9QdjCRNjfas2M1u6Lbt+XZK9gnHL2XFhnozZiEdCa9CNfXSfQ6xA==} - dependencies: - micromark-util-chunked: 1.0.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - dev: false - - /micromark-util-symbol/1.0.1: - resolution: {integrity: sha512-oKDEMK2u5qqAptasDAwWDXq0tG9AssVwAx3E9bBF3t/shRIGsWIRG+cGafs2p/SnDSOecnt6hZPCE2o6lHfFmQ==} - dev: false - - /micromark-util-types/1.0.2: - resolution: {integrity: sha512-DCfg/T8fcrhrRKTPjRrw/5LLvdGV7BHySf/1LOZx7TzWZdYRjogNtyNq885z3nNallwr3QUKARjqvHqX1/7t+w==} - dev: false - - /micromark/3.1.0: - resolution: {integrity: sha512-6Mj0yHLdUZjHnOPgr5xfWIMqMWS12zDN6iws9SLuSz76W8jTtAv24MN4/CL7gJrl5vtxGInkkqDv/JIoRsQOvA==} - dependencies: - '@types/debug': 4.1.7 - debug: 4.3.4 - decode-named-character-reference: 1.0.2 - micromark-core-commonmark: 1.0.6 
- micromark-factory-space: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-chunked: 1.0.0 - micromark-util-combine-extensions: 1.0.0 - micromark-util-decode-numeric-character-reference: 1.0.0 - micromark-util-encode: 1.0.1 - micromark-util-normalize-identifier: 1.0.0 - micromark-util-resolve-all: 1.0.0 - micromark-util-sanitize-uri: 1.1.0 - micromark-util-subtokenize: 1.0.2 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - uvu: 0.5.6 - transitivePeerDependencies: - - supports-color - dev: false - - /micromatch/3.1.10: - resolution: {integrity: sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==} - engines: {node: '>=0.10.0'} - dependencies: - arr-diff: 4.0.0 - array-unique: 0.3.2 - braces: 2.3.2 - define-property: 2.0.2 - extend-shallow: 3.0.2 - extglob: 2.0.4 - fragment-cache: 0.2.1 - kind-of: 6.0.3 - nanomatch: 1.2.13 - object.pick: 1.3.0 - regex-not: 1.0.2 - snapdragon: 0.8.2 - to-regex: 3.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /micromatch/4.0.5: - resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} - engines: {node: '>=8.6'} - dependencies: - braces: 3.0.2 - picomatch: 2.3.1 - dev: true - - /microseconds/0.2.0: - resolution: {integrity: sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA==} - dev: false - - /mime-db/1.52.0: - resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} - engines: {node: '>= 0.6'} - - /mime-types/2.1.35: - resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.52.0 - - /mime/1.6.0: - resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} - engines: {node: '>=4'} - hasBin: true - 
dev: true - - /mime/2.6.0: - resolution: {integrity: sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==} - engines: {node: '>=4.0.0'} - hasBin: true - dev: true - - /mimic-fn/2.1.0: - resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} - engines: {node: '>=6'} - dev: true - - /min-indent/1.0.1: - resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} - engines: {node: '>=4'} - dev: true - - /minimatch/3.1.2: - resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} - dependencies: - brace-expansion: 1.1.11 - - /minimatch/5.1.6: - resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} - engines: {node: '>=10'} - dependencies: - brace-expansion: 2.0.1 - dev: true - - /minimist-options/4.1.0: - resolution: {integrity: sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==} - engines: {node: '>= 6'} - dependencies: - arrify: 1.0.1 - is-plain-obj: 1.1.0 - kind-of: 6.0.3 - dev: true - - /minimist/1.2.7: - resolution: {integrity: sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==} - dev: true - - /minipass/3.3.6: - resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==} - engines: {node: '>=8'} - dependencies: - yallist: 4.0.0 - dev: true - - /minipass/4.0.0: - resolution: {integrity: sha512-g2Uuh2jEKoht+zvO6vJqXmYpflPqzRBT+Th2h01DKh5z7wbY/AZ2gCQ78cP70YoHPyFdY30YBV5WxgLOEwOykw==} - engines: {node: '>=8'} - dependencies: - yallist: 4.0.0 - dev: true - - /minizlib/2.1.2: - resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} - engines: {node: '>= 8'} - 
dependencies: - minipass: 3.3.6 - yallist: 4.0.0 - dev: true - - /mitt/1.2.0: - resolution: {integrity: sha512-r6lj77KlwqLhIUku9UWYes7KJtsczvolZkzp8hbaDPPaE24OmWl5s539Mytlj22siEQKosZ26qCBgda2PKwoJw==} - dev: false - - /mixin-deep/1.3.2: - resolution: {integrity: sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==} - engines: {node: '>=0.10.0'} - dependencies: - for-in: 1.0.2 - is-extendable: 1.0.1 - dev: true - - /mkdirp/0.5.6: - resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} - hasBin: true - dependencies: - minimist: 1.2.7 - dev: true - - /mkdirp/1.0.4: - resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} - engines: {node: '>=10'} - hasBin: true - dev: true - - /monaco-editor/0.34.1: - resolution: {integrity: sha512-FKc80TyiMaruhJKKPz5SpJPIjL+dflGvz4CpuThaPMc94AyN7SeC9HQ8hrvaxX7EyHdJcUY5i4D0gNyJj1vSZQ==} - dev: false - - /mri/1.2.0: - resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} - engines: {node: '>=4'} - - /ms/2.0.0: - resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} - dev: true - - /ms/2.1.1: - resolution: {integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==} - dev: true - - /ms/2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - - /ms/2.1.3: - resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - dev: true - - /nano-css/5.3.5_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-vSB9X12bbNu4ALBu7nigJgRViZ6ja3OU7CeuiV1zMIbXOdmkLahgtPmh3GBOlDxbKY0CitqlPdOReGlBLSp+yg==} - peerDependencies: - react: '*' - react-dom: '*' - 
dependencies: - css-tree: 1.1.3 - csstype: 3.1.1 - fastest-stable-stringify: 2.0.2 - inline-style-prefixer: 6.0.4 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - rtl-css-js: 1.16.1 - sourcemap-codec: 1.4.8 - stacktrace-js: 2.0.2 - stylis: 4.1.3 - dev: false - - /nano-time/1.0.0: - resolution: {integrity: sha512-flnngywOoQ0lLQOTRNexn2gGSNuM9bKj9RZAWSzhQ+UJYaAFG9bac4DW9VHjUAzrOaIcajHybCTHe/bkvozQqA==} - dependencies: - big-integer: 1.6.51 - dev: false - - /nanoclone/0.2.1: - resolution: {integrity: sha512-wynEP02LmIbLpcYw8uBKpcfF6dmg2vcpKqxeH5UcoKEYdExslsdUA4ugFauuaeYdTB76ez6gJW8XAZ6CgkXYxA==} - dev: false - - /nanoid/3.3.4: - resolution: {integrity: sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - - /nanomatch/1.2.13: - resolution: {integrity: sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==} - engines: {node: '>=0.10.0'} - dependencies: - arr-diff: 4.0.0 - array-unique: 0.3.2 - define-property: 2.0.2 - extend-shallow: 3.0.2 - fragment-cache: 0.2.1 - is-windows: 1.0.2 - kind-of: 6.0.3 - object.pick: 1.3.0 - regex-not: 1.0.2 - snapdragon: 0.8.2 - to-regex: 3.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /natural-compare-lite/1.4.0: - resolution: {integrity: sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==} - dev: true - - /natural-compare/1.4.0: - resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} - dev: true - - /needle/3.2.0: - resolution: {integrity: sha512-oUvzXnyLiVyVGoianLijF9O/RecZUf7TkBfimjGrLM4eQhXyeJwM6GeAWccwfQ9aa4gMCZKqhAOuLaMIcQxajQ==} - engines: {node: '>= 4.4.x'} - hasBin: true - requiresBuild: true - dependencies: - debug: 3.2.7 - iconv-lite: 0.6.3 - sax: 1.2.4 - transitivePeerDependencies: - - supports-color - dev: true - optional: true - - 
/negotiator/0.6.3: - resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} - engines: {node: '>= 0.6'} - dev: true - - /neo-async/2.6.2: - resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} - dev: true - - /netmask/2.0.2: - resolution: {integrity: sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==} - engines: {node: '>= 0.4.0'} - dev: true - - /nimma/0.2.2: - resolution: {integrity: sha512-V52MLl7BU+tH2Np9tDrIXK8bql3MVUadnMIl/0/oZSGC9keuro0O9UUv9QKp0aMvtN8HRew4G7byY7H4eWsxaQ==} - engines: {node: ^12.20 || >=14.13} - dependencies: - '@jsep-plugin/regex': 1.0.3_jsep@1.3.8 - '@jsep-plugin/ternary': 1.1.3_jsep@1.3.8 - astring: 1.8.4 - jsep: 1.3.8 - optionalDependencies: - jsonpath-plus: 6.0.1 - lodash.topath: 4.5.2 - dev: true - - /node-dir/0.1.17: - resolution: {integrity: sha512-tmPX422rYgofd4epzrNoOXiE8XFZYOcCq1vD7MAXCDO+O+zndlA2ztdKKMa+EeuBG5tHETpr4ml4RGgpqDCCAg==} - engines: {node: '>= 0.10.5'} - dependencies: - minimatch: 3.1.2 - dev: true - - /node-fetch-h2/2.3.0: - resolution: {integrity: sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==} - engines: {node: 4.x || >=6.0.0} - dependencies: - http2-client: 1.3.5 - dev: true - - /node-fetch-native/1.0.1: - resolution: {integrity: sha512-VzW+TAk2wE4X9maiKMlT+GsPU4OMmR1U9CrHSmd3DFLn2IcZ9VJ6M6BBugGfYUnPCLSYxXdZy17M0BEJyhUTwg==} - dev: true - - /node-fetch/2.6.7: - resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - dependencies: - whatwg-url: 5.0.0 - dev: false - - /node-fetch/2.6.8: - resolution: {integrity: 
sha512-RZ6dBYuj8dRSfxpUSu+NsdF1dpPpluJxwOp+6IoDp/sH2QNDSvurYsAa+F1WxY2RjA1iP93xhcsUoYbF2XBqVg==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - dependencies: - whatwg-url: 5.0.0 - dev: true - - /node-fetch/2.6.9: - resolution: {integrity: sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - dependencies: - whatwg-url: 5.0.0 - dev: true - - /node-int64/0.4.0: - resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} - dev: true - - /node-readfiles/0.2.0: - resolution: {integrity: sha512-SU00ZarexNlE4Rjdm83vglt5Y9yiQ+XI1XpflWlb7q7UTN1JUItm69xMeiQCTxtTfnzt+83T8Cx+vI2ED++VDA==} - dependencies: - es6-promise: 3.3.1 - dev: true - - /node-releases/2.0.8: - resolution: {integrity: sha512-dFSmB8fFHEH/s81Xi+Y/15DQY6VHW81nXRj86EMSL3lmuTmK1e+aT4wrFCkTbm+gSwkw4KpX+rT/pMM2c1mF+A==} - - /nopt/4.0.3: - resolution: {integrity: sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==} - hasBin: true - dependencies: - abbrev: 1.1.1 - osenv: 0.1.5 - dev: true - - /normalize-package-data/2.5.0: - resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} - dependencies: - hosted-git-info: 2.8.9 - resolve: 1.22.1 - semver: 5.7.1 - validate-npm-package-license: 3.0.4 - dev: true - - /normalize-package-data/3.0.3: - resolution: {integrity: sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==} - engines: {node: '>=10'} - dependencies: - hosted-git-info: 4.1.0 - is-core-module: 2.11.0 - semver: 7.3.8 - validate-npm-package-license: 3.0.4 - dev: true - - /normalize-path/3.0.0: - resolution: {integrity: 
sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} - engines: {node: '>=0.10.0'} - - /npm-normalize-package-bin/1.0.1: - resolution: {integrity: sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==} - dev: true - - /npm-run-path/4.0.1: - resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} - engines: {node: '>=8'} - dependencies: - path-key: 3.1.1 - dev: true - - /npmlog/5.0.1: - resolution: {integrity: sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==} - dependencies: - are-we-there-yet: 2.0.0 - console-control-strings: 1.1.0 - gauge: 3.0.2 - set-blocking: 2.0.0 - dev: true - - /nwsapi/2.2.2: - resolution: {integrity: sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw==} - dev: true - - /oas-kit-common/1.0.8: - resolution: {integrity: sha512-pJTS2+T0oGIwgjGpw7sIRU8RQMcUoKCDWFLdBqKB2BNmGpbBMH2sdqAaOXUg8OzonZHU0L7vfJu1mJFEiYDWOQ==} - dependencies: - fast-safe-stringify: 2.1.1 - dev: true - - /oas-linter/3.2.2: - resolution: {integrity: sha512-KEGjPDVoU5K6swgo9hJVA/qYGlwfbFx+Kg2QB/kd7rzV5N8N5Mg6PlsoCMohVnQmo+pzJap/F610qTodKzecGQ==} - dependencies: - '@exodus/schemasafe': 1.0.0-rc.9 - should: 13.2.3 - yaml: 1.10.2 - dev: true - - /oas-resolver/2.5.6: - resolution: {integrity: sha512-Yx5PWQNZomfEhPPOphFbZKi9W93CocQj18NlD2Pa4GWZzdZpSJvYwoiuurRI7m3SpcChrnO08hkuQDL3FGsVFQ==} - hasBin: true - dependencies: - node-fetch-h2: 2.3.0 - oas-kit-common: 1.0.8 - reftools: 1.1.9 - yaml: 1.10.2 - yargs: 17.6.2 - dev: true - - /oas-schema-walker/1.1.5: - resolution: {integrity: sha512-2yucenq1a9YPmeNExoUa9Qwrt9RFkjqaMAA1X+U7sbb0AqBeTIdMHky9SQQ6iN94bO5NW0W4TRYXerG+BdAvAQ==} - dev: true - - /oas-validator/5.0.8: - resolution: {integrity: sha512-cu20/HE5N5HKqVygs3dt94eYJfBi0TsZvPVXDhbXQHiEityDN+RROTleefoKRKKJ9dFAF2JBkDHgvWj0sjKGmw==} - 
dependencies: - call-me-maybe: 1.0.2 - oas-kit-common: 1.0.8 - oas-linter: 3.2.2 - oas-resolver: 2.5.6 - oas-schema-walker: 1.1.5 - reftools: 1.1.9 - should: 13.2.3 - yaml: 1.10.2 - dev: true - - /object-assign/4.1.1: - resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} - engines: {node: '>=0.10.0'} - - /object-copy/0.1.0: - resolution: {integrity: sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ==} - engines: {node: '>=0.10.0'} - dependencies: - copy-descriptor: 0.1.1 - define-property: 0.2.5 - kind-of: 3.2.2 - dev: true - - /object-inspect/1.12.3: - resolution: {integrity: sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==} - dev: true - - /object-is/1.1.5: - resolution: {integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - dev: true - - /object-keys/1.1.1: - resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} - engines: {node: '>= 0.4'} - dev: true - - /object-visit/1.0.1: - resolution: {integrity: sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA==} - engines: {node: '>=0.10.0'} - dependencies: - isobject: 3.0.1 - dev: true - - /object.assign/4.1.4: - resolution: {integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - has-symbols: 1.0.3 - object-keys: 1.1.1 - dev: true - - /object.entries/1.1.6: - resolution: {integrity: sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - 
es-abstract: 1.21.1 - dev: true - - /object.fromentries/2.0.6: - resolution: {integrity: sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - dev: true - - /object.hasown/1.1.2: - resolution: {integrity: sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==} - dependencies: - define-properties: 1.1.4 - es-abstract: 1.21.1 - dev: true - - /object.pick/1.3.0: - resolution: {integrity: sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ==} - engines: {node: '>=0.10.0'} - dependencies: - isobject: 3.0.1 - dev: true - - /object.values/1.1.6: - resolution: {integrity: sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - dev: true - - /oblivious-set/1.0.0: - resolution: {integrity: sha512-z+pI07qxo4c2CulUHCDf9lcqDlMSo72N/4rLUpRXf6fu+q8vjt8y0xS+Tlf8NTJDdTXHbdeO1n3MlbctwEoXZw==} - dev: false - - /on-finished/2.4.1: - resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} - engines: {node: '>= 0.8'} - dependencies: - ee-first: 1.1.1 - dev: true - - /on-headers/1.0.2: - resolution: {integrity: sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==} - engines: {node: '>= 0.8'} - dev: true - - /once/1.4.0: - resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - dependencies: - wrappy: 1.0.2 - - /onetime/5.1.2: - resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} - engines: {node: '>=6'} - dependencies: - mimic-fn: 2.1.0 - dev: true - - 
/ono/4.0.11: - resolution: {integrity: sha512-jQ31cORBFE6td25deYeD80wxKBMj+zBmHTrVxnc6CKhx8gho6ipmWM5zj/oeoqioZ99yqBls9Z/9Nss7J26G2g==} - dependencies: - format-util: 1.0.5 - dev: true - - /open/7.4.2: - resolution: {integrity: sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==} - engines: {node: '>=8'} - dependencies: - is-docker: 2.2.1 - is-wsl: 2.2.0 - dev: true - - /open/8.4.0: - resolution: {integrity: sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==} - engines: {node: '>=12'} - dependencies: - define-lazy-prop: 2.0.0 - is-docker: 2.2.1 - is-wsl: 2.2.0 - dev: true - - /openapi3-ts/3.1.2: - resolution: {integrity: sha512-S8fijNOqe/ut0kEDAwHZnI7sVYqb8Q3XnISmSyXmK76jgrcf4ableI75KTY1qdksd9EI/t39Vi5M4VYKrkNKfQ==} - dependencies: - yaml: 2.2.1 - dev: true - - /optionator/0.8.3: - resolution: {integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==} - engines: {node: '>= 0.8.0'} - dependencies: - deep-is: 0.1.4 - fast-levenshtein: 2.0.6 - levn: 0.3.0 - prelude-ls: 1.1.2 - type-check: 0.3.2 - word-wrap: 1.2.3 - dev: true - - /optionator/0.9.1: - resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==} - engines: {node: '>= 0.8.0'} - dependencies: - deep-is: 0.1.4 - fast-levenshtein: 2.0.6 - levn: 0.4.1 - prelude-ls: 1.2.1 - type-check: 0.4.0 - word-wrap: 1.2.3 - dev: true - - /orval/6.11.1_typescript@4.9.5: - resolution: {integrity: sha512-3QjI9i8mp/lT+ufJQWghQyCf6vNXqKVHva2IgLL/OWEuBA8AsGPXvJvCeqzFmi+fBSC7LzpR2Mkyle7dxfycLQ==} - hasBin: true - dependencies: - '@apidevtools/swagger-parser': 10.1.0 - '@orval/angular': 6.11.1 - '@orval/axios': 6.11.1 - '@orval/core': 6.11.1 - '@orval/msw': 6.11.1 - '@orval/query': 6.11.1 - '@orval/swr': 6.11.1 - ajv: 8.12.0 - cac: 6.7.14 - chalk: 4.1.2 - chokidar: 3.5.3 - enquirer: 2.3.6 - execa: 5.1.1 - find-up: 5.0.0 - fs-extra: 
10.1.0 - lodash.uniq: 4.5.0 - openapi3-ts: 3.1.2 - string-argv: 0.3.1 - tsconfck: 2.0.2_typescript@4.9.5 - transitivePeerDependencies: - - encoding - - openapi-types - - supports-color - - typescript - dev: true - - /os-homedir/1.0.2: - resolution: {integrity: sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==} - engines: {node: '>=0.10.0'} - dev: true - - /os-tmpdir/1.0.2: - resolution: {integrity: sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} - engines: {node: '>=0.10.0'} - dev: true - - /osenv/0.1.5: - resolution: {integrity: sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==} - dependencies: - os-homedir: 1.0.2 - os-tmpdir: 1.0.2 - dev: true - - /p-limit/2.3.0: - resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} - engines: {node: '>=6'} - dependencies: - p-try: 2.2.0 - dev: true - - /p-limit/3.1.0: - resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} - engines: {node: '>=10'} - dependencies: - yocto-queue: 0.1.0 - dev: true - - /p-locate/3.0.0: - resolution: {integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==} - engines: {node: '>=6'} - dependencies: - p-limit: 2.3.0 - dev: true - - /p-locate/4.1.0: - resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} - engines: {node: '>=8'} - dependencies: - p-limit: 2.3.0 - dev: true - - /p-locate/5.0.0: - resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} - engines: {node: '>=10'} - dependencies: - p-limit: 3.1.0 - dev: true - - /p-map/4.0.0: - resolution: {integrity: 
sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} - engines: {node: '>=10'} - dependencies: - aggregate-error: 3.1.0 - dev: true - - /p-try/2.2.0: - resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} - engines: {node: '>=6'} - dev: true - - /pac-proxy-agent/5.0.0: - resolution: {integrity: sha512-CcFG3ZtnxO8McDigozwE3AqAw15zDvGH+OjXO4kzf7IkEKkQ4gxQ+3sdF50WmhQ4P/bVusXcqNE2S3XrNURwzQ==} - engines: {node: '>= 8'} - dependencies: - '@tootallnate/once': 1.1.2 - agent-base: 6.0.2 - debug: 4.3.4 - get-uri: 3.0.2 - http-proxy-agent: 4.0.1 - https-proxy-agent: 5.0.1 - pac-resolver: 5.0.1 - raw-body: 2.5.1 - socks-proxy-agent: 5.0.1 - transitivePeerDependencies: - - supports-color - dev: true - - /pac-resolver/5.0.1: - resolution: {integrity: sha512-cy7u00ko2KVgBAjuhevqpPeHIkCIqPe1v24cydhWjmeuzaBfmUWFCZJ1iAh5TuVzVZoUzXIW7K8sMYOZ84uZ9Q==} - engines: {node: '>= 8'} - dependencies: - degenerator: 3.0.2 - ip: 1.1.8 - netmask: 2.0.2 - dev: true - - /pad/2.3.0: - resolution: {integrity: sha512-lxrgnOG5AXmzMRT1O5urWtYFxHnFSE+QntgTHij1nvS4W+ubhQLmQRHmZXDeEvk9I00itAixLqU9Q6fE0gW3sw==} - engines: {node: '>= 4.0.0'} - dependencies: - wcwidth: 1.0.1 - dev: true - - /parent-module/1.0.1: - resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} - engines: {node: '>=6'} - dependencies: - callsites: 3.1.0 - - /parse-entities/4.0.0: - resolution: {integrity: sha512-5nk9Fn03x3rEhGaX1FU6IDwG/k+GxLXlFAkgrbM1asuAFl3BhdQWvASaIsmwWypRNcZKHPYnIuOSfIWEyEQnPQ==} - dependencies: - '@types/unist': 2.0.6 - character-entities: 2.0.2 - character-entities-legacy: 3.0.0 - character-reference-invalid: 2.0.1 - decode-named-character-reference: 1.0.2 - is-alphanumerical: 2.0.1 - is-decimal: 2.0.1 - is-hexadecimal: 2.0.1 - dev: false - - /parse-json/5.2.0: - resolution: {integrity: 
sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} - engines: {node: '>=8'} - dependencies: - '@babel/code-frame': 7.18.6 - error-ex: 1.3.2 - json-parse-even-better-errors: 2.3.1 - lines-and-columns: 1.2.4 - - /parse-node-version/1.0.1: - resolution: {integrity: sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==} - engines: {node: '>= 0.10'} - dev: true - - /parse-srcset/1.0.2: - resolution: {integrity: sha512-/2qh0lav6CmI15FzA3i/2Bzk2zCgQhGMkvhOhKNcBVQ1ldgpbfiNTVslmooUmWJcADi1f1kIeynbDRVzNlfR6Q==} - dev: false - - /parse5/7.1.2: - resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==} - dependencies: - entities: 4.4.0 - dev: true - - /parseurl/1.3.3: - resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} - engines: {node: '>= 0.8'} - dev: true - - /pascalcase/0.1.1: - resolution: {integrity: sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw==} - engines: {node: '>=0.10.0'} - dev: true - - /path-exists/3.0.0: - resolution: {integrity: sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==} - engines: {node: '>=4'} - dev: true - - /path-exists/4.0.0: - resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} - engines: {node: '>=8'} - dev: true - - /path-is-absolute/1.0.1: - resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} - engines: {node: '>=0.10.0'} - - /path-key/3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} - dev: true - - /path-parse/1.0.7: - resolution: {integrity: 
sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - - /path-to-regexp/0.1.7: - resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==} - dev: true - - /path-type/4.0.0: - resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} - engines: {node: '>=8'} - - /pathe/1.1.0: - resolution: {integrity: sha512-ODbEPR0KKHqECXW1GoxdDb+AZvULmXjVPy4rt+pGo2+TnjJTIPJQSVS6N63n8T2Ip+syHhbn52OewKicV0373w==} - dev: true - - /pend/1.2.0: - resolution: {integrity: sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==} - dev: true - - /picocolors/1.0.0: - resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} - - /picomatch/2.3.1: - resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} - engines: {node: '>=8.6'} - - /pidtree/0.5.0: - resolution: {integrity: sha512-9nxspIM7OpZuhBxPg73Zvyq7j1QMPMPsGKTqRc2XOaFQauDvoNz9fM1Wdkjmeo7l9GXOZiRs97sPkuayl39wjA==} - engines: {node: '>=0.10'} - hasBin: true - dev: true - - /pify/4.0.1: - resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} - engines: {node: '>=6'} - dev: true - - /pirates/4.0.5: - resolution: {integrity: sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==} - engines: {node: '>= 6'} - dev: true - - /pkg-dir/3.0.0: - resolution: {integrity: sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==} - engines: {node: '>=6'} - dependencies: - find-up: 3.0.0 - dev: true - - /pkg-dir/4.2.0: - resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} - engines: {node: '>=8'} - 
dependencies: - find-up: 4.1.0 - dev: true - - /pkg-dir/5.0.0: - resolution: {integrity: sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA==} - engines: {node: '>=10'} - dependencies: - find-up: 5.0.0 - dev: true - - /polished/4.2.2: - resolution: {integrity: sha512-Sz2Lkdxz6F2Pgnpi9U5Ng/WdWAUZxmHrNPoVlm3aAemxoy2Qy7LGjQg4uf8qKelDAUW94F4np3iH2YPf2qefcQ==} - engines: {node: '>=10'} - dependencies: - '@babel/runtime': 7.20.7 - dev: true - - /pony-cause/1.1.1: - resolution: {integrity: sha512-PxkIc/2ZpLiEzQXu5YRDOUgBlfGYBY8156HY5ZcRAwwonMk5W/MrJP2LLkG/hF7GEQzaHo2aS7ho6ZLCOvf+6g==} - engines: {node: '>=12.0.0'} - dev: true - - /popmotion/11.0.3: - resolution: {integrity: sha512-Y55FLdj3UxkR7Vl3s7Qr4e9m0onSnP8W7d/xQLsoJM40vs6UKHFdygs6SWryasTZYqugMjm3BepCF4CWXDiHgA==} - dependencies: - framesync: 6.0.1 - hey-listen: 1.0.8 - style-value-types: 5.0.0 - tslib: 2.4.1 - dev: false - - /posix-character-classes/0.1.1: - resolution: {integrity: sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg==} - engines: {node: '>=0.10.0'} - dev: true - - /postcss-filter-plugins/3.0.1: - resolution: {integrity: sha512-tRKbW4wWBEkSSFuJtamV2wkiV9rj6Yy7P3Y13+zaynlPEEZt8EgYKn3y/RBpMeIhNmHXFlSdzofml65hD5OafA==} - dependencies: - postcss: 8.4.21 - dev: true - - /postcss-icss-keyframes/0.2.1: - resolution: {integrity: sha512-4m+hLY5TVqoTM198KKnzdNudyu1OvtqwD+8kVZ9PNiEO4+IfHYoyVvEXsOHjV8nZ1k6xowf+nY4HlUfZhOFvvw==} - dependencies: - icss-utils: 3.0.1 - postcss: 8.4.21 - postcss-value-parser: 3.3.1 - dev: true - - /postcss-icss-selectors/2.0.3: - resolution: {integrity: sha512-dxFtq+wscbU9faJaH8kIi98vvCPDbt+qg1g9GoG0os1PY3UvgY1Y2G06iZrZb1iVC9cyFfafwSY1IS+IQpRQ4w==} - dependencies: - css-selector-tokenizer: 0.7.3 - generic-names: 1.0.3 - icss-utils: 3.0.1 - lodash: 4.17.21 - postcss: 8.4.21 - dev: true - - /postcss-load-config/3.1.4_aesdjsunmf4wiehhujt67my7tu: - resolution: {integrity: 
sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==} - engines: {node: '>= 10'} - peerDependencies: - postcss: '>=8.0.9' - ts-node: '>=9.0.0' - peerDependenciesMeta: - postcss: - optional: true - ts-node: - optional: true - dependencies: - lilconfig: 2.0.5 - postcss: 8.4.21 - ts-node: 10.9.1_cin3sed6ohfsopbmt6orxeb4o4 - yaml: 1.10.2 - dev: true - - /postcss-media-query-parser/0.2.3: - resolution: {integrity: sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig==} - dev: true - - /postcss-resolve-nested-selector/0.1.1: - resolution: {integrity: sha512-HvExULSwLqHLgUy1rl3ANIqCsvMS0WHss2UOsXhXnQaZ9VCc2oBvIpXrl00IUFT5ZDITME0o6oiXeiHr2SAIfw==} - dev: true - - /postcss-safe-parser/6.0.0_postcss@8.4.21: - resolution: {integrity: sha512-FARHN8pwH+WiS2OPCxJI8FuRJpTVnn6ZNFiqAM2aeW2LwTHWWmWgIyKC6cUo0L8aeKiF/14MNvnpls6R2PBeMQ==} - engines: {node: '>=12.0'} - peerDependencies: - postcss: ^8.3.3 - dependencies: - postcss: 8.4.21 - dev: true - - /postcss-scss/4.0.6: - resolution: {integrity: sha512-rLDPhJY4z/i4nVFZ27j9GqLxj1pwxE80eAzUNRMXtcpipFYIeowerzBgG3yJhMtObGEXidtIgbUpQ3eLDsf5OQ==} - engines: {node: '>=12.0'} - peerDependencies: - postcss: ^8.4.19 - dev: true - - /postcss-selector-parser/6.0.11: - resolution: {integrity: sha512-zbARubNdogI9j7WY4nQJBiNqQf3sLS3wCP4WfOidu+p28LofJqDH1tcXypGrcmMHhDk2t9wGhCsYe/+szLTy1g==} - engines: {node: '>=4'} - dependencies: - cssesc: 3.0.0 - util-deprecate: 1.0.2 - dev: true - - /postcss-value-parser/3.3.1: - resolution: {integrity: sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==} - - /postcss-value-parser/4.2.0: - resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} - - /postcss/8.4.21: - resolution: {integrity: sha512-tP7u/Sn/dVxK2NnruI4H9BG+x+Wxz6oeZ1cJ8P6G/PZY0IKk4k/63TDsQf2kQq3+qoJeLm2kIBUNlZe3zgb4Zg==} - engines: {node: ^10 || ^12 || >=14} 
- dependencies: - nanoid: 3.3.4 - picocolors: 1.0.0 - source-map-js: 1.0.2 - - /prelude-ls/1.1.2: - resolution: {integrity: sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==} - engines: {node: '>= 0.8.0'} - dev: true - - /prelude-ls/1.2.1: - resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} - engines: {node: '>= 0.8.0'} - dev: true - - /prettier-linter-helpers/1.0.0: - resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} - engines: {node: '>=6.0.0'} - dependencies: - fast-diff: 1.2.0 - dev: true - - /prettier/2.8.3: - resolution: {integrity: sha512-tJ/oJ4amDihPoufT5sM0Z1SKEuKay8LfVAMlbbhnnkvt6BUserZylqo2PN+p9KeljLr0OHa2rXHU1T8reeoTrw==} - engines: {node: '>=10.13.0'} - hasBin: true - dev: true - - /pretty-format/27.5.1: - resolution: {integrity: sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - dependencies: - ansi-regex: 5.0.1 - ansi-styles: 5.2.0 - react-is: 17.0.2 - dev: true - - /pretty-format/29.3.1: - resolution: {integrity: sha512-FyLnmb1cYJV8biEIiRyzRFvs2lry7PPIvOqKVe1GCUEYg4YGmlx1qG9EJNMxArYm7piII4qb8UV1Pncq5dxmcg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/schemas': 29.0.0 - ansi-styles: 5.2.0 - react-is: 18.2.0 - dev: true - - /pretty-hrtime/1.0.3: - resolution: {integrity: sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==} - engines: {node: '>= 0.8'} - dev: true - - /printable-characters/1.0.42: - resolution: {integrity: sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==} - dev: true - - /process-nextick-args/2.0.1: - resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} 
- dev: true - - /process/0.11.10: - resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} - engines: {node: '>= 0.6.0'} - dev: true - - /progress/2.0.3: - resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==} - engines: {node: '>=0.4.0'} - dev: true - - /prompts/2.4.2: - resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} - engines: {node: '>= 6'} - dependencies: - kleur: 3.0.3 - sisteransi: 1.0.5 - dev: true - - /prop-types-extra/1.1.1_react@17.0.2: - resolution: {integrity: sha512-59+AHNnHYCdiC+vMwY52WmvP5dM3QLeoumYuEyceQDi9aEhtwN9zIQ2ZNo25sMyXnbh32h+P1ezDsUpUH3JAew==} - peerDependencies: - react: '>=0.14.0' - dependencies: - react: 17.0.2 - react-is: 16.13.1 - warning: 4.0.3 - dev: false - - /prop-types/15.8.1: - resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - react-is: 16.13.1 - - /property-expr/2.0.5: - resolution: {integrity: sha512-IJUkICM5dP5znhCckHSv30Q4b5/JA5enCtkRHYaOVOAocnH/1BQEYTC5NMfT3AVl/iXKdr3aqQbQn9DxyWknwA==} - dev: false - - /property-information/6.2.0: - resolution: {integrity: sha512-kma4U7AFCTwpqq5twzC1YVIDXSqg6qQK6JN0smOw8fgRy1OkMi0CYSzFmsy6dnqSenamAtj0CyXMUJ1Mf6oROg==} - dev: false - - /protobufjs/6.11.3: - resolution: {integrity: sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg==} - hasBin: true - requiresBuild: true - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/base64': 1.1.2 - '@protobufjs/codegen': 2.0.4 - '@protobufjs/eventemitter': 1.1.0 - '@protobufjs/fetch': 1.1.0 - '@protobufjs/float': 1.0.2 - '@protobufjs/inquire': 1.1.0 - '@protobufjs/path': 1.1.2 - '@protobufjs/pool': 1.1.0 - '@protobufjs/utf8': 1.1.0 - '@types/long': 4.0.2 - 
'@types/node': 17.0.45 - long: 4.0.0 - dev: false - - /protobufjs/7.1.2: - resolution: {integrity: sha512-4ZPTPkXCdel3+L81yw3dG6+Kq3umdWKh7Dc7GW/CpNk4SX3hK58iPCWeCyhVTDrbkNeKrYNZ7EojM5WDaEWTLQ==} - engines: {node: '>=12.0.0'} - requiresBuild: true - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/base64': 1.1.2 - '@protobufjs/codegen': 2.0.4 - '@protobufjs/eventemitter': 1.1.0 - '@protobufjs/fetch': 1.1.0 - '@protobufjs/float': 1.0.2 - '@protobufjs/inquire': 1.1.0 - '@protobufjs/path': 1.1.2 - '@protobufjs/pool': 1.1.0 - '@protobufjs/utf8': 1.1.0 - '@types/node': 17.0.45 - long: 5.2.1 - dev: false - - /proxy-addr/2.0.7: - resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} - engines: {node: '>= 0.10'} - dependencies: - forwarded: 0.2.0 - ipaddr.js: 1.9.1 - dev: true - - /proxy-agent/5.0.0: - resolution: {integrity: sha512-gkH7BkvLVkSfX9Dk27W6TyNOWWZWRilRfk1XxGNWOYJ2TuedAv1yFpCaU9QSBmBe716XOTNpYNOzhysyw8xn7g==} - engines: {node: '>= 8'} - dependencies: - agent-base: 6.0.2 - debug: 4.3.4 - http-proxy-agent: 4.0.1 - https-proxy-agent: 5.0.1 - lru-cache: 5.1.1 - pac-proxy-agent: 5.0.0 - proxy-from-env: 1.1.0 - socks-proxy-agent: 5.0.1 - transitivePeerDependencies: - - supports-color - dev: true - - /proxy-from-env/1.1.0: - resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} - dev: true - - /prr/1.0.1: - resolution: {integrity: sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==} - dev: true - optional: true - - /psl/1.9.0: - resolution: {integrity: sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==} - dev: true - - /punycode/1.3.2: - resolution: {integrity: sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==} - dev: false - - /punycode/2.2.0: - resolution: {integrity: 
sha512-LN6QV1IJ9ZhxWTNdktaPClrNfp8xdSAYS0Zk2ddX7XsXZAxckMHPCBcHRo0cTcEIgYPRiGEkmji3Idkh2yFtYw==} - engines: {node: '>=6'} - dev: true - - /puppeteer-core/2.1.1: - resolution: {integrity: sha512-n13AWriBMPYxnpbb6bnaY5YoY6rGj8vPLrz6CZF3o0qJNEwlcfJVxBzYZ0NJsQ21UbdJoijPCDrM++SUVEz7+w==} - engines: {node: '>=8.16.0'} - dependencies: - '@types/mime-types': 2.1.1 - debug: 4.3.4 - extract-zip: 1.7.0 - https-proxy-agent: 4.0.0 - mime: 2.6.0 - mime-types: 2.1.35 - progress: 2.0.3 - proxy-from-env: 1.1.0 - rimraf: 2.7.1 - ws: 6.2.2 - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - dev: true - - /qs/6.11.0: - resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==} - engines: {node: '>=0.6'} - dependencies: - side-channel: 1.0.4 - dev: true - - /query-string/6.14.1: - resolution: {integrity: sha512-XDxAeVmpfu1/6IjyT/gXHOl+S0vQ9owggJ30hhWKdHAsNPOcasn5o9BW0eejZqL2e4vMjhAxoW3jVHcD6mbcYw==} - engines: {node: '>=6'} - dependencies: - decode-uri-component: 0.2.2 - filter-obj: 1.1.0 - split-on-first: 1.1.0 - strict-uri-encode: 2.0.0 - dev: false - - /querystring/0.2.0: - resolution: {integrity: sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==} - engines: {node: '>=0.4.x'} - deprecated: The querystring API is considered Legacy. new code should use the URLSearchParams API instead. 
- dev: false - - /querystringify/2.2.0: - resolution: {integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==} - dev: true - - /queue-microtask/1.2.3: - resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - dev: true - - /quick-lru/4.0.1: - resolution: {integrity: sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==} - engines: {node: '>=8'} - dev: true - - /ramda/0.28.0: - resolution: {integrity: sha512-9QnLuG/kPVgWvMQ4aODhsBUFKOUmnbUnsSXACv+NCQZcHbeb+v8Lodp8OVxtRULN1/xOyYLLaL6npE6dMq5QTA==} - dev: true - - /range-parser/1.2.1: - resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} - engines: {node: '>= 0.6'} - dev: true - - /raw-body/2.5.1: - resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==} - engines: {node: '>= 0.8'} - dependencies: - bytes: 3.1.2 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - unpipe: 1.0.0 - dev: true - - /react-colorful/5.6.1_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-1exovf0uGTGyq5mXQT0zgQ80uvj2PCwvF8zY1RN9/vbJVSjSo3fsB/4L3ObbF7u70NduSiK4xu4Y6q1MHoUGEw==} - peerDependencies: - react: '>=16.8.0' - react-dom: '>=16.8.0' - dependencies: - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: true - - /react-component-managers/3.2.2_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-SqtB09hS1ir0koBNybvNbNAB3k/r7IbIGbXSxvkkTV0m50s+4oJ59KYsbPAQ/2DhE169Rc5V26d674EcGcDbGA==} - peerDependencies: - react: '>=15.3.0' - react-dom: '>=15.3.0' - dependencies: - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - spy-on-component: 1.1.3 - dev: false - - /react-datepicker/4.8.0_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: 
sha512-u69zXGHMpxAa4LeYR83vucQoUCJQ6m/WBsSxmUMu/M8ahTSVMMyiyQzauHgZA2NUr9y0FUgOAix71hGYUb6tvg==} - peerDependencies: - react: ^16.9.0 || ^17 || ^18 - react-dom: ^16.9.0 || ^17 || ^18 - dependencies: - '@popperjs/core': 2.11.6 - classnames: 2.3.2 - date-fns: 2.29.3 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-onclickoutside: 6.12.2_sfoxds7t5ydpegc3knd667wn6m - react-popper: 2.3.0_vov5yimr6vvxyufd6uigwwkst4 - dev: false - - /react-docgen-typescript/2.2.2_typescript@4.9.5: - resolution: {integrity: sha512-tvg2ZtOpOi6QDwsb3GZhOjDkkX0h8Z2gipvTg6OVMUyoYoURhEiRNePT8NZItTVCDh39JJHnLdfCOkzoLbFnTg==} - peerDependencies: - typescript: '>= 4.3.x' - dependencies: - typescript: 4.9.5 - dev: true - - /react-docgen/6.0.0-alpha.3: - resolution: {integrity: sha512-DDLvB5EV9As1/zoUsct6Iz2Cupw9FObEGD3DMcIs3EDFIoSKyz8FZtoWj3Wj+oodrU4/NfidN0BL5yrapIcTSA==} - engines: {node: '>=12.0.0'} - hasBin: true - dependencies: - '@babel/core': 7.20.12 - '@babel/generator': 7.20.7 - ast-types: 0.14.2 - commander: 2.20.3 - doctrine: 3.0.0 - estree-to-babel: 3.2.1 - neo-async: 2.6.2 - node-dir: 0.1.17 - resolve: 1.22.1 - strip-indent: 3.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /react-dom/17.0.2_react@17.0.2: - resolution: {integrity: sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==} - peerDependencies: - react: 17.0.2 - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - react: 17.0.2 - scheduler: 0.20.2 - - /react-element-to-jsx-string/15.0.0_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-UDg4lXB6BzlobN60P8fHWVPX3Kyw8ORrTeBtClmIlGdkOOE+GYQSFvmEU5iLLpwp/6v42DINwNcwOhOLfQ//FQ==} - peerDependencies: - react: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0 - react-dom: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0 - dependencies: - '@base2/pretty-print-object': 1.0.1 - is-plain-object: 5.0.0 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-is: 18.1.0 - dev: 
true - - /react-error-boundary/3.1.4_react@17.0.2: - resolution: {integrity: sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==} - engines: {node: '>=10', npm: '>=6'} - peerDependencies: - react: '>=16.13.1' - dependencies: - '@babel/runtime': 7.20.7 - react: 17.0.2 - dev: true - - /react-fast-compare/2.0.4: - resolution: {integrity: sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==} - dev: false - - /react-fast-compare/3.2.0: - resolution: {integrity: sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA==} - - /react-helmet-async/1.3.0_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-9jZ57/dAn9t3q6hneQS0wukqC2ENOBgMNVEhb/ZG9ZSxUetzVIw4iAmEU38IaVg3QGYauQPhSeUTuIUtFglWpg==} - peerDependencies: - react: ^16.6.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.6.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@babel/runtime': 7.20.7 - invariant: 2.2.4 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-fast-compare: 3.2.0 - shallowequal: 1.1.0 - dev: false - - /react-inspector/6.0.1_react@17.0.2: - resolution: {integrity: sha512-cxKSeFTf7jpSSVddm66sKdolG90qURAX3g1roTeaN6x0YEbtWc8JpmFN9+yIqLNH2uEkYerWLtJZIXRIFuBKrg==} - peerDependencies: - react: ^16.8.4 || ^17.0.0 || ^18.0.0 - dependencies: - react: 17.0.2 - dev: true - - /react-intersection-observer/9.4.2_react@17.0.2: - resolution: {integrity: sha512-AdK+ryzZ7U9ZJYttDUZ8q2Am3nqE0exg5Ryl5Y124KeVsix/1hGZPbdu58EqA98TwnzwDNWHxg/kwNawmIiUig==} - peerDependencies: - react: ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 - dependencies: - react: 17.0.2 - dev: false - - /react-intl/6.2.5_oatgdhaahtizs2uezdzbohxvne: - resolution: {integrity: sha512-nz21POTKbE0sPEuEJU4o5YTZYY7VlIYCPNJaD6D2+xKyk6Noj6DoUK0LRO9LXuQNUuQ044IZl3m6ymzZRj8XFQ==} - peerDependencies: - react: ^16.6.0 || 17 || 18 - typescript: ^4.7 - peerDependenciesMeta: - typescript: - optional: true - dependencies: - 
'@formatjs/ecma402-abstract': 1.14.3 - '@formatjs/icu-messageformat-parser': 2.1.14 - '@formatjs/intl': 2.6.3_typescript@4.9.5 - '@formatjs/intl-displaynames': 6.2.3 - '@formatjs/intl-listformat': 7.1.7 - '@types/hoist-non-react-statics': 3.3.1 - '@types/react': 17.0.52 - hoist-non-react-statics: 3.3.2 - intl-messageformat: 10.2.5 - react: 17.0.2 - tslib: 2.4.1 - typescript: 4.9.5 - dev: false - - /react-is/16.13.1: - resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} - - /react-is/17.0.2: - resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} - - /react-is/18.1.0: - resolution: {integrity: sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==} - dev: true - - /react-is/18.2.0: - resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} - dev: true - - /react-lazylog/4.5.3_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-lyov32A/4BqihgXgtNXTHCajXSXkYHPlIEmV8RbYjHIMxCFSnmtdg4kDCI3vATz7dURtiFTvrw5yonHnrS+NNg==} - peerDependencies: - react: '>=16.3.0' - dependencies: - '@mattiasbuelens/web-streams-polyfill': 0.2.1 - fetch-readablestream: 0.2.0 - immutable: 3.8.2 - mitt: 1.2.0 - prop-types: 15.8.1 - react: 17.0.2 - react-string-replace: 0.4.4 - react-virtualized: 9.22.3_wem7zdhrj6jola7ic3qcehiqii_sfoxds7t5ydpegc3knd667wn6m - text-encoding-utf-8: 1.0.2 - whatwg-fetch: 2.0.4 - transitivePeerDependencies: - - react-dom - dev: false - - /react-lifecycles-compat/3.0.4: - resolution: {integrity: sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==} - dev: false - - /react-markdown/7.1.2_q5o373oqrklnndq2vhekyuzhxi: - resolution: {integrity: sha512-ibMcc0EbfmbwApqJD8AUr0yls8BSrKzIbHaUsPidQljxToCqFh34nwtu3CXNEItcVJNzpjDHrhK8A+MAh2JW3A==} - peerDependencies: - '@types/react': '>=16' - 
react: '>=16' - dependencies: - '@types/hast': 2.3.4 - '@types/react': 17.0.52 - '@types/unist': 2.0.6 - comma-separated-tokens: 2.0.3 - hast-util-whitespace: 2.0.1 - prop-types: 15.8.1 - property-information: 6.2.0 - react: 17.0.2 - react-is: 17.0.2 - remark-parse: 10.0.1 - remark-rehype: 9.1.0 - space-separated-tokens: 2.0.2 - style-to-object: 0.3.0 - unified: 10.1.2 - unist-util-visit: 4.1.1 - vfile: 5.3.6 - transitivePeerDependencies: - - supports-color - dev: false - - /react-measure/2.5.2_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-M+rpbTLWJ3FD6FXvYV6YEGvQ5tMayQ3fGrZhRPHrE9bVlBYfDCLuDcgNttYfk8IqfOI03jz6cbpqMRTUclQnaA==} - peerDependencies: - react: '>0.13.0' - react-dom: '>0.13.0' - dependencies: - '@babel/runtime': 7.20.7 - get-node-dimensions: 1.2.1 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - resize-observer-polyfill: 1.5.1 - dev: false - - /react-onclickoutside/6.12.2_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-NMXGa223OnsrGVp5dJHkuKxQ4czdLmXSp5jSV9OqiCky9LOpPATn3vLldc+q5fK3gKbEHvr7J1u0yhBh/xYkpA==} - peerDependencies: - react: ^15.5.x || ^16.x || ^17.x || ^18.x - react-dom: ^15.5.x || ^16.x || ^17.x || ^18.x - dependencies: - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /react-paginate/8.1.4_react@17.0.2: - resolution: {integrity: sha512-c3rxjcTEqeDQa6LqXifxLeFguY2qy2CHGRphVjHLFFMGfIHyaJ+v3bOvIlLYEeohwQ1q+cQpknjsqBVrkc/SNA==} - peerDependencies: - react: ^16 || ^17 || ^18 - dependencies: - prop-types: 15.8.1 - react: 17.0.2 - dev: false - - /react-popper/2.3.0_vov5yimr6vvxyufd6uigwwkst4: - resolution: {integrity: sha512-e1hj8lL3uM+sgSR4Lxzn5h1GxBlpa4CQz0XLF8kx4MDrDRWY0Ena4c97PUeSX9i5W3UAfDP0z0FXCTQkoXUl3Q==} - peerDependencies: - '@popperjs/core': ^2.0.0 - react: ^16.8.0 || ^17 || ^18 - react-dom: ^16.8.0 || ^17 || ^18 - dependencies: - '@popperjs/core': 2.11.6 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-fast-compare: 3.2.0 - warning: 4.0.3 - - 
/react-query/3.39.2_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-F6hYDKyNgDQfQOuR1Rsp3VRzJnWHx6aRnnIZHMNGGgbL3SBgpZTDg8MQwmxOgpCAoqZJA+JSNCydF1xGJqKOCA==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: '*' - react-native: '*' - peerDependenciesMeta: - react-dom: - optional: true - react-native: - optional: true - dependencies: - '@babel/runtime': 7.20.7 - broadcast-channel: 3.7.0 - match-sorter: 6.3.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /react-reflex/4.0.9_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-XFTNRekFK4ul8mzVd1lniKT/SI0FvNYhXyLNl5gagS1i3iW9QKlpFYcRfVhZlxxaYHb8UyLOs3+H4Ay5cjtbxQ==} - peerDependencies: - react: ^16.0.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@babel/runtime': 7.20.7 - lodash.throttle: 4.1.1 - prop-types: 15.8.1 - react: 17.0.2 - react-measure: 2.5.2_sfoxds7t5ydpegc3knd667wn6m - transitivePeerDependencies: - - react-dom - dev: false - - /react-refresh/0.14.0: - resolution: {integrity: sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==} - engines: {node: '>=0.10.0'} - dev: true - - /react-resize-detector/7.1.2_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-zXnPJ2m8+6oq9Nn8zsep/orts9vQv3elrpA+R8XTcW7DVVUJ9vwDwMXaBtykAYjMnkCIaOoK9vObyR7ZgFNlOw==} - peerDependencies: - react: ^16.0.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 - dependencies: - lodash: 4.17.21 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /react-resize-detector/8.0.3_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-c3eqm5BVcluVhxHsBQnhyPO/5uYB3XHIHz6D1ZOHzU2WcnZF0Cr3KLl5OIozRC2RSsdQlu5vn1PHEqrvKRnIYA==} - peerDependencies: - react: ^16.0.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 - dependencies: - lodash: 4.17.21 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /react-router-dom/6.3.0_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: 
sha512-uaJj7LKytRxZNQV8+RbzJWnJ8K2nPsOOEuX7aQstlMZKQT0164C+X2w6bnkqU3sjtLvpd5ojrezAyfZ1+0sStw==} - peerDependencies: - react: '>=16.8' - react-dom: '>=16.8' - dependencies: - history: 5.3.0 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-router: 6.3.0_react@17.0.2 - dev: false - - /react-router/6.3.0_react@17.0.2: - resolution: {integrity: sha512-7Wh1DzVQ+tlFjkeo+ujvjSqSJmkt1+8JO+T5xklPlgrh70y7ogx75ODRW0ThWhY7S+6yEDks8TYrtQe/aoboBQ==} - peerDependencies: - react: '>=16.8' - dependencies: - history: 5.3.0 - react: 17.0.2 - dev: false - - /react-select-event/5.5.1: - resolution: {integrity: sha512-goAx28y0+iYrbqZA2FeRTreHHs/ZtSuKxtA+J5jpKT5RHPCbVZJ4MqACfPnWyFXsEec+3dP5bCrNTxIX8oYe9A==} - dependencies: - '@testing-library/dom': 8.20.0 - dev: true - - /react-select/5.7.0_dlps62spiehie4hvtd46aaye5u: - resolution: {integrity: sha512-lJGiMxCa3cqnUr2Jjtg9YHsaytiZqeNOKeibv6WF5zbK/fPegZ1hg3y/9P1RZVLhqBTs0PfqQLKuAACednYGhQ==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@babel/runtime': 7.20.7 - '@emotion/cache': 11.10.5 - '@emotion/react': 11.10.5_nehdyrcubdy45i2h35h56gfg7i - '@floating-ui/dom': 1.1.0 - '@types/react-transition-group': 4.4.5 - memoize-one: 6.0.0 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-transition-group: 4.4.5_sfoxds7t5ydpegc3knd667wn6m - use-isomorphic-layout-effect: 1.1.2_q5o373oqrklnndq2vhekyuzhxi - transitivePeerDependencies: - - '@babel/core' - - '@types/react' - dev: false - - /react-slick/0.29.0_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-TGdOKE+ZkJHHeC4aaoH85m8RnFyWqdqRfAGkhd6dirmATXMZWAxOpTLmw2Ll/jPTQ3eEG7ercFr/sbzdeYCJXA==} - peerDependencies: - react: ^0.14.0 || ^15.0.1 || ^16.0.0 || ^17.0.0 || ^18.0.0 - react-dom: ^0.14.0 || ^15.0.1 || ^16.0.0 || ^17.0.0 || ^18.0.0 - dependencies: - classnames: 2.3.2 - enquire.js: 2.1.6 - json2mq: 0.2.0 - lodash.debounce: 4.0.8 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - 
resize-observer-polyfill: 1.5.1 - dev: false - - /react-smooth/2.0.1_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-Own9TA0GPPf3as4vSwFhDouVfXP15ie/wIHklhyKBH5AN6NFtdk0UpHBnonV11BtqDkAWlt40MOUc+5srmW7NA==} - peerDependencies: - prop-types: ^15.6.0 - react: ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 - react-dom: ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 - dependencies: - fast-equals: 2.0.4 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-transition-group: 2.9.0_sfoxds7t5ydpegc3knd667wn6m - dev: false - - /react-string-replace/0.4.4: - resolution: {integrity: sha512-FAMkhxmDpCsGTwTZg7p/2v+/GTmxAp73so3fbSvlAcBBX36ujiGRNEaM/1u+jiYQrArhns+7eE92g2pi5E5FUA==} - engines: {node: '>=0.12.0'} - dependencies: - lodash: 4.17.21 - dev: false - - /react-table/7.8.0_react@17.0.2: - resolution: {integrity: sha512-hNaz4ygkZO4bESeFfnfOft73iBUj8K5oKi1EcSHPAibEydfsX2MyU6Z8KCr3mv3C9Kqqh71U+DhZkFvibbnPbA==} - peerDependencies: - react: ^16.8.3 || ^17.0.0-0 || ^18.0.0 - dependencies: - react: 17.0.2 - dev: false - - /react-transition-group/2.9.0_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-+HzNTCHpeQyl4MJ/bdE0u6XRMe9+XG/+aL4mCxVN4DnPBQ0/5bfHWPDuOZUzYdMj94daZaZdCCc1Dzt9R/xSSg==} - peerDependencies: - react: '>=15.0.0' - react-dom: '>=15.0.0' - dependencies: - dom-helpers: 3.4.0 - loose-envify: 1.4.0 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-lifecycles-compat: 3.0.4 - dev: false - - /react-transition-group/4.4.5_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==} - peerDependencies: - react: '>=16.6.0' - react-dom: '>=16.6.0' - dependencies: - '@babel/runtime': 7.20.7 - dom-helpers: 5.2.1 - loose-envify: 1.4.0 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /react-universal-interface/0.6.2_react@17.0.2+tslib@2.4.1: - resolution: {integrity: 
sha512-dg8yXdcQmvgR13RIlZbTRQOoUrDciFVoSBZILwjE2LFISxZZ8loVJKAkuzswl5js8BHda79bIb2b84ehU8IjXw==} - peerDependencies: - react: '*' - tslib: '*' - dependencies: - react: 17.0.2 - tslib: 2.4.1 - dev: false - - /react-use-intercom/1.5.2_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-5bK3TtsZ9S18vWuagncc1tJA/+2WouIjMHtVlgcrvECYbDhOwFNMb1h1XDDcOIk4gZv+eDx/oiWLbNHFAR7lIA==} - engines: {node: '>=10'} - peerDependencies: - react: '>=16.8.0' - react-dom: '>=16.8.0' - dependencies: - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - dev: false - - /react-use/17.4.0_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-TgbNTCA33Wl7xzIJegn1HndB4qTS9u03QUwyNycUnXaweZkE4Kq2SB+Yoxx8qbshkZGYBDvUXbXWRUmQDcZZ/Q==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - dependencies: - '@types/js-cookie': 2.2.7 - '@xobotyi/scrollbar-width': 1.9.5 - copy-to-clipboard: 3.3.3 - fast-deep-equal: 3.1.3 - fast-shallow-equal: 1.0.0 - js-cookie: 2.2.1 - nano-css: 5.3.5_sfoxds7t5ydpegc3knd667wn6m - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-universal-interface: 0.6.2_react@17.0.2+tslib@2.4.1 - resize-observer-polyfill: 1.5.1 - screenfull: 5.2.0 - set-harmonic-interval: 1.0.1 - throttle-debounce: 3.0.1 - ts-easing: 0.2.0 - tslib: 2.4.1 - dev: false - - /react-virtualized/9.22.3_wem7zdhrj6jola7ic3qcehiqii_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-MKovKMxWTcwPSxE1kK1HcheQTWfuCxAuBoSTf2gwyMM21NdX/PXUhnoP8Uc5dRKd+nKm8v41R36OellhdCpkrw==} - peerDependencies: - react: ^15.3.0 || ^16.0.0-alpha - react-dom: ^15.3.0 || ^16.0.0-alpha - dependencies: - '@babel/runtime': 7.20.7 - clsx: 1.2.1 - dom-helpers: 5.2.1 - loose-envify: 1.4.0 - prop-types: 15.8.1 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-lifecycles-compat: 3.0.4 - dev: false - patched: true - - /react-widgets/4.6.1_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: 
sha512-x2n4EFQnk1ZG2rWsdekGK3js091k+b06e0CRI4pDEZ0uh/cft3NyGFKS5/x7CV/fN51kHMaM4r5IRGIbPfsLLw==} - peerDependencies: - react: '>=0.14.0' - react-dom: '>=0.14.0' - dependencies: - classnames: 2.3.2 - date-arithmetic: 3.1.0 - dom-helpers: 3.4.0 - invariant: 2.2.4 - prop-types-extra: 1.1.1_react@17.0.2 - react: 17.0.2 - react-component-managers: 3.2.2_sfoxds7t5ydpegc3knd667wn6m - react-dom: 17.0.2_react@17.0.2 - react-lifecycles-compat: 3.0.4 - react-transition-group: 2.9.0_sfoxds7t5ydpegc3knd667wn6m - uncontrollable: 7.2.1_react@17.0.2 - warning: 3.0.0 - dev: false - - /react/17.0.2: - resolution: {integrity: sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==} - engines: {node: '>=0.10.0'} - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - - /read-installed/4.0.3: - resolution: {integrity: sha512-O03wg/IYuV/VtnK2h/KXEt9VIbMUFbk3ERG0Iu4FhLZw0EP0T9znqrYDGn6ncbEsXUFaUjiVAWXHzxwt3lhRPQ==} - dependencies: - debuglog: 1.0.1 - read-package-json: 2.1.2 - readdir-scoped-modules: 1.1.0 - semver: 5.7.1 - slide: 1.1.6 - util-extend: 1.0.3 - optionalDependencies: - graceful-fs: 4.2.10 - dev: true - - /read-package-json/2.1.2: - resolution: {integrity: sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA==} - dependencies: - glob: 7.2.3 - json-parse-even-better-errors: 2.3.1 - normalize-package-data: 2.5.0 - npm-normalize-package-bin: 1.0.1 - dev: true - - /read-pkg-up/7.0.1: - resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==} - engines: {node: '>=8'} - dependencies: - find-up: 4.1.0 - read-pkg: 5.2.0 - type-fest: 0.8.1 - dev: true - - /read-pkg/5.2.0: - resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==} - engines: {node: '>=8'} - dependencies: - '@types/normalize-package-data': 2.4.1 - normalize-package-data: 2.5.0 - parse-json: 5.2.0 - 
type-fest: 0.6.0 - dev: true - - /readable-stream/1.1.14: - resolution: {integrity: sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==} - dependencies: - core-util-is: 1.0.3 - inherits: 2.0.4 - isarray: 0.0.1 - string_decoder: 0.10.31 - dev: true - - /readable-stream/2.3.7: - resolution: {integrity: sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==} - dependencies: - core-util-is: 1.0.3 - inherits: 2.0.4 - isarray: 1.0.0 - process-nextick-args: 2.0.1 - safe-buffer: 5.1.2 - string_decoder: 1.1.1 - util-deprecate: 1.0.2 - dev: true - - /readable-stream/3.6.0: - resolution: {integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==} - engines: {node: '>= 6'} - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - dev: true - - /readdir-scoped-modules/1.1.0: - resolution: {integrity: sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==} - deprecated: This functionality has been moved to @npmcli/fs - dependencies: - debuglog: 1.0.1 - dezalgo: 1.0.4 - graceful-fs: 4.2.10 - once: 1.4.0 - dev: true - - /readdirp/3.6.0: - resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} - engines: {node: '>=8.10.0'} - dependencies: - picomatch: 2.3.1 - - /recast/0.20.5: - resolution: {integrity: sha512-E5qICoPoNL4yU0H0NoBDntNB0Q5oMSNh9usFctYniLBluTthi3RsQVBXIJNbApOlvSwW/RGxIuokPcAc59J5fQ==} - engines: {node: '>= 4'} - dependencies: - ast-types: 0.14.2 - esprima: 4.0.1 - source-map: 0.6.1 - tslib: 2.5.0 - dev: true - - /recast/0.23.1: - resolution: {integrity: sha512-RokaBcoxSjXUDzz1TXSZmZsSW6ZpLmlA3GGqJ8uuTrQ9hZhEz+4Tpsc+gRvYRJ2BU4H+ZyUlg91eSGDw7bwy7g==} - engines: {node: '>= 4'} - dependencies: - assert: 2.0.0 - ast-types: 0.16.1 - esprima: 4.0.1 - source-map: 0.6.1 - tslib: 2.5.0 - dev: true - - 
/recharts-scale/0.4.5: - resolution: {integrity: sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==} - dependencies: - decimal.js-light: 2.5.1 - dev: false - - /recharts/2.3.2_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-2II30fGzKaypHfHNQNUhCfiLMxrOS/gF0WFahDIEFgXtJkVEe2DpZWFfEfAn+RU3B7/h2V/B05Bwmqq3rTXwLw==} - engines: {node: '>=12'} - peerDependencies: - prop-types: ^15.6.0 - react: ^16.0.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 - dependencies: - classnames: 2.3.2 - eventemitter3: 4.0.7 - lodash: 4.17.21 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - react-is: 16.13.1 - react-resize-detector: 7.1.2_sfoxds7t5ydpegc3knd667wn6m - react-smooth: 2.0.1_sfoxds7t5ydpegc3knd667wn6m - recharts-scale: 0.4.5 - reduce-css-calc: 2.1.8 - victory-vendor: 36.6.8 - dev: false - - /rechoir/0.6.2: - resolution: {integrity: sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==} - engines: {node: '>= 0.10'} - dependencies: - resolve: 1.22.1 - dev: true - - /redent/3.0.0: - resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} - engines: {node: '>=8'} - dependencies: - indent-string: 4.0.0 - strip-indent: 3.0.0 - dev: true - - /reduce-css-calc/2.1.8: - resolution: {integrity: sha512-8liAVezDmUcH+tdzoEGrhfbGcP7nOV4NkGE3a74+qqvE7nt9i4sKLGBuZNOnpI4WiGksiNPklZxva80061QiPg==} - dependencies: - css-unit-converter: 1.1.2 - postcss-value-parser: 3.3.1 - dev: false - - /reftools/1.1.9: - resolution: {integrity: sha512-OVede/NQE13xBQ+ob5CKd5KyeJYU2YInb1bmV4nRoOfquZPkAkxuOXicSe1PvqIuZZ4kD13sPKBbR7UFDmli6w==} - dev: true - - /regenerate-unicode-properties/10.1.0: - resolution: {integrity: sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ==} - engines: {node: '>=4'} - dependencies: - regenerate: 1.4.2 - dev: true - - /regenerate/1.4.2: - resolution: 
{integrity: sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==} - dev: true - - /regenerator-runtime/0.13.11: - resolution: {integrity: sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==} - - /regenerator-transform/0.15.1: - resolution: {integrity: sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg==} - dependencies: - '@babel/runtime': 7.20.7 - dev: true - - /regex-not/1.0.2: - resolution: {integrity: sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==} - engines: {node: '>=0.10.0'} - dependencies: - extend-shallow: 3.0.2 - safe-regex: 1.1.0 - dev: true - - /regexp.prototype.flags/1.4.3: - resolution: {integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - functions-have-names: 1.2.3 - dev: true - - /regexpp/3.2.0: - resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==} - engines: {node: '>=8'} - dev: true - - /regexpu-core/5.2.2: - resolution: {integrity: sha512-T0+1Zp2wjF/juXMrMxHxidqGYn8U4R+zleSJhX9tQ1PUsS8a9UtYfbsF9LdiVgNX3kiX8RNaKM42nfSgvFJjmw==} - engines: {node: '>=4'} - dependencies: - regenerate: 1.4.2 - regenerate-unicode-properties: 10.1.0 - regjsgen: 0.7.1 - regjsparser: 0.9.1 - unicode-match-property-ecmascript: 2.0.0 - unicode-match-property-value-ecmascript: 2.1.0 - dev: true - - /regjsgen/0.7.1: - resolution: {integrity: sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA==} - dev: true - - /regjsparser/0.9.1: - resolution: {integrity: sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==} - hasBin: true - dependencies: - jsesc: 0.5.0 - dev: true - - /rehype-slug/5.1.0: - resolution: 
{integrity: sha512-Gf91dJoXneiorNEnn+Phx97CO7oRMrpi+6r155tTxzGuLtm+QrI4cTwCa9e1rtePdL4i9tSO58PeSS6HWfgsiw==} - dependencies: - '@types/hast': 2.3.4 - github-slugger: 2.0.0 - hast-util-has-property: 2.0.1 - hast-util-heading-rank: 2.1.1 - hast-util-to-string: 2.0.0 - unified: 10.1.2 - unist-util-visit: 4.1.1 - dev: false - - /rehype-urls/1.1.1: - resolution: {integrity: sha512-ct9Kb/nAL6oe/O5fDc0xjiqm8Z9xgXdorOdDhZAWx7awucyiuYXU7Dax+23Gu24nnGwtdaCW6zslKAYzlEW1lw==} - dependencies: - hast-util-has-property: 1.0.4 - stdopt: 2.2.0 - unist-util-visit: 1.4.1 - dev: false - - /remark-directive/2.0.1: - resolution: {integrity: sha512-oosbsUAkU/qmUE78anLaJePnPis4ihsE7Agp0T/oqTzvTea8pOiaYEtfInU/+xMOVTS9PN5AhGOiaIVe4GD8gw==} - dependencies: - '@types/mdast': 3.0.10 - mdast-util-directive: 2.2.2 - micromark-extension-directive: 2.1.2 - unified: 10.1.2 - dev: false - - /remark-external-links/8.0.0: - resolution: {integrity: sha512-5vPSX0kHoSsqtdftSHhIYofVINC8qmp0nctkeU9YoJwV3YfiBRiI6cbFRJ0oI/1F9xS+bopXG0m2KS8VFscuKA==} - dependencies: - extend: 3.0.2 - is-absolute-url: 3.0.3 - mdast-util-definitions: 4.0.0 - space-separated-tokens: 1.1.5 - unist-util-visit: 2.0.3 - dev: true - - /remark-frontmatter/4.0.1: - resolution: {integrity: sha512-38fJrB0KnmD3E33a5jZC/5+gGAC2WKNiPw1/fdXJvijBlhA7RCsvJklrYJakS0HedninvaCYW8lQGf9C918GfA==} - dependencies: - '@types/mdast': 3.0.10 - mdast-util-frontmatter: 1.0.0 - micromark-extension-frontmatter: 1.0.0 - unified: 10.1.2 - dev: false - - /remark-gfm/3.0.1: - resolution: {integrity: sha512-lEFDoi2PICJyNrACFOfDD3JlLkuSbOa5Wd8EPt06HUdptv8Gn0bxYTdbU/XXQ3swAPkEaGxxPN9cbnMHvVu1Ig==} - dependencies: - '@types/mdast': 3.0.10 - mdast-util-gfm: 2.0.1 - micromark-extension-gfm: 2.0.1 - unified: 10.1.2 - transitivePeerDependencies: - - supports-color - dev: false - - /remark-parse/10.0.1: - resolution: {integrity: sha512-1fUyHr2jLsVOkhbvPRBJ5zTKZZyD6yZzYaWCS6BPBdQ8vEMBCH+9zNCDA6tET/zHCi/jLqjCWtlJZUPk+DbnFw==} - dependencies: - '@types/mdast': 3.0.10 - 
mdast-util-from-markdown: 1.2.0 - unified: 10.1.2 - transitivePeerDependencies: - - supports-color - dev: false - - /remark-rehype/9.1.0: - resolution: {integrity: sha512-oLa6YmgAYg19zb0ZrBACh40hpBLteYROaPLhBXzLgjqyHQrN+gVP9N/FJvfzuNNuzCutktkroXEZBrxAxKhh7Q==} - dependencies: - '@types/hast': 2.3.4 - '@types/mdast': 3.0.10 - mdast-util-to-hast: 11.3.0 - unified: 10.1.2 - dev: false - - /remark-slug/6.1.0: - resolution: {integrity: sha512-oGCxDF9deA8phWvxFuyr3oSJsdyUAxMFbA0mZ7Y1Sas+emILtO+e5WutF9564gDsEN4IXaQXm5pFo6MLH+YmwQ==} - dependencies: - github-slugger: 1.5.0 - mdast-util-to-string: 1.1.0 - unist-util-visit: 2.0.3 - dev: true - - /remove-accents/0.4.2: - resolution: {integrity: sha512-7pXIJqJOq5tFgG1A2Zxti3Ht8jJF337m4sowbuHsW30ZnkQFnDzy9qBNhgzX8ZLW4+UBcXiiR7SwR6pokHsxiA==} - dev: false - - /repeat-element/1.1.4: - resolution: {integrity: sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==} - engines: {node: '>=0.10.0'} - dev: true - - /repeat-string/1.6.1: - resolution: {integrity: sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==} - engines: {node: '>=0.10'} - dev: true - - /require-all/3.0.0: - resolution: {integrity: sha512-jPGN876lc5exWYrMcgZSd7U42P0PmVQzxnQB13fCSzmyGnqQWW4WUz5DosZ/qe24hz+5o9lSvW2epBNZ1xa6Fw==} - engines: {node: '>= 0.8'} - dev: true - - /require-directory/2.1.1: - resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} - engines: {node: '>=0.10.0'} - - /require-from-string/2.0.2: - resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} - engines: {node: '>=0.10.0'} - dev: true - - /requires-port/1.0.0: - resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} - dev: true - - /reserved-words/0.1.2: - resolution: {integrity: 
sha512-0S5SrIUJ9LfpbVl4Yzij6VipUdafHrOTzvmfazSw/jeZrZtQK303OPZW+obtkaw7jQlTQppy0UvZWm9872PbRw==} - dev: true - - /reserved/0.1.2: - resolution: {integrity: sha512-/qO54MWj5L8WCBP9/UNe2iefJc+L9yETbH32xO/ft/EYPOTCR5k+azvDUgdCOKwZH8hXwPd0b8XBL78Nn2U69g==} - engines: {node: '>=0.8'} - dev: true - - /resize-observer-polyfill/1.5.1: - resolution: {integrity: sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==} - dev: false - - /resolve-cwd/3.0.0: - resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} - engines: {node: '>=8'} - dependencies: - resolve-from: 5.0.0 - dev: true - - /resolve-from/4.0.0: - resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} - engines: {node: '>=4'} - - /resolve-from/5.0.0: - resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} - engines: {node: '>=8'} - dev: true - - /resolve-url/0.2.1: - resolution: {integrity: sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg==} - deprecated: https://github.com/lydell/resolve-url#deprecated - dev: true - - /resolve.exports/1.1.1: - resolution: {integrity: sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ==} - engines: {node: '>=10'} - dev: true - - /resolve/1.22.1: - resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} - hasBin: true - dependencies: - is-core-module: 2.11.0 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - - /resolve/2.0.0-next.4: - resolution: {integrity: sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==} - hasBin: true - dependencies: - is-core-module: 2.11.0 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - dev: true - - 
/restore-cursor/3.1.0: - resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} - engines: {node: '>=8'} - dependencies: - onetime: 5.1.2 - signal-exit: 3.0.7 - dev: true - - /ret/0.1.15: - resolution: {integrity: sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==} - engines: {node: '>=0.12'} - dev: true - - /reusify/1.0.4: - resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} - engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - dev: true - - /rfdc/1.3.0: - resolution: {integrity: sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==} - dev: true - - /rimraf/2.6.3: - resolution: {integrity: sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==} - hasBin: true - dependencies: - glob: 7.2.3 - dev: true - - /rimraf/2.7.1: - resolution: {integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==} - hasBin: true - dependencies: - glob: 7.2.3 - dev: true - - /rimraf/3.0.2: - resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} - hasBin: true - dependencies: - glob: 7.2.3 - - /rollup/2.79.1: - resolution: {integrity: sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==} - engines: {node: '>=10.0.0'} - hasBin: true - optionalDependencies: - fsevents: 2.3.2 - dev: true - - /rollup/3.10.1: - resolution: {integrity: sha512-3Er+yel3bZbZX1g2kjVM+FW+RUWDxbG87fcqFM5/9HbPCTpbVp6JOLn7jlxnNlbu7s/N/uDA4EV/91E2gWnxzw==} - engines: {node: '>=14.18.0', npm: '>=8.0.0'} - hasBin: true - optionalDependencies: - fsevents: 2.3.2 - dev: true - - /rtl-css-js/1.16.1: - resolution: {integrity: 
sha512-lRQgou1mu19e+Ya0LsTvKrVJ5TYUbqCVPAiImX3UfLTenarvPUl1QFdvu5Z3PYmHT9RCcwIfbjRQBntExyj3Zg==} - dependencies: - '@babel/runtime': 7.20.7 - dev: false - - /run-parallel/1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - dependencies: - queue-microtask: 1.2.3 - dev: true - - /rxjs/7.8.0: - resolution: {integrity: sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg==} - dependencies: - tslib: 2.4.1 - - /sade/1.8.1: - resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==} - engines: {node: '>=6'} - dependencies: - mri: 1.2.0 - dev: false - - /safe-buffer/5.1.1: - resolution: {integrity: sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==} - dev: true - - /safe-buffer/5.1.2: - resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} - dev: true - - /safe-buffer/5.2.1: - resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - - /safe-regex-test/1.0.0: - resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==} - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 - is-regex: 1.1.4 - dev: true - - /safe-regex/1.1.0: - resolution: {integrity: sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg==} - dependencies: - ret: 0.1.15 - dev: true - - /safe-stable-stringify/1.1.1: - resolution: {integrity: sha512-ERq4hUjKDbJfE4+XtZLFPCDi8Vb1JqaxAPTxWFLBx8XcAlf9Bda/ZJdVezs/NAfsMQScyIlUMx+Yeu7P7rx5jw==} - dev: true - - /safer-buffer/2.1.2: - resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} - dev: true - - /sanitize-html/2.8.1: - resolution: 
{integrity: sha512-qK5neD0SaMxGwVv5txOYv05huC3o6ZAA4h5+7nJJgWMNFUNRjcjLO6FpwAtKzfKCZ0jrG6xTk6eVFskbvOGblg==} - dependencies: - deepmerge: 4.2.2 - escape-string-regexp: 4.0.0 - htmlparser2: 8.0.1 - is-plain-object: 5.0.0 - parse-srcset: 1.0.2 - postcss: 8.4.21 - dev: false - - /sass/1.57.1: - resolution: {integrity: sha512-O2+LwLS79op7GI0xZ8fqzF7X2m/m8WFfI02dHOdsK5R2ECeS5F62zrwg/relM1rjSLy7Vd/DiMNIvPrQGsA0jw==} - engines: {node: '>=12.0.0'} - hasBin: true - dependencies: - chokidar: 3.5.3 - immutable: 4.2.2 - source-map-js: 1.0.2 - - /sax/1.2.4: - resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==} - dev: true - - /saxes/6.0.0: - resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==} - engines: {node: '>=v12.22.7'} - dependencies: - xmlchars: 2.2.0 - dev: true - - /scheduler/0.20.2: - resolution: {integrity: sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==} - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - - /screenfull/5.2.0: - resolution: {integrity: sha512-9BakfsO2aUQN2K9Fdbj87RJIEZ82Q9IGim7FqM5OsebfoFC6ZHXgDq/KvniuLTPdeM8wY2o6Dj3WQ7KeQCj3cA==} - engines: {node: '>=0.10.0'} - dev: false - - /semver/5.7.1: - resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==} - hasBin: true - dev: true - - /semver/6.3.0: - resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} - hasBin: true - - /semver/7.0.0: - resolution: {integrity: sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==} - hasBin: true - dev: true - - /semver/7.3.8: - resolution: {integrity: sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==} - engines: {node: '>=10'} - hasBin: true - dependencies: - 
lru-cache: 6.0.0 - dev: true - - /send/0.18.0: - resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==} - engines: {node: '>= 0.8.0'} - dependencies: - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - fresh: 0.5.2 - http-errors: 2.0.0 - mime: 1.6.0 - ms: 2.1.3 - on-finished: 2.4.1 - range-parser: 1.2.1 - statuses: 2.0.1 - transitivePeerDependencies: - - supports-color - dev: true - - /serve-favicon/2.5.0: - resolution: {integrity: sha512-FMW2RvqNr03x+C0WxTyu6sOv21oOjkq5j8tjquWccwa6ScNyGFOGJVpuS1NmTVGBAHS07xnSKotgf2ehQmf9iA==} - engines: {node: '>= 0.8.0'} - dependencies: - etag: 1.8.1 - fresh: 0.5.2 - ms: 2.1.1 - parseurl: 1.3.3 - safe-buffer: 5.1.1 - dev: true - - /serve-static/1.15.0: - resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==} - engines: {node: '>= 0.8.0'} - dependencies: - encodeurl: 1.0.2 - escape-html: 1.0.3 - parseurl: 1.3.3 - send: 0.18.0 - transitivePeerDependencies: - - supports-color - dev: true - - /set-blocking/2.0.0: - resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} - dev: true - - /set-harmonic-interval/1.0.1: - resolution: {integrity: sha512-AhICkFV84tBP1aWqPwLZqFvAwqEoVA9kxNMniGEUvzOlm4vLmOFLiTT3UZ6bziJTy4bOVpzWGTfSCbmaayGx8g==} - engines: {node: '>=6.9'} - dev: false - - /set-value/2.0.1: - resolution: {integrity: sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==} - engines: {node: '>=0.10.0'} - dependencies: - extend-shallow: 2.0.1 - is-extendable: 0.1.1 - is-plain-object: 2.0.4 - split-string: 3.1.0 - dev: true - - /setprototypeof/1.2.0: - resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} - dev: true - - /shallow-clone/3.0.1: - resolution: {integrity: 
sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==} - engines: {node: '>=8'} - dependencies: - kind-of: 6.0.3 - dev: true - - /shallowequal/1.1.0: - resolution: {integrity: sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==} - dev: false - - /shebang-command/2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} - dependencies: - shebang-regex: 3.0.0 - dev: true - - /shebang-regex/3.0.0: - resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - dev: true - - /shelljs/0.8.5: - resolution: {integrity: sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==} - engines: {node: '>=4'} - hasBin: true - dependencies: - glob: 7.2.3 - interpret: 1.4.0 - rechoir: 0.6.2 - dev: true - - /should-equal/2.0.0: - resolution: {integrity: sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==} - dependencies: - should-type: 1.4.0 - dev: true - - /should-format/3.0.3: - resolution: {integrity: sha512-hZ58adtulAk0gKtua7QxevgUaXTTXxIi8t41L3zo9AHvjXO1/7sdLECuHeIN2SRtYXpNkmhoUP2pdeWgricQ+Q==} - dependencies: - should-type: 1.4.0 - should-type-adaptors: 1.1.0 - dev: true - - /should-type-adaptors/1.1.0: - resolution: {integrity: sha512-JA4hdoLnN+kebEp2Vs8eBe9g7uy0zbRo+RMcU0EsNy+R+k049Ki+N5tT5Jagst2g7EAja+euFuoXFCa8vIklfA==} - dependencies: - should-type: 1.4.0 - should-util: 1.0.1 - dev: true - - /should-type/1.4.0: - resolution: {integrity: sha512-MdAsTu3n25yDbIe1NeN69G4n6mUnJGtSJHygX3+oN0ZbO3DTiATnf7XnYJdGT42JCXurTb1JI0qOBR65shvhPQ==} - dev: true - - /should-util/1.0.1: - resolution: {integrity: sha512-oXF8tfxx5cDk8r2kYqlkUJzZpDBqVY/II2WhvU0n9Y3XYvAYRmeaf1PvvIvTgPnv4KJ+ES5M0PyDq5Jp+Ygy2g==} - dev: true - - /should/13.2.3: - 
resolution: {integrity: sha512-ggLesLtu2xp+ZxI+ysJTmNjh2U0TsC+rQ/pfED9bUZZ4DKefP27D+7YJVVTvKsmjLpIi9jAa7itwDGkDDmt1GQ==} - dependencies: - should-equal: 2.0.0 - should-format: 3.0.3 - should-type: 1.4.0 - should-type-adaptors: 1.1.0 - should-util: 1.0.1 - dev: true - - /side-channel/1.0.4: - resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 - object-inspect: 1.12.3 - dev: true - - /signal-exit/3.0.7: - resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} - dev: true - - /simple-eval/1.0.0: - resolution: {integrity: sha512-kpKJR+bqTscgC0xuAl2xHN6bB12lHjC2DCUfqjAx19bQyO3R2EVLOurm3H9AUltv/uFVcSCVNc6faegR+8NYLw==} - engines: {node: '>=12'} - dependencies: - jsep: 1.3.8 - dev: true - - /simple-update-notifier/1.1.0: - resolution: {integrity: sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==} - engines: {node: '>=8.10.0'} - dependencies: - semver: 7.0.0 - dev: true - - /sisteransi/1.0.5: - resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} - dev: true - - /slash/3.0.0: - resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} - engines: {node: '>=8'} - dev: true - - /slice-ansi/3.0.0: - resolution: {integrity: sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==} - engines: {node: '>=8'} - dependencies: - ansi-styles: 4.3.0 - astral-regex: 2.0.0 - is-fullwidth-code-point: 3.0.0 - dev: true - - /slice-ansi/4.0.0: - resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} - engines: {node: '>=10'} - dependencies: - ansi-styles: 4.3.0 - astral-regex: 2.0.0 - is-fullwidth-code-point: 3.0.0 - dev: 
true - - /slice-ansi/5.0.0: - resolution: {integrity: sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==} - engines: {node: '>=12'} - dependencies: - ansi-styles: 6.2.1 - is-fullwidth-code-point: 4.0.0 - dev: true - - /slide/1.1.6: - resolution: {integrity: sha512-NwrtjCg+lZoqhFU8fOwl4ay2ei8PaqCBOUV3/ektPY9trO1yQ1oXEfmHAhKArUVUr/hOHvy5f6AdP17dCM0zMw==} - dev: true - - /smart-buffer/4.2.0: - resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} - engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} - dev: true - - /snapdragon-node/2.1.1: - resolution: {integrity: sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==} - engines: {node: '>=0.10.0'} - dependencies: - define-property: 1.0.0 - isobject: 3.0.1 - snapdragon-util: 3.0.1 - dev: true - - /snapdragon-util/3.0.1: - resolution: {integrity: sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==} - engines: {node: '>=0.10.0'} - dependencies: - kind-of: 3.2.2 - dev: true - - /snapdragon/0.8.2: - resolution: {integrity: sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==} - engines: {node: '>=0.10.0'} - dependencies: - base: 0.11.2 - debug: 2.6.9 - define-property: 0.2.5 - extend-shallow: 2.0.1 - map-cache: 0.2.2 - source-map: 0.5.7 - source-map-resolve: 0.5.3 - use: 3.1.1 - transitivePeerDependencies: - - supports-color - dev: true - - /socks-proxy-agent/5.0.1: - resolution: {integrity: sha512-vZdmnjb9a2Tz6WEQVIurybSwElwPxMZaIc7PzqbJTrezcKNznv6giT7J7tZDZ1BojVaa1jvO/UiUdhDVB0ACoQ==} - engines: {node: '>= 6'} - dependencies: - agent-base: 6.0.2 - debug: 4.3.4 - socks: 2.7.1 - transitivePeerDependencies: - - supports-color - dev: true - - /socks/2.7.1: - resolution: {integrity: sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==} - engines: {node: 
'>= 10.13.0', npm: '>= 3.0.0'} - dependencies: - ip: 2.0.0 - smart-buffer: 4.2.0 - dev: true - - /source-map-js/1.0.2: - resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} - engines: {node: '>=0.10.0'} - - /source-map-resolve/0.5.3: - resolution: {integrity: sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==} - deprecated: See https://github.com/lydell/source-map-resolve#deprecated - dependencies: - atob: 2.1.2 - decode-uri-component: 0.2.2 - resolve-url: 0.2.1 - source-map-url: 0.4.1 - urix: 0.1.0 - dev: true - - /source-map-support/0.5.13: - resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} - dependencies: - buffer-from: 1.1.2 - source-map: 0.6.1 - dev: true - - /source-map-support/0.5.21: - resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} - dependencies: - buffer-from: 1.1.2 - source-map: 0.6.1 - dev: true - - /source-map-url/0.4.1: - resolution: {integrity: sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==} - deprecated: See https://github.com/lydell/source-map-url#deprecated - dev: true - - /source-map/0.5.6: - resolution: {integrity: sha512-MjZkVp0NHr5+TPihLcadqnlVoGIoWo4IBHptutGh9wI3ttUYvCG26HkSuDi+K6lsZ25syXJXcctwgyVCt//xqA==} - engines: {node: '>=0.10.0'} - dev: false - - /source-map/0.5.7: - resolution: {integrity: sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==} - engines: {node: '>=0.10.0'} - - /source-map/0.6.1: - resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} - engines: {node: '>=0.10.0'} - - /source-map/0.7.4: - resolution: {integrity: sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==} - 
engines: {node: '>= 8'} - dev: true - - /sourcemap-codec/1.4.8: - resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==} - deprecated: Please use @jridgewell/sourcemap-codec instead - - /space-separated-tokens/1.1.5: - resolution: {integrity: sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==} - dev: true - - /space-separated-tokens/2.0.2: - resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} - dev: false - - /spdx-compare/1.0.0: - resolution: {integrity: sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A==} - dependencies: - array-find-index: 1.0.2 - spdx-expression-parse: 3.0.1 - spdx-ranges: 2.1.1 - dev: true - - /spdx-correct/3.1.1: - resolution: {integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==} - dependencies: - spdx-expression-parse: 3.0.1 - spdx-license-ids: 3.0.12 - dev: true - - /spdx-exceptions/2.3.0: - resolution: {integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==} - dev: true - - /spdx-expression-parse/3.0.1: - resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} - dependencies: - spdx-exceptions: 2.3.0 - spdx-license-ids: 3.0.12 - dev: true - - /spdx-license-ids/3.0.12: - resolution: {integrity: sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==} - dev: true - - /spdx-ranges/2.1.1: - resolution: {integrity: sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==} - dev: true - - /spdx-satisfies/4.0.1: - resolution: {integrity: sha512-WVzZ/cXAzoNmjCWiEluEA3BjHp5tiUmmhn9MK+X0tBbR9sOqtC6UQwmgCNrAIZvNlMuBUYAaHYfb2oqlF9SwKA==} - dependencies: - spdx-compare: 1.0.0 - 
spdx-expression-parse: 3.0.1 - spdx-ranges: 2.1.1 - dev: true - - /split-on-first/1.1.0: - resolution: {integrity: sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==} - engines: {node: '>=6'} - dev: false - - /split-string/3.1.0: - resolution: {integrity: sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==} - engines: {node: '>=0.10.0'} - dependencies: - extend-shallow: 3.0.2 - dev: true - - /sprintf-js/1.0.3: - resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - dev: true - - /spy-on-component/1.1.3: - resolution: {integrity: sha512-a7jgnoBSdkcDWIQQwtEgUq4etajwG6+wGIjfC9ARUKwKOdHxJd+utgHTgLn81ETizpsw4xddUS3W8VePedtaIQ==} - dev: false - - /stack-generator/2.0.10: - resolution: {integrity: sha512-mwnua/hkqM6pF4k8SnmZ2zfETsRUpWXREfA/goT8SLCV4iOFa4bzOX2nDipWAZFPTjLvQB82f5yaodMVhK0yJQ==} - dependencies: - stackframe: 1.3.4 - dev: false - - /stack-utils/2.0.6: - resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} - engines: {node: '>=10'} - dependencies: - escape-string-regexp: 2.0.0 - dev: true - - /stackframe/1.3.4: - resolution: {integrity: sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw==} - dev: false - - /stacktrace-gps/3.1.2: - resolution: {integrity: sha512-GcUgbO4Jsqqg6RxfyTHFiPxdPqF+3LFmQhm7MgCuYQOYuWyqxo5pwRPz5d/u6/WYJdEnWfK4r+jGbyD8TSggXQ==} - dependencies: - source-map: 0.5.6 - stackframe: 1.3.4 - dev: false - - /stacktrace-js/2.0.2: - resolution: {integrity: sha512-Je5vBeY4S1r/RnLydLl0TBTi3F2qdfWmYsGvtfZgEI+SCprPppaIhQf5nGcal4gI4cGpCV/duLcAzT1np6sQqg==} - dependencies: - error-stack-parser: 2.1.4 - stack-generator: 2.0.10 - stacktrace-gps: 3.1.2 - dev: false - - /stacktracey/2.1.8: - resolution: {integrity: 
sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw==} - dependencies: - as-table: 1.0.55 - get-source: 2.0.12 - dev: true - - /state-local/1.0.7: - resolution: {integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==} - dev: false - - /static-extend/0.1.2: - resolution: {integrity: sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g==} - engines: {node: '>=0.10.0'} - dependencies: - define-property: 0.2.5 - object-copy: 0.1.0 - dev: true - - /statuses/2.0.1: - resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} - engines: {node: '>= 0.8'} - dev: true - - /stdopt/2.2.0: - resolution: {integrity: sha512-D/p41NgXOkcj1SeGhfXOwv9z1K6EV3sjAUY5aeepVbgEHv7DpKWLTjhjScyzMWAQCAgUQys1mjH0eArm4cjRGw==} - dependencies: - is-arrayish: 0.3.2 - dev: false - - /stop-iteration-iterator/1.0.0: - resolution: {integrity: sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==} - engines: {node: '>= 0.4'} - dependencies: - internal-slot: 1.0.4 - dev: true - - /store2/2.14.2: - resolution: {integrity: sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w==} - dev: true - - /storybook/7.0.0-beta.38: - resolution: {integrity: sha512-9sGO7yrgD+XqoXSysWcUppRmp+umntsJqz9FAITDgnbtehX7rsSkvqxRfghtukIqzWow5zsv/YQLFGy/zv1oTg==} - hasBin: true - dependencies: - '@storybook/cli': 7.0.0-beta.38 - transitivePeerDependencies: - - bufferutil - - encoding - - supports-color - - utf-8-validate - dev: true - - /strict-uri-encode/2.0.0: - resolution: {integrity: sha512-QwiXZgpRcKkhTj2Scnn++4PKtWsH0kpzZ62L2R6c/LUVYv7hVnZqcg2+sMuT6R7Jusu1vviK/MFsu6kNJfWlEQ==} - engines: {node: '>=4'} - dev: false - - /string-argv/0.3.1: - resolution: {integrity: 
sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==} - engines: {node: '>=0.6.19'} - dev: true - - /string-convert/0.2.1: - resolution: {integrity: sha512-u/1tdPl4yQnPBjnVrmdLo9gtuLvELKsAoRapekWggdiQNvvvum+jYF329d84NAa660KQw7pB2n36KrIKVoXa3A==} - dev: false - - /string-length/4.0.2: - resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} - engines: {node: '>=10'} - dependencies: - char-regex: 1.0.2 - strip-ansi: 6.0.1 - dev: true - - /string-natural-compare/3.0.1: - resolution: {integrity: sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw==} - dev: true - - /string-width/4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} - engines: {node: '>=8'} - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - - /string-width/5.1.2: - resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} - engines: {node: '>=12'} - dependencies: - eastasianwidth: 0.2.0 - emoji-regex: 9.2.2 - strip-ansi: 7.0.1 - dev: true - - /string.prototype.matchall/4.0.8: - resolution: {integrity: sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - get-intrinsic: 1.1.3 - has-symbols: 1.0.3 - internal-slot: 1.0.4 - regexp.prototype.flags: 1.4.3 - side-channel: 1.0.4 - dev: true - - /string.prototype.trimend/1.0.6: - resolution: {integrity: sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - dev: true - - /string.prototype.trimstart/1.0.6: - resolution: {integrity: 
sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.21.1 - dev: true - - /string_decoder/0.10.31: - resolution: {integrity: sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==} - dev: true - - /string_decoder/1.1.1: - resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} - dependencies: - safe-buffer: 5.1.2 - dev: true - - /string_decoder/1.3.0: - resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} - dependencies: - safe-buffer: 5.2.1 - dev: true - - /stringify-entities/4.0.3: - resolution: {integrity: sha512-BP9nNHMhhfcMbiuQKCqMjhDP5yBCAxsPu4pHFFzJ6Alo9dZgY4VLDPutXqIjpRiMoKdp7Av85Gr73Q5uH9k7+g==} - dependencies: - character-entities-html4: 2.1.0 - character-entities-legacy: 3.0.0 - dev: false - - /strip-ansi/6.0.1: - resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} - engines: {node: '>=8'} - dependencies: - ansi-regex: 5.0.1 - - /strip-ansi/7.0.1: - resolution: {integrity: sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==} - engines: {node: '>=12'} - dependencies: - ansi-regex: 6.0.1 - dev: true - - /strip-bom/3.0.0: - resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} - engines: {node: '>=4'} - dev: true - - /strip-bom/4.0.0: - resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} - engines: {node: '>=8'} - dev: true - - /strip-final-newline/2.0.0: - resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} - engines: {node: '>=6'} - dev: true - - 
/strip-indent/3.0.0: - resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} - engines: {node: '>=8'} - dependencies: - min-indent: 1.0.1 - dev: true - - /strip-json-comments/3.1.1: - resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} - engines: {node: '>=8'} - dev: true - - /style-search/0.1.0: - resolution: {integrity: sha512-Dj1Okke1C3uKKwQcetra4jSuk0DqbzbYtXipzFlFMZtowbF1x7BKJwB9AayVMyFARvU8EDrZdcax4At/452cAg==} - dev: true - - /style-to-object/0.3.0: - resolution: {integrity: sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==} - dependencies: - inline-style-parser: 0.1.1 - dev: false - - /style-value-types/5.0.0: - resolution: {integrity: sha512-08yq36Ikn4kx4YU6RD7jWEv27v4V+PUsOGa4n/as8Et3CuODMJQ00ENeAVXAeydX4Z2j1XHZF1K2sX4mGl18fA==} - dependencies: - hey-listen: 1.0.8 - tslib: 2.4.1 - dev: false - - /styled-components/5.3.6_sfoxds7t5ydpegc3knd667wn6m: - resolution: {integrity: sha512-hGTZquGAaTqhGWldX7hhfzjnIYBZ0IXQXkCYdvF1Sq3DsUaLx6+NTHC5Jj1ooM2F68sBiVz3lvhfwQs/S3l6qg==} - engines: {node: '>=10'} - requiresBuild: true - peerDependencies: - react: '>= 16.8.0' - react-dom: '>= 16.8.0' - react-is: '>= 16.8.0' - dependencies: - '@babel/helper-module-imports': 7.18.6 - '@babel/traverse': 7.20.12_supports-color@5.5.0 - '@emotion/is-prop-valid': 1.2.0 - '@emotion/stylis': 0.8.5 - '@emotion/unitless': 0.7.5 - babel-plugin-styled-components: 2.0.7_styled-components@5.3.6 - css-to-react-native: 3.1.0 - hoist-non-react-statics: 3.3.2 - react: 17.0.2 - react-dom: 17.0.2_react@17.0.2 - shallowequal: 1.1.0 - supports-color: 5.5.0 - dev: false - - /stylelint-config-css-modules/4.1.0_stylelint@14.16.1: - resolution: {integrity: sha512-w6d552NscwvpUEaUcmq8GgWXKRv6lVHLbDj6QIHSM2vCWr83qRqRvXBJCfXDyaG/J3Zojw2inU9VvU99ZlXuUw==} - peerDependencies: - stylelint: ^14.5.1 - dependencies: - stylelint: 
14.16.1 - optionalDependencies: - stylelint-scss: 4.3.0_stylelint@14.16.1 - dev: true - - /stylelint-config-prettier-scss/0.0.1_stylelint@14.16.1: - resolution: {integrity: sha512-lBAYG9xYOh2LeWEPC/64xeUxwOTnQ8nDyBijQoWoJb10/bMGrUwnokpt8jegGck2Vbtxh6XGwH63z5qBcVHreQ==} - engines: {node: '>= 12'} - hasBin: true - peerDependencies: - stylelint: '>=11.0.0' - dependencies: - stylelint: 14.16.1 - stylelint-config-prettier: 9.0.4_stylelint@14.16.1 - dev: true - - /stylelint-config-prettier/9.0.4_stylelint@14.16.1: - resolution: {integrity: sha512-38nIGTGpFOiK5LjJ8Ma1yUgpKENxoKSOhbDNSemY7Ep0VsJoXIW9Iq/2hSt699oB9tReynfWicTAoIHiq8Rvbg==} - engines: {node: '>= 12'} - hasBin: true - peerDependencies: - stylelint: '>=11.0.0' - dependencies: - stylelint: 14.16.1 - dev: true - - /stylelint-config-recommended-scss/7.0.0_stylelint@14.16.1: - resolution: {integrity: sha512-rGz1J4rMAyJkvoJW4hZasuQBB7y9KIrShb20l9DVEKKZSEi1HAy0vuNlR8HyCKy/jveb/BdaQFcoiYnmx4HoiA==} - peerDependencies: - stylelint: ^14.4.0 - dependencies: - postcss-scss: 4.0.6 - stylelint: 14.16.1 - stylelint-config-recommended: 8.0.0_stylelint@14.16.1 - stylelint-scss: 4.3.0_stylelint@14.16.1 - transitivePeerDependencies: - - postcss - dev: true - - /stylelint-config-recommended/8.0.0_stylelint@14.16.1: - resolution: {integrity: sha512-IK6dWvE000+xBv9jbnHOnBq01gt6HGVB2ZTsot+QsMpe82doDQ9hvplxfv4YnpEuUwVGGd9y6nbaAnhrjcxhZQ==} - peerDependencies: - stylelint: ^14.8.0 - dependencies: - stylelint: 14.16.1 - dev: true - - /stylelint-config-standard-scss/5.0.0_stylelint@14.16.1: - resolution: {integrity: sha512-zoXLibojHZYPFjtkc4STZtAJ2yGTq3Bb4MYO0oiyO6f/vNxDKRcSDZYoqN260Gv2eD5niQIr1/kr5SXlFj9kcQ==} - peerDependencies: - stylelint: ^14.9.0 - dependencies: - stylelint: 14.16.1 - stylelint-config-recommended-scss: 7.0.0_stylelint@14.16.1 - stylelint-config-standard: 26.0.0_stylelint@14.16.1 - transitivePeerDependencies: - - postcss - dev: true - - /stylelint-config-standard/26.0.0_stylelint@14.16.1: - resolution: {integrity: 
sha512-hUuB7LaaqM8abvkOO84wh5oYSkpXgTzHu2Zza6e7mY+aOmpNTjoFBRxSLlzY0uAOMWEFx0OMKzr+reG1BUtcqQ==} - peerDependencies: - stylelint: ^14.9.0 - dependencies: - stylelint: 14.16.1 - stylelint-config-recommended: 8.0.0_stylelint@14.16.1 - dev: true - - /stylelint-scss/4.3.0_stylelint@14.16.1: - resolution: {integrity: sha512-GvSaKCA3tipzZHoz+nNO7S02ZqOsdBzMiCx9poSmLlb3tdJlGddEX/8QzCOD8O7GQan9bjsvLMsO5xiw6IhhIQ==} - peerDependencies: - stylelint: ^14.5.1 - dependencies: - lodash: 4.17.21 - postcss-media-query-parser: 0.2.3 - postcss-resolve-nested-selector: 0.1.1 - postcss-selector-parser: 6.0.11 - postcss-value-parser: 4.2.0 - stylelint: 14.16.1 - dev: true - - /stylelint/14.16.1: - resolution: {integrity: sha512-ErlzR/T3hhbV+a925/gbfc3f3Fep9/bnspMiJPorfGEmcBbXdS+oo6LrVtoUZ/w9fqD6o6k7PtUlCOsCRdjX/A==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - hasBin: true - dependencies: - '@csstools/selector-specificity': 2.0.2_wajs5nedgkikc5pcuwett7legi - balanced-match: 2.0.0 - colord: 2.9.3 - cosmiconfig: 7.1.0 - css-functions-list: 3.1.0 - debug: 4.3.4 - fast-glob: 3.2.12 - fastest-levenshtein: 1.0.16 - file-entry-cache: 6.0.1 - global-modules: 2.0.0 - globby: 11.1.0 - globjoin: 0.1.4 - html-tags: 3.2.0 - ignore: 5.2.4 - import-lazy: 4.0.0 - imurmurhash: 0.1.4 - is-plain-object: 5.0.0 - known-css-properties: 0.26.0 - mathml-tag-names: 2.1.3 - meow: 9.0.0 - micromatch: 4.0.5 - normalize-path: 3.0.0 - picocolors: 1.0.0 - postcss: 8.4.21 - postcss-media-query-parser: 0.2.3 - postcss-resolve-nested-selector: 0.1.1 - postcss-safe-parser: 6.0.0_postcss@8.4.21 - postcss-selector-parser: 6.0.11 - postcss-value-parser: 4.2.0 - resolve-from: 5.0.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - style-search: 0.1.0 - supports-hyperlinks: 2.3.0 - svg-tags: 1.0.0 - table: 6.8.1 - v8-compile-cache: 2.3.0 - write-file-atomic: 4.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /stylis/4.1.3: - resolution: {integrity: 
sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA==} - dev: false - - /stylus/0.59.0: - resolution: {integrity: sha512-lQ9w/XIOH5ZHVNuNbWW8D822r+/wBSO/d6XvtyHLF7LW4KaCIDeVbvn5DF8fGCJAUCwVhVi/h6J0NUcnylUEjg==} - hasBin: true - dependencies: - '@adobe/css-tools': 4.0.2 - debug: 4.3.4 - glob: 7.2.3 - sax: 1.2.4 - source-map: 0.7.4 - transitivePeerDependencies: - - supports-color - dev: true - - /supports-color/5.5.0: - resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} - engines: {node: '>=4'} - dependencies: - has-flag: 3.0.0 - - /supports-color/7.2.0: - resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} - engines: {node: '>=8'} - dependencies: - has-flag: 4.0.0 - dev: true - - /supports-color/8.1.1: - resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} - engines: {node: '>=10'} - dependencies: - has-flag: 4.0.0 - dev: true - - /supports-color/9.3.1: - resolution: {integrity: sha512-knBY82pjmnIzK3NifMo3RxEIRD9E0kIzV4BKcyTZ9+9kWgLMxd4PrsTSMoFQUabgRBbF8KOLRDCyKgNV+iK44Q==} - engines: {node: '>=12'} - dev: true - - /supports-hyperlinks/2.3.0: - resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} - engines: {node: '>=8'} - dependencies: - has-flag: 4.0.0 - supports-color: 7.2.0 - dev: true - - /supports-preserve-symlinks-flag/1.0.0: - resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} - engines: {node: '>= 0.4'} - - /svg-parser/2.0.4: - resolution: {integrity: sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==} - dev: true - - /svg-tags/1.0.0: - resolution: {integrity: 
sha512-ovssysQTa+luh7A5Weu3Rta6FJlFBBbInjOh722LIt6klpU2/HtdUbszju/G4devcvk8PGt7FCLv5wftu3THUA==} - dev: true - - /swagger2openapi/7.0.8: - resolution: {integrity: sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==} - hasBin: true - dependencies: - call-me-maybe: 1.0.2 - node-fetch: 2.6.9 - node-fetch-h2: 2.3.0 - node-readfiles: 0.2.0 - oas-kit-common: 1.0.8 - oas-resolver: 2.5.6 - oas-schema-walker: 1.1.5 - oas-validator: 5.0.8 - reftools: 1.1.9 - yaml: 1.10.2 - yargs: 17.6.2 - transitivePeerDependencies: - - encoding - dev: true - - /symbol-tree/3.2.4: - resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} - dev: true - - /synchronous-promise/2.0.16: - resolution: {integrity: sha512-qImOD23aDfnIDNqlG1NOehdB9IYsn1V9oByPjKY1nakv2MQYCEMyX033/q+aEtYCpmYK1cv2+NTmlH+ra6GA5A==} - dev: true - - /table/6.8.1: - resolution: {integrity: sha512-Y4X9zqrCftUhMeH2EptSSERdVKt/nEdijTOacGD/97EKjhQ/Qs8RTlEGABSJNNN8lac9kheH+af7yAkEWlgneA==} - engines: {node: '>=10.0.0'} - dependencies: - ajv: 8.12.0 - lodash.truncate: 4.4.2 - slice-ansi: 4.0.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - dev: true - - /tar/6.1.13: - resolution: {integrity: sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==} - engines: {node: '>=10'} - dependencies: - chownr: 2.0.0 - fs-minipass: 2.1.0 - minipass: 4.0.0 - minizlib: 2.1.2 - mkdirp: 1.0.4 - yallist: 4.0.0 - dev: true - - /telejson/7.0.4: - resolution: {integrity: sha512-J4QEuCnYGXAI9KSN7RXK0a0cOW2ONpjc4IQbInGZ6c3stvplLAYyZjTnScrRd8deXVjNCFV1wXcLC7SObDuQYA==} - dependencies: - memoizerific: 1.11.3 - dev: true - - /temp-dir/2.0.0: - resolution: {integrity: sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==} - engines: {node: '>=8'} - dev: true - - /temp/0.8.4: - resolution: {integrity: 
sha512-s0ZZzd0BzYv5tLSptZooSjK8oj6C+c19p7Vqta9+6NPOf7r+fxq0cJe6/oN4LTC79sy5NY8ucOJNgwsKCSbfqg==} - engines: {node: '>=6.0.0'} - dependencies: - rimraf: 2.6.3 - dev: true - - /tempy/1.0.1: - resolution: {integrity: sha512-biM9brNqxSc04Ee71hzFbryD11nX7VPhQQY32AdDmjFvodsRFz/3ufeoTZ6uYkRFfGo188tENcASNs3vTdsM0w==} - engines: {node: '>=10'} - dependencies: - del: 6.1.1 - is-stream: 2.0.1 - temp-dir: 2.0.0 - type-fest: 0.16.0 - unique-string: 2.0.0 - dev: true - - /test-exclude/6.0.0: - resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} - engines: {node: '>=8'} - dependencies: - '@istanbuljs/schema': 0.1.3 - glob: 7.2.3 - minimatch: 3.1.2 - dev: true - - /text-encoding-utf-8/1.0.2: - resolution: {integrity: sha512-8bw4MY9WjdsD2aMtO0OzOCY3pXGYNx2d2FfHRVUKkiCPDWjKuOlhLVASS+pD7VkLTVjW268LYJHwsnPFlBpbAg==} - dev: false - - /text-table/0.2.0: - resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} - dev: true - - /throttle-debounce/3.0.1: - resolution: {integrity: sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg==} - engines: {node: '>=10'} - dev: false - - /through/2.3.8: - resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} - dev: true - - /timezone-mock/1.3.6: - resolution: {integrity: sha512-YcloWmZfLD9Li5m2VcobkCDNVaLMx8ohAb/97l/wYS3m+0TIEK5PFNMZZfRcusc6sFjIfxu8qcJT0CNnOdpqmg==} - dev: true - - /tiny-invariant/1.3.1: - resolution: {integrity: sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==} - dev: true - - /tiny-warning/1.0.3: - resolution: {integrity: sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==} - dev: false - - /tmpl/1.0.5: - resolution: {integrity: 
sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} - dev: true - - /to-fast-properties/2.0.0: - resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} - engines: {node: '>=4'} - - /to-object-path/0.3.0: - resolution: {integrity: sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg==} - engines: {node: '>=0.10.0'} - dependencies: - kind-of: 3.2.2 - dev: true - - /to-regex-range/2.1.1: - resolution: {integrity: sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==} - engines: {node: '>=0.10.0'} - dependencies: - is-number: 3.0.0 - repeat-string: 1.6.1 - dev: true - - /to-regex-range/5.0.1: - resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} - engines: {node: '>=8.0'} - dependencies: - is-number: 7.0.0 - - /to-regex/3.0.2: - resolution: {integrity: sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==} - engines: {node: '>=0.10.0'} - dependencies: - define-property: 2.0.2 - extend-shallow: 3.0.2 - regex-not: 1.0.2 - safe-regex: 1.1.0 - dev: true - - /toggle-selection/1.0.6: - resolution: {integrity: sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==} - dev: false - - /toidentifier/1.0.1: - resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} - engines: {node: '>=0.6'} - dev: true - - /toposort/2.0.2: - resolution: {integrity: sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg==} - dev: false - - /tough-cookie/4.1.2: - resolution: {integrity: sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ==} - engines: {node: '>=6'} - dependencies: - psl: 1.9.0 - punycode: 2.2.0 - 
universalify: 0.2.0 - url-parse: 1.5.10 - dev: true - - /tr46/0.0.3: - resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} - - /tr46/3.0.0: - resolution: {integrity: sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==} - engines: {node: '>=12'} - dependencies: - punycode: 2.2.0 - dev: true - - /treeify/1.1.0: - resolution: {integrity: sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==} - engines: {node: '>=0.6'} - dev: true - - /trim-newlines/3.0.1: - resolution: {integrity: sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==} - engines: {node: '>=8'} - dev: true - - /trough/2.1.0: - resolution: {integrity: sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==} - dev: false - - /ts-dedent/2.2.0: - resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} - engines: {node: '>=6.10'} - dev: true - - /ts-easing/0.2.0: - resolution: {integrity: sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ==} - dev: false - - /ts-node/10.9.1_cin3sed6ohfsopbmt6orxeb4o4: - resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} - hasBin: true - peerDependencies: - '@swc/core': '>=1.2.50' - '@swc/wasm': '>=1.2.50' - '@types/node': '*' - typescript: '>=2.7' - peerDependenciesMeta: - '@swc/core': - optional: true - '@swc/wasm': - optional: true - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.9 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.3 - '@types/node': 17.0.45 - acorn: 8.8.1 - acorn-walk: 8.2.0 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 4.9.5 - 
v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - dev: true - - /tsconfck/2.0.2_typescript@4.9.5: - resolution: {integrity: sha512-H3DWlwKpow+GpVLm/2cpmok72pwRr1YFROV3YzAmvzfGFiC1zEM/mc9b7+1XnrxuXtEbhJ7xUSIqjPFbedp7aQ==} - engines: {node: ^14.13.1 || ^16 || >=18, pnpm: ^7.18.0} - hasBin: true - peerDependencies: - typescript: ^4.3.5 - peerDependenciesMeta: - typescript: - optional: true - dependencies: - typescript: 4.9.5 - dev: true - - /tsconfig-paths/3.14.1: - resolution: {integrity: sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==} - dependencies: - '@types/json5': 0.0.29 - json5: 1.0.2 - minimist: 1.2.7 - strip-bom: 3.0.0 - dev: true - - /tsconfig-paths/4.1.2: - resolution: {integrity: sha512-uhxiMgnXQp1IR622dUXI+9Ehnws7i/y6xvpZB9IbUVOPy0muvdvgXeZOn88UcGPiT98Vp3rJPTa8bFoalZ3Qhw==} - engines: {node: '>=6'} - dependencies: - json5: 2.2.3 - minimist: 1.2.7 - strip-bom: 3.0.0 - dev: true - - /tslib/1.14.1: - resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} - - /tslib/2.4.1: - resolution: {integrity: sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==} - - /tslib/2.5.0: - resolution: {integrity: sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==} - - /tsutils/3.21.0_typescript@4.9.5: - resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} - engines: {node: '>= 6'} - peerDependencies: - typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' - dependencies: - tslib: 1.14.1 - typescript: 4.9.5 - dev: true - - /type-check/0.3.2: - resolution: {integrity: sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==} - engines: {node: '>= 0.8.0'} - dependencies: - prelude-ls: 
1.1.2 - dev: true - - /type-check/0.4.0: - resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} - engines: {node: '>= 0.8.0'} - dependencies: - prelude-ls: 1.2.1 - dev: true - - /type-detect/4.0.8: - resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} - engines: {node: '>=4'} - dev: true - - /type-fest/0.16.0: - resolution: {integrity: sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==} - engines: {node: '>=10'} - dev: true - - /type-fest/0.18.1: - resolution: {integrity: sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==} - engines: {node: '>=10'} - dev: true - - /type-fest/0.20.2: - resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} - engines: {node: '>=10'} - dev: true - - /type-fest/0.21.3: - resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} - engines: {node: '>=10'} - dev: true - - /type-fest/0.6.0: - resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==} - engines: {node: '>=8'} - dev: true - - /type-fest/0.8.1: - resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} - engines: {node: '>=8'} - dev: true - - /type-fest/2.19.0: - resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} - engines: {node: '>=12.20'} - dev: true - - /type-is/1.6.18: - resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} - engines: {node: '>= 0.6'} - dependencies: - media-typer: 0.3.0 - mime-types: 2.1.35 - dev: true - - /typed-array-length/1.0.4: - resolution: 
{integrity: sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==} - dependencies: - call-bind: 1.0.2 - for-each: 0.3.3 - is-typed-array: 1.1.10 - dev: true - - /typedarray/0.0.6: - resolution: {integrity: sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==} - dev: true - - /typesafe-actions/5.1.0: - resolution: {integrity: sha512-bna6Yi1pRznoo6Bz1cE6btB/Yy8Xywytyfrzu/wc+NFW3ZF0I+2iCGImhBsoYYCOWuICtRO4yHcnDlzgo1AdNg==} - engines: {node: '>= 4'} - dev: false - - /typescript-plugin-css-modules/4.1.1_6qtx7vkbdhwvdm4crzlegk4mvi: - resolution: {integrity: sha512-kpVxGkY/go9eV5TP1YUDJ6SqwBx2OIuVStMCxKyg9PhJVFXjLYR7AuItVLwoz0NCdiemH91WhtgAjb96jI34DA==} - peerDependencies: - typescript: '>=3.9.0' - dependencies: - dotenv: 16.0.3 - icss-utils: 5.1.0_postcss@8.4.21 - less: 4.1.3 - lodash.camelcase: 4.3.0 - postcss: 8.4.21 - postcss-filter-plugins: 3.0.1 - postcss-icss-keyframes: 0.2.1 - postcss-icss-selectors: 2.0.3 - postcss-load-config: 3.1.4_aesdjsunmf4wiehhujt67my7tu - reserved-words: 0.1.2 - sass: 1.57.1 - source-map-js: 1.0.2 - stylus: 0.59.0 - tsconfig-paths: 4.1.2 - typescript: 4.9.5 - transitivePeerDependencies: - - supports-color - - ts-node - dev: true - - /typescript/4.9.5: - resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==} - engines: {node: '>=4.2.0'} - hasBin: true - - /uglify-js/3.17.4: - resolution: {integrity: sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==} - engines: {node: '>=0.8.0'} - hasBin: true - requiresBuild: true - dev: true - optional: true - - /unbox-primitive/1.0.2: - resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} - dependencies: - call-bind: 1.0.2 - has-bigints: 1.0.2 - has-symbols: 1.0.3 - which-boxed-primitive: 1.0.2 - dev: true - - /uncontrollable/7.2.1_react@17.0.2: 
- resolution: {integrity: sha512-svtcfoTADIB0nT9nltgjujTi7BzVmwjZClOmskKu/E8FW9BXzg9os8OLr4f8Dlnk0rYWJIWr4wv9eKUXiQvQwQ==} - peerDependencies: - react: '>=15.0.0' - dependencies: - '@babel/runtime': 7.20.7 - '@types/react': 17.0.52 - invariant: 2.2.4 - react: 17.0.2 - react-lifecycles-compat: 3.0.4 - dev: false - - /unfetch/4.2.0: - resolution: {integrity: sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==} - dev: true - - /unicode-canonical-property-names-ecmascript/2.0.0: - resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} - engines: {node: '>=4'} - dev: true - - /unicode-match-property-ecmascript/2.0.0: - resolution: {integrity: sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==} - engines: {node: '>=4'} - dependencies: - unicode-canonical-property-names-ecmascript: 2.0.0 - unicode-property-aliases-ecmascript: 2.1.0 - dev: true - - /unicode-match-property-value-ecmascript/2.1.0: - resolution: {integrity: sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==} - engines: {node: '>=4'} - dev: true - - /unicode-property-aliases-ecmascript/2.1.0: - resolution: {integrity: sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==} - engines: {node: '>=4'} - dev: true - - /unified/10.1.2: - resolution: {integrity: sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==} - dependencies: - '@types/unist': 2.0.6 - bail: 2.0.2 - extend: 3.0.2 - is-buffer: 2.0.5 - is-plain-obj: 4.1.0 - trough: 2.1.0 - vfile: 5.3.6 - dev: false - - /union-value/1.0.1: - resolution: {integrity: sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==} - engines: {node: '>=0.10.0'} - dependencies: - arr-union: 3.1.0 - get-value: 2.0.6 - is-extendable: 0.1.1 - set-value: 2.0.1 - 
dev: true - - /unique-string/2.0.0: - resolution: {integrity: sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==} - engines: {node: '>=8'} - dependencies: - crypto-random-string: 2.0.0 - dev: true - - /unist-builder/3.0.0: - resolution: {integrity: sha512-GFxmfEAa0vi9i5sd0R2kcrI9ks0r82NasRq5QHh2ysGngrc6GiqD5CDf1FjPenY4vApmFASBIIlk/jj5J5YbmQ==} - dependencies: - '@types/unist': 2.0.6 - dev: false - - /unist-util-generated/2.0.0: - resolution: {integrity: sha512-TiWE6DVtVe7Ye2QxOVW9kqybs6cZexNwTwSMVgkfjEReqy/xwGpAXb99OxktoWwmL+Z+Epb0Dn8/GNDYP1wnUw==} - dev: false - - /unist-util-is/3.0.0: - resolution: {integrity: sha512-sVZZX3+kspVNmLWBPAB6r+7D9ZgAFPNWm66f7YNb420RlQSbn+n8rG8dGZSkrER7ZIXGQYNm5pqC3v3HopH24A==} - dev: false - - /unist-util-is/4.1.0: - resolution: {integrity: sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==} - dev: true - - /unist-util-is/5.1.1: - resolution: {integrity: sha512-F5CZ68eYzuSvJjGhCLPL3cYx45IxkqXSetCcRgUXtbcm50X2L9oOWQlfUfDdAf+6Pd27YDblBfdtmsThXmwpbQ==} - dev: false - - /unist-util-position/4.0.3: - resolution: {integrity: sha512-p/5EMGIa1qwbXjA+QgcBXaPWjSnZfQ2Sc3yBEEfgPwsEmJd8Qh+DSk3LGnmOM4S1bY2C0AjmMnB8RuEYxpPwXQ==} - dependencies: - '@types/unist': 2.0.6 - dev: false - - /unist-util-stringify-position/3.0.2: - resolution: {integrity: sha512-7A6eiDCs9UtjcwZOcCpM4aPII3bAAGv13E96IkawkOAW0OhH+yRxtY0lzo8KiHpzEMfH7Q+FizUmwp8Iqy5EWg==} - dependencies: - '@types/unist': 2.0.6 - dev: false - - /unist-util-visit-parents/2.1.2: - resolution: {integrity: sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g==} - dependencies: - unist-util-is: 3.0.0 - dev: false - - /unist-util-visit-parents/3.1.1: - resolution: {integrity: sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==} - dependencies: - '@types/unist': 2.0.6 - unist-util-is: 4.1.0 - dev: true - - 
/unist-util-visit-parents/5.1.1: - resolution: {integrity: sha512-gks4baapT/kNRaWxuGkl5BIhoanZo7sC/cUT/JToSRNL1dYoXRFl75d++NkjYk4TAu2uv2Px+l8guMajogeuiw==} - dependencies: - '@types/unist': 2.0.6 - unist-util-is: 5.1.1 - dev: false - - /unist-util-visit/1.4.1: - resolution: {integrity: sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw==} - dependencies: - unist-util-visit-parents: 2.1.2 - dev: false - - /unist-util-visit/2.0.3: - resolution: {integrity: sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==} - dependencies: - '@types/unist': 2.0.6 - unist-util-is: 4.1.0 - unist-util-visit-parents: 3.1.1 - dev: true - - /unist-util-visit/4.1.1: - resolution: {integrity: sha512-n9KN3WV9k4h1DxYR1LoajgN93wpEi/7ZplVe02IoB4gH5ctI1AaF2670BLHQYbwj+pY83gFtyeySFiyMHJklrg==} - dependencies: - '@types/unist': 2.0.6 - unist-util-is: 5.1.1 - unist-util-visit-parents: 5.1.1 - dev: false - - /universalify/0.1.2: - resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} - engines: {node: '>= 4.0.0'} - dev: true - - /universalify/0.2.0: - resolution: {integrity: sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==} - engines: {node: '>= 4.0.0'} - dev: true - - /universalify/2.0.0: - resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} - engines: {node: '>= 10.0.0'} - dev: true - - /unload/2.2.0: - resolution: {integrity: sha512-B60uB5TNBLtN6/LsgAf3udH9saB5p7gqJwcFfbOEZ8BcBHnGwCf6G/TGiEqkRAxX7zAFIUtzdrXQSdL3Q/wqNA==} - dependencies: - '@babel/runtime': 7.20.7 - detect-node: 2.1.0 - dev: false - - /unpipe/1.0.0: - resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} - engines: {node: '>= 0.8'} - dev: true - - /unplugin/0.10.2: - resolution: {integrity: 
sha512-6rk7GUa4ICYjae5PrAllvcDeuT8pA9+j5J5EkxbMFaV+SalHhxZ7X2dohMzu6C3XzsMT+6jwR/+pwPNR3uK9MA==} - dependencies: - acorn: 8.8.2 - chokidar: 3.5.3 - webpack-sources: 3.2.3 - webpack-virtual-modules: 0.4.6 - dev: true - - /unset-value/1.0.0: - resolution: {integrity: sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ==} - engines: {node: '>=0.10.0'} - dependencies: - has-value: 0.3.1 - isobject: 3.0.1 - dev: true - - /untildify/4.0.0: - resolution: {integrity: sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==} - engines: {node: '>=8'} - dev: true - - /update-browserslist-db/1.0.10_browserslist@4.21.4: - resolution: {integrity: sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==} - hasBin: true - peerDependencies: - browserslist: '>= 4.21.0' - dependencies: - browserslist: 4.21.4 - escalade: 3.1.1 - picocolors: 1.0.0 - - /uri-js/4.4.1: - resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} - dependencies: - punycode: 2.2.0 - dev: true - - /urijs/1.19.11: - resolution: {integrity: sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==} - dev: true - - /urix/0.1.0: - resolution: {integrity: sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg==} - deprecated: Please see https://github.com/lydell/urix#deprecated - dev: true - - /url-parse/1.5.10: - resolution: {integrity: sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==} - dependencies: - querystringify: 2.2.0 - requires-port: 1.0.0 - dev: true - - /url/0.11.0: - resolution: {integrity: sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ==} - dependencies: - punycode: 1.3.2 - querystring: 0.2.0 - dev: false - - 
/use-isomorphic-layout-effect/1.1.2_q5o373oqrklnndq2vhekyuzhxi: - resolution: {integrity: sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==} - peerDependencies: - '@types/react': '*' - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - dependencies: - '@types/react': 17.0.52 - react: 17.0.2 - dev: false - - /use/3.1.1: - resolution: {integrity: sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==} - engines: {node: '>=0.10.0'} - dev: true - - /util-deprecate/1.0.2: - resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - dev: true - - /util-extend/1.0.3: - resolution: {integrity: sha512-mLs5zAK+ctllYBj+iAQvlDCwoxU/WDOUaJkcFudeiAX6OajC6BKXJUa9a+tbtkC11dz2Ufb7h0lyvIOVn4LADA==} - dev: true - - /util/0.12.5: - resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==} - dependencies: - inherits: 2.0.4 - is-arguments: 1.1.1 - is-generator-function: 1.0.10 - is-typed-array: 1.1.10 - which-typed-array: 1.1.9 - dev: true - - /utility-types/3.10.0: - resolution: {integrity: sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==} - engines: {node: '>= 4'} - dev: true - - /utils-merge/1.0.1: - resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} - engines: {node: '>= 0.4.0'} - dev: true - - /uuid-browser/3.1.0: - resolution: {integrity: sha512-dsNgbLaTrd6l3MMxTtouOCFw4CBFc/3a+GgYA2YyrJvyQ1u6q4pcu3ktLoUZ/VN/Aw9WsauazbgsgdfVWgAKQg==} - dev: true - - /uuid/8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - dev: false - - /uuid/9.0.0: - resolution: {integrity: 
sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==} - hasBin: true - dev: false - - /uvu/0.5.6: - resolution: {integrity: sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==} - engines: {node: '>=8'} - hasBin: true - dependencies: - dequal: 2.0.3 - diff: 5.1.0 - kleur: 4.1.5 - sade: 1.8.1 - dev: false - - /v8-compile-cache-lib/3.0.1: - resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - dev: true - - /v8-compile-cache/2.3.0: - resolution: {integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==} - dev: true - - /v8-to-istanbul/9.0.1: - resolution: {integrity: sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==} - engines: {node: '>=10.12.0'} - dependencies: - '@jridgewell/trace-mapping': 0.3.17 - '@types/istanbul-lib-coverage': 2.0.4 - convert-source-map: 1.9.0 - dev: true - - /validate-npm-package-license/3.0.4: - resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} - dependencies: - spdx-correct: 3.1.1 - spdx-expression-parse: 3.0.1 - dev: true - - /validate-npm-package-name/3.0.0: - resolution: {integrity: sha512-M6w37eVCMMouJ9V/sdPGnC5H4uDr73/+xdq0FBLO3TFFX1+7wiUY6Es328NN+y43tmY+doUdN9g9J21vqB7iLw==} - dependencies: - builtins: 1.0.3 - dev: true - - /validator/13.7.0: - resolution: {integrity: sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==} - engines: {node: '>= 0.10'} - dev: true - - /vary/1.1.2: - resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} - engines: {node: '>= 0.8'} - dev: true - - /vfile-message/3.1.3: - resolution: {integrity: 
sha512-0yaU+rj2gKAyEk12ffdSbBfjnnj+b1zqTBv3OQCTn8yEB02bsPizwdBPrLJjHnK+cU9EMMcUnNv938XcZIkmdA==} - dependencies: - '@types/unist': 2.0.6 - unist-util-stringify-position: 3.0.2 - dev: false - - /vfile/5.3.6: - resolution: {integrity: sha512-ADBsmerdGBs2WYckrLBEmuETSPyTD4TuLxTrw0DvjirxW1ra4ZwkbzG8ndsv3Q57smvHxo677MHaQrY9yxH8cA==} - dependencies: - '@types/unist': 2.0.6 - is-buffer: 2.0.5 - unist-util-stringify-position: 3.0.2 - vfile-message: 3.1.3 - dev: false - - /victory-vendor/36.6.8: - resolution: {integrity: sha512-H3kyQ+2zgjMPvbPqAl7Vwm2FD5dU7/4bCTQakFQnpIsfDljeOMDojRsrmJfwh4oAlNnWhpAf+mbAoLh8u7dwyQ==} - dependencies: - '@types/d3-array': 3.0.4 - '@types/d3-ease': 3.0.0 - '@types/d3-interpolate': 3.0.1 - '@types/d3-scale': 4.0.3 - '@types/d3-shape': 3.1.1 - '@types/d3-time': 3.0.0 - '@types/d3-timer': 3.0.0 - d3-array: 3.2.1 - d3-ease: 3.0.1 - d3-interpolate: 3.0.1 - d3-scale: 4.0.2 - d3-shape: 3.2.0 - d3-time: 3.1.0 - d3-timer: 3.0.1 - dev: false - - /vite-plugin-checker/0.5.5_svpeoejlom624h637oo4yy72dy: - resolution: {integrity: sha512-BLaRlBmiVn3Fg/wR9A0+YNwgXVteFJaH8rCIiIgYQcQ50jc3oVe2m8i0xxG5geq36UttNJsAj7DpDelN7/KjOg==} - engines: {node: '>=14.16'} - peerDependencies: - eslint: '>=7' - meow: ^9.0.0 - optionator: ^0.9.1 - stylelint: '>=13' - typescript: '*' - vite: '>=2.0.0' - vls: '*' - vti: '*' - vue-tsc: '*' - peerDependenciesMeta: - eslint: - optional: true - meow: - optional: true - optionator: - optional: true - stylelint: - optional: true - typescript: - optional: true - vls: - optional: true - vti: - optional: true - vue-tsc: - optional: true - dependencies: - '@babel/code-frame': 7.18.6 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - chokidar: 3.5.3 - commander: 8.3.0 - eslint: 8.32.0 - fast-glob: 3.2.12 - fs-extra: 11.1.0 - lodash.debounce: 4.0.8 - lodash.pick: 4.4.0 - meow: 9.0.0 - npm-run-path: 4.0.1 - optionator: 0.9.1 - strip-ansi: 6.0.1 - stylelint: 14.16.1 - tiny-invariant: 1.3.1 - typescript: 4.9.5 - vite: 4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a - 
vscode-languageclient: 7.0.0 - vscode-languageserver: 7.0.0 - vscode-languageserver-textdocument: 1.0.8 - vscode-uri: 3.0.7 - dev: true - - /vite-plugin-svgr/2.4.0_vite@4.0.4: - resolution: {integrity: sha512-q+mJJol6ThvqkkJvvVFEndI4EaKIjSI0I3jNFgSoC9fXAz1M7kYTVUin8fhUsFojFDKZ9VHKtX6NXNaOLpbsHA==} - peerDependencies: - vite: ^2.6.0 || 3 || 4 - dependencies: - '@rollup/pluginutils': 5.0.2 - '@svgr/core': 6.5.1 - vite: 4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a - transitivePeerDependencies: - - rollup - - supports-color - dev: true - - /vite-tsconfig-paths/4.0.5_typescript@4.9.5: - resolution: {integrity: sha512-/L/eHwySFYjwxoYt1WRJniuK/jPv+WGwgRGBYx3leciR5wBeqntQpUE6Js6+TJemChc+ter7fDBKieyEWDx4yQ==} - dependencies: - debug: 4.3.4 - globrex: 0.1.2 - tsconfck: 2.0.2_typescript@4.9.5 - transitivePeerDependencies: - - supports-color - - typescript - dev: true - - /vite/4.0.4_arwryhsn4zwmtf5pq2mmdxlt6a: - resolution: {integrity: sha512-xevPU7M8FU0i/80DMR+YhgrzR5KS2ORy1B4xcX/cXLsvnUWvfHuqMmVU6N0YiJ4JWGRJJsLCgjEzKjG9/GKoSw==} - engines: {node: ^14.18.0 || >=16.0.0} - hasBin: true - peerDependencies: - '@types/node': '>= 14' - less: '*' - sass: '*' - stylus: '*' - sugarss: '*' - terser: ^5.4.0 - peerDependenciesMeta: - '@types/node': - optional: true - less: - optional: true - sass: - optional: true - stylus: - optional: true - sugarss: - optional: true - terser: - optional: true - dependencies: - '@types/node': 17.0.45 - esbuild: 0.16.17 - postcss: 8.4.21 - resolve: 1.22.1 - rollup: 3.10.1 - sass: 1.57.1 - optionalDependencies: - fsevents: 2.3.2 - dev: true - - /vm2/3.9.13: - resolution: {integrity: sha512-0rvxpB8P8Shm4wX2EKOiMp7H2zq+HUE/UwodY0pCZXs9IffIKZq6vUti5OgkVCTakKo9e/fgO4X1fkwfjWxE3Q==} - engines: {node: '>=6.0'} - hasBin: true - dependencies: - acorn: 8.8.2 - acorn-walk: 8.2.0 - dev: true - - /vscode-jsonrpc/6.0.0: - resolution: {integrity: sha512-wnJA4BnEjOSyFMvjZdpiOwhSq9uDoK8e/kpRJDTaMYzwlkrhG1fwDIZI94CLsLzlCK5cIbMMtFlJlfR57Lavmg==} - engines: {node: '>=8.0.0 || 
>=10.0.0'} - dev: true - - /vscode-languageclient/7.0.0: - resolution: {integrity: sha512-P9AXdAPlsCgslpP9pRxYPqkNYV7Xq8300/aZDpO35j1fJm/ncize8iGswzYlcvFw5DQUx4eVk+KvfXdL0rehNg==} - engines: {vscode: ^1.52.0} - dependencies: - minimatch: 3.1.2 - semver: 7.3.8 - vscode-languageserver-protocol: 3.16.0 - dev: true - - /vscode-languageserver-protocol/3.16.0: - resolution: {integrity: sha512-sdeUoAawceQdgIfTI+sdcwkiK2KU+2cbEYA0agzM2uqaUy2UpnnGHtWTHVEtS0ES4zHU0eMFRGN+oQgDxlD66A==} - dependencies: - vscode-jsonrpc: 6.0.0 - vscode-languageserver-types: 3.16.0 - dev: true - - /vscode-languageserver-textdocument/1.0.8: - resolution: {integrity: sha512-1bonkGqQs5/fxGT5UchTgjGVnfysL0O8v1AYMBjqTbWQTFn721zaPGDYFkOKtfDgFiSgXM3KwaG3FMGfW4Ed9Q==} - dev: true - - /vscode-languageserver-types/3.16.0: - resolution: {integrity: sha512-k8luDIWJWyenLc5ToFQQMaSrqCHiLwyKPHKPQZ5zz21vM+vIVUSvsRpcbiECH4WR88K2XZqc4ScRcZ7nk/jbeA==} - dev: true - - /vscode-languageserver/7.0.0: - resolution: {integrity: sha512-60HTx5ID+fLRcgdHfmz0LDZAXYEV68fzwG0JWwEPBode9NuMYTIxuYXPg4ngO8i8+Ou0lM7y6GzaYWbiDL0drw==} - hasBin: true - dependencies: - vscode-languageserver-protocol: 3.16.0 - dev: true - - /vscode-uri/3.0.7: - resolution: {integrity: sha512-eOpPHogvorZRobNqJGhapa0JdwaxpjVvyBp0QIUMRMSf8ZAlqOdEquKuRmw9Qwu0qXtJIWqFtMkmvJjUZmMjVA==} - dev: true - - /w3c-xmlserializer/4.0.0: - resolution: {integrity: sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==} - engines: {node: '>=14'} - dependencies: - xml-name-validator: 4.0.0 - dev: true - - /walker/1.0.8: - resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} - dependencies: - makeerror: 1.0.12 - dev: true - - /warning/3.0.0: - resolution: {integrity: sha512-jMBt6pUrKn5I+OGgtQ4YZLdhIeJmObddh6CsibPxyQ5yPZm1XExSyzC1LCNX7BzhxWgiHmizBWJTHJIjMjTQYQ==} - dependencies: - loose-envify: 1.4.0 - dev: false - - /warning/4.0.3: - resolution: {integrity: 
sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==} - dependencies: - loose-envify: 1.4.0 - - /watchpack/2.4.0: - resolution: {integrity: sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==} - engines: {node: '>=10.13.0'} - dependencies: - glob-to-regexp: 0.4.1 - graceful-fs: 4.2.10 - dev: true - - /wcwidth/1.0.1: - resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} - dependencies: - defaults: 1.0.4 - dev: true - - /webidl-conversions/3.0.1: - resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} - - /webidl-conversions/7.0.0: - resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} - engines: {node: '>=12'} - dev: true - - /webpack-sources/3.2.3: - resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==} - engines: {node: '>=10.13.0'} - dev: true - - /webpack-virtual-modules/0.4.6: - resolution: {integrity: sha512-5tyDlKLqPfMqjT3Q9TAqf2YqjwmnUleZwzJi1A5qXnlBCdj2AtOJ6wAWdglTIDOPgOiOrXeBeFcsQ8+aGQ6QbA==} - dev: true - - /websocket-driver/0.7.4: - resolution: {integrity: sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==} - engines: {node: '>=0.8.0'} - dependencies: - http-parser-js: 0.5.8 - safe-buffer: 5.2.1 - websocket-extensions: 0.1.4 - dev: false - - /websocket-extensions/0.1.4: - resolution: {integrity: sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==} - engines: {node: '>=0.8.0'} - dev: false - - /whatwg-encoding/2.0.0: - resolution: {integrity: sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==} - engines: {node: '>=12'} - dependencies: - iconv-lite: 0.6.3 - dev: true - - 
/whatwg-fetch/2.0.4: - resolution: {integrity: sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng==} - dev: false - - /whatwg-mimetype/3.0.0: - resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==} - engines: {node: '>=12'} - dev: true - - /whatwg-url/11.0.0: - resolution: {integrity: sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==} - engines: {node: '>=12'} - dependencies: - tr46: 3.0.0 - webidl-conversions: 7.0.0 - dev: true - - /whatwg-url/5.0.0: - resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} - dependencies: - tr46: 0.0.3 - webidl-conversions: 3.0.1 - - /which-boxed-primitive/1.0.2: - resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} - dependencies: - is-bigint: 1.0.4 - is-boolean-object: 1.1.2 - is-number-object: 1.0.7 - is-string: 1.0.7 - is-symbol: 1.0.4 - dev: true - - /which-collection/1.0.1: - resolution: {integrity: sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==} - dependencies: - is-map: 2.0.2 - is-set: 2.0.2 - is-weakmap: 2.0.1 - is-weakset: 2.0.2 - dev: true - - /which-typed-array/1.1.9: - resolution: {integrity: sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==} - engines: {node: '>= 0.4'} - dependencies: - available-typed-arrays: 1.0.5 - call-bind: 1.0.2 - for-each: 0.3.3 - gopd: 1.0.1 - has-tostringtag: 1.0.0 - is-typed-array: 1.1.10 - dev: true - - /which/1.3.1: - resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} - hasBin: true - dependencies: - isexe: 2.0.0 - dev: true - - /which/2.0.2: - resolution: {integrity: 
sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true - dependencies: - isexe: 2.0.0 - dev: true - - /wide-align/1.1.5: - resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==} - dependencies: - string-width: 4.2.3 - dev: true - - /widest-line/3.1.0: - resolution: {integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==} - engines: {node: '>=8'} - dependencies: - string-width: 4.2.3 - dev: true - - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} - engines: {node: '>=0.10.0'} - dev: true - - /wordwrap/1.0.0: - resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} - dev: true - - /wrap-ansi/6.2.0: - resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} - engines: {node: '>=8'} - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - dev: true - - /wrap-ansi/7.0.0: - resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} - engines: {node: '>=10'} - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - - /wrappy/1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - - /write-file-atomic/2.4.3: - resolution: {integrity: sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==} - dependencies: - graceful-fs: 4.2.10 - imurmurhash: 0.1.4 - signal-exit: 3.0.7 - dev: true - - /write-file-atomic/4.0.2: - resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==} - 
engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - imurmurhash: 0.1.4 - signal-exit: 3.0.7 - dev: true - - /ws/6.2.2: - resolution: {integrity: sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - dependencies: - async-limiter: 1.0.1 - dev: true - - /ws/8.12.0: - resolution: {integrity: sha512-kU62emKIdKVeEIOIKVegvqpXMSTAMLJozpHZaJNDYqBjzlSYXQGviYwN1osDLJ9av68qHd4a2oSjd7yD4pacig==} - engines: {node: '>=10.0.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: '>=5.0.2' - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - dev: true - - /xml-name-validator/4.0.0: - resolution: {integrity: sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==} - engines: {node: '>=12'} - dev: true - - /xmlchars/2.2.0: - resolution: {integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==} - dev: true - - /xregexp/2.0.0: - resolution: {integrity: sha512-xl/50/Cf32VsGq/1R8jJE5ajH1yMCQkpmoS10QbFZWl2Oor4H0Me64Pu2yxvsRWK3m6soJbmGfzSR7BYmDcWAA==} - dev: true - - /y18n/5.0.8: - resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} - engines: {node: '>=10'} - - /yallist/3.1.1: - resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} - - /yallist/4.0.0: - resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} - dev: true - - /yaml-js/0.2.3: - resolution: {integrity: sha512-6xUQtVKl1qcd0EXtTEzUDVJy9Ji1fYa47LtkDtYKlIjhibPE9knNPmoRyf6SGREFHlOAUyDe9OdYqRP4DuSi5Q==} - dev: true - - /yaml/1.10.2: - resolution: {integrity: 
sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} - engines: {node: '>= 6'} - - /yaml/2.2.1: - resolution: {integrity: sha512-e0WHiYql7+9wr4cWMx3TVQrNwejKaEe7/rHNmQmqRjazfOP5W8PB6Jpebb5o6fIapbz9o9+2ipcaTM2ZwDI6lw==} - engines: {node: '>= 14'} - dev: true - - /yargs-parser/20.2.9: - resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} - engines: {node: '>=10'} - - /yargs-parser/21.1.1: - resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} - engines: {node: '>=12'} - dev: true - - /yargs/16.2.0: - resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==} - engines: {node: '>=10'} - dependencies: - cliui: 7.0.4 - escalade: 3.1.1 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 20.2.9 - - /yargs/17.3.1: - resolution: {integrity: sha512-WUANQeVgjLbNsEmGk20f+nlHgOqzRFpiGWVaBrYGYIGANIIu3lWjoyi0fNlFmJkvfhCZ6BXINe7/W2O2bV4iaA==} - engines: {node: '>=12'} - dependencies: - cliui: 7.0.4 - escalade: 3.1.1 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 21.1.1 - dev: true - - /yargs/17.6.2: - resolution: {integrity: sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==} - engines: {node: '>=12'} - dependencies: - cliui: 8.0.1 - escalade: 3.1.1 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 21.1.1 - dev: true - - /yauzl/2.10.0: - resolution: {integrity: sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==} - dependencies: - buffer-crc32: 0.2.13 - fd-slicer: 1.1.0 - dev: true - - /yn/3.1.1: - resolution: {integrity: 
sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} - engines: {node: '>=6'} - dev: true - - /yocto-queue/0.1.0: - resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} - engines: {node: '>=10'} - dev: true - - /yup/0.32.11: - resolution: {integrity: sha512-Z2Fe1bn+eLstG8DRR6FTavGD+MeAwyfmouhHsIUgaADz8jvFKbO/fXc2trJKZg+5EBjh4gGm3iU/t3onKlXHIg==} - engines: {node: '>=10'} - dependencies: - '@babel/runtime': 7.20.7 - '@types/lodash': 4.14.191 - lodash: 4.17.21 - lodash-es: 4.17.21 - nanoclone: 0.2.1 - property-expr: 2.0.5 - toposort: 2.0.2 - dev: false - - /zwitch/2.0.4: - resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} - dev: false diff --git a/airbyte-webapp/public/cactus.png b/airbyte-webapp/public/cactus.png deleted file mode 100644 index ca452bd25e3c..000000000000 Binary files a/airbyte-webapp/public/cactus.png and /dev/null differ diff --git a/airbyte-webapp/public/cloud-main-logo.svg b/airbyte-webapp/public/cloud-main-logo.svg deleted file mode 100644 index 8987b1b4d027..000000000000 --- a/airbyte-webapp/public/cloud-main-logo.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/airbyte-webapp/public/connectors/google/btn_google_light_normal_ios.svg b/airbyte-webapp/public/connectors/google/btn_google_light_normal_ios.svg deleted file mode 100644 index 032b6ac5f761..000000000000 --- a/airbyte-webapp/public/connectors/google/btn_google_light_normal_ios.svg +++ /dev/null @@ -1,43 +0,0 @@ - - - - btn_google_light_normal_ios - Created with Sketch. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/airbyte-webapp/public/default-logo-catalog.svg b/airbyte-webapp/public/default-logo-catalog.svg deleted file mode 100644 index 528e851e8ec7..000000000000 --- a/airbyte-webapp/public/default-logo-catalog.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/airbyte-webapp/public/empty-connections.png b/airbyte-webapp/public/empty-connections.png deleted file mode 100644 index c7db3d284e02..000000000000 Binary files a/airbyte-webapp/public/empty-connections.png and /dev/null differ diff --git a/airbyte-webapp/public/empty-destinations.png b/airbyte-webapp/public/empty-destinations.png deleted file mode 100644 index a909cf38f865..000000000000 Binary files a/airbyte-webapp/public/empty-destinations.png and /dev/null differ diff --git a/airbyte-webapp/public/empty-sources.png b/airbyte-webapp/public/empty-sources.png deleted file mode 100644 index 9377bde91cf5..000000000000 Binary files a/airbyte-webapp/public/empty-sources.png and /dev/null differ diff --git a/airbyte-webapp/public/favicon.ico b/airbyte-webapp/public/favicon.ico deleted file mode 100644 index f1e01696c6d2..000000000000 Binary files a/airbyte-webapp/public/favicon.ico and /dev/null differ diff --git a/airbyte-webapp/public/fonts/inter/Inter-italic.var.woff2 b/airbyte-webapp/public/fonts/inter/Inter-italic.var.woff2 deleted file mode 100644 index b826d5af84b3..000000000000 Binary files a/airbyte-webapp/public/fonts/inter/Inter-italic.var.woff2 and /dev/null differ diff --git a/airbyte-webapp/public/fonts/inter/Inter-roman.var.woff2 b/airbyte-webapp/public/fonts/inter/Inter-roman.var.woff2 deleted file mode 100644 index 6a256a068f0d..000000000000 Binary files a/airbyte-webapp/public/fonts/inter/Inter-roman.var.woff2 and /dev/null differ diff --git a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.eot b/airbyte-webapp/public/fonts/roboto/Roboto-Medium.eot deleted file mode 100644 index 
5ca6bb94e9ed..000000000000 Binary files a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.eot and /dev/null differ diff --git a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.svg b/airbyte-webapp/public/fonts/roboto/Roboto-Medium.svg deleted file mode 100644 index c18a0e924171..000000000000 --- a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.svg +++ /dev/null @@ -1,1063 +0,0 @@ - - - - -This is a custom SVG webfont generated by Font Squirrel. -Copyright : Font data copyright Google 2011 -Designer : Google -Foundry URL : Googlecom - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.ttf b/airbyte-webapp/public/fonts/roboto/Roboto-Medium.ttf deleted file mode 100644 index 26984b15a292..000000000000 Binary files a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.ttf and /dev/null differ diff --git a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.woff b/airbyte-webapp/public/fonts/roboto/Roboto-Medium.woff deleted file mode 100644 index 15166091980f..000000000000 Binary files a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.woff and /dev/null differ diff --git a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.woff2 b/airbyte-webapp/public/fonts/roboto/Roboto-Medium.woff2 deleted file mode 100644 index d10a59261f04..000000000000 Binary files a/airbyte-webapp/public/fonts/roboto/Roboto-Medium.woff2 and /dev/null differ diff --git a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.eot b/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.eot deleted file mode 100644 index a3ae3682aeb5..000000000000 Binary files a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.eot and /dev/null differ diff --git a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.svg b/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.svg deleted file mode 100644 index 296abd0c6f2e..000000000000 --- 
a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.svg +++ /dev/null @@ -1,2811 +0,0 @@ - - - - -Created by FontForge 20190801 at Wed May 20 23:55:39 2015 - By Jimmy Wärting -Copyright 2015 Google Inc. All Rights Reserved. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.ttf b/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.ttf deleted file mode 100644 index b920169edb7c..000000000000 Binary files a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.ttf and /dev/null differ diff --git a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.woff b/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.woff deleted file mode 100644 index 74646233623b..000000000000 Binary files a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.woff and /dev/null differ diff --git a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.woff2 b/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.woff2 deleted file mode 100644 index 8ba3b03fccf5..000000000000 Binary files a/airbyte-webapp/public/fonts/robotoMono/RobotoMono-Regular.woff2 and /dev/null differ diff --git a/airbyte-webapp/public/images/airbyte/logo.svg b/airbyte-webapp/public/images/airbyte/logo.svg deleted file mode 100644 index a2f78bbe3972..000000000000 --- a/airbyte-webapp/public/images/airbyte/logo.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/airbyte-webapp/public/images/connector-builder/import-yaml.svg b/airbyte-webapp/public/images/connector-builder/import-yaml.svg deleted file mode 100644 index 62b89447dcec..000000000000 --- a/airbyte-webapp/public/images/connector-builder/import-yaml.svg +++ /dev/null @@ -1,155 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - diff --git a/airbyte-webapp/public/images/connector-builder/load-existing-connector.svg b/airbyte-webapp/public/images/connector-builder/load-existing-connector.svg deleted file mode 100644 index c322290d539f..000000000000 --- a/airbyte-webapp/public/images/connector-builder/load-existing-connector.svg +++ /dev/null @@ -1,352 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/airbyte-webapp/public/images/connector-builder/start-from-scratch.svg b/airbyte-webapp/public/images/connector-builder/start-from-scratch.svg deleted file mode 100644 index 7b152f391e4e..000000000000 --- a/airbyte-webapp/public/images/connector-builder/start-from-scratch.svg +++ /dev/null @@ -1,223 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/airbyte-webapp/public/images/octavia/biting-nails.png b/airbyte-webapp/public/images/octavia/biting-nails.png deleted file mode 100644 index 
cb2073c17b25..000000000000 Binary files a/airbyte-webapp/public/images/octavia/biting-nails.png and /dev/null differ diff --git a/airbyte-webapp/public/images/octavia/pointing.svg b/airbyte-webapp/public/images/octavia/pointing.svg deleted file mode 100644 index b4453c1fb31d..000000000000 --- a/airbyte-webapp/public/images/octavia/pointing.svg +++ /dev/null @@ -1,52 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/airbyte-webapp/public/images/testimonials/cartdotcom-logo.svg b/airbyte-webapp/public/images/testimonials/cartdotcom-logo.svg deleted file mode 100644 index a9e5e18e799c..000000000000 --- a/airbyte-webapp/public/images/testimonials/cartdotcom-logo.svg +++ /dev/null @@ -1,44 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/airbyte-webapp/public/images/testimonials/cartdotcom-person-photo.png b/airbyte-webapp/public/images/testimonials/cartdotcom-person-photo.png deleted file mode 100644 index ac135a7e079b..000000000000 Binary files a/airbyte-webapp/public/images/testimonials/cartdotcom-person-photo.png and /dev/null differ diff --git a/airbyte-webapp/public/index.css b/airbyte-webapp/public/index.css deleted file mode 100644 index 27f070aa1bbb..000000000000 --- a/airbyte-webapp/public/index.css +++ /dev/null @@ -1,85 +0,0 @@ -/* stylelint-disable color-no-hex */ -body, html { - margin: 0; - height: 100%; - width: 100%; -} - -noscript { - box-sizing: border-box; - background: #F8F8FF; - height: 100%; - width: 100%; - padding: 34px 0 13px; - display: flex; - align-items: center; - flex-direction: column; -} - -.card { - width: 100%; - max-width: 600px; - padding: 50px 10px; - background: #FFF; - border-radius: 8px; - box-shadow: 0 1px 2px rgba(0, 0, 0, 25%); - border: 1px solid #E8E8ED; - margin-top: 20px; - text-align: center; - font-style: normal; - font-weight: 500; - color: #1A1A21; - font-size: 18px; - line-height: 22px; - font-family: Inter, Helvetica, Arial, sans-serif; -} - 
-@font-face { - font-family: 'Inter'; - font-weight: 100 900; - font-display: swap; - font-style: normal; - src: url("/fonts/inter/Inter-roman.var.woff2?v=3.19") format("woff2"); -} - -@font-face { - font-family: 'Inter'; - font-weight: 100 900; - font-display: swap; - font-style: italic; - src: url("/fonts/inter/Inter-italic.var.woff2?v=3.19") format("woff2"); -} - -/* Regular Roboto Mono */ -@font-face { - font-family: "Roboto Mono"; - src: url("/fonts/robotoMono/RobotoMono-Regular.eot"); /* IE9 Compat Modes */ - src: local("Roboto Mono"), - url("/fonts/robotoMono/RobotoMono-Regular.eot?#iefix") - format("embedded-opentype"), - /* IE6-IE8 */ url("/fonts/robotoMono/RobotoMono-Regular.woff2") - format("woff2"), - /* Super Modern Browsers */ url("/fonts/robotoMono/RobotoMono-Regular.woff") - format("woff"), - /* Modern Browsers */ url("/fonts/robotoMono/RobotoMono-Regular.ttf") - format("truetype"), - /* Safari, Android, iOS */ - url("/fonts/robotoMono/RobotoMono-Regular.svg#OpenSans") format("svg"); /* Legacy iOS */ - - font-weight: normal; - font-style: normal; -} - -/* Roboto Medium */ -@font-face { - font-family: "Roboto"; - font-style: normal; - font-weight: 500; - src: url("/fonts/roboto/Roboto-Medium.eot"); /* IE9 Compat Modes */ - src: local("Roboto"), - url("/fonts/roboto/Roboto-Medium.eot?#iefix") format("embedded-opentype"), /* IE6-IE8 */ - url("/fonts/roboto/Roboto-Medium.woff2") format("woff2"), /* Super Modern Browsers */ - url("/fonts/roboto/Roboto-Medium.woff") format("woff"), /* Modern Browsers */ - url("/fonts/roboto/Roboto-Medium.ttf") format("truetype"), /* Safari, Android, iOS */ - url("/fonts/roboto/Roboto-Medium.svg#RobotoMedium") format("svg"); /* Legacy iOS */ -} diff --git a/airbyte-webapp/public/logo.png b/airbyte-webapp/public/logo.png deleted file mode 100644 index f7b4e1b72580..000000000000 Binary files a/airbyte-webapp/public/logo.png and /dev/null differ diff --git a/airbyte-webapp/public/logo112.png 
b/airbyte-webapp/public/logo112.png deleted file mode 100644 index a0f50b6e46c7..000000000000 Binary files a/airbyte-webapp/public/logo112.png and /dev/null differ diff --git a/airbyte-webapp/public/logo224.png b/airbyte-webapp/public/logo224.png deleted file mode 100644 index 941e0a7beacc..000000000000 Binary files a/airbyte-webapp/public/logo224.png and /dev/null differ diff --git a/airbyte-webapp/public/logo336.png b/airbyte-webapp/public/logo336.png deleted file mode 100644 index 746028bf073c..000000000000 Binary files a/airbyte-webapp/public/logo336.png and /dev/null differ diff --git a/airbyte-webapp/public/manifest.json b/airbyte-webapp/public/manifest.json deleted file mode 100644 index fcd8add36ff1..000000000000 --- a/airbyte-webapp/public/manifest.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "short_name": "Airbyte", - "name": "Airbyte", - "icons": [ - { - "src": "favicon.ico", - "sizes": "64x64 32x32 24x24 16x16", - "type": "image/x-icon" - }, - { - "src": "logo112.png", - "type": "image/png", - "sizes": "112x112" - }, - { - "src": "logo224.png", - "type": "image/png", - "sizes": "224x224" - }, - { - "src": "logo336.png", - "type": "image/png", - "sizes": "336x336" - } - ], - "start_url": ".", - "display": "standalone", - "theme_color": "#625EFF", - "background_color": "#F8F8FF" -} diff --git a/airbyte-webapp/public/newsletter.png b/airbyte-webapp/public/newsletter.png deleted file mode 100644 index f49f174b00f4..000000000000 Binary files a/airbyte-webapp/public/newsletter.png and /dev/null differ diff --git a/airbyte-webapp/public/play.svg b/airbyte-webapp/public/play.svg deleted file mode 100644 index a6976352ad28..000000000000 --- a/airbyte-webapp/public/play.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - \ No newline at end of file diff --git a/airbyte-webapp/public/process-arrow.svg b/airbyte-webapp/public/process-arrow.svg deleted file mode 100644 index 1258bc739c8a..000000000000 --- a/airbyte-webapp/public/process-arrow.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - 
diff --git a/airbyte-webapp/public/rectangle.svg b/airbyte-webapp/public/rectangle.svg deleted file mode 100644 index 66aa72f35d11..000000000000 --- a/airbyte-webapp/public/rectangle.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/airbyte-webapp/public/robots.txt b/airbyte-webapp/public/robots.txt deleted file mode 100644 index 01b0f9a10733..000000000000 --- a/airbyte-webapp/public/robots.txt +++ /dev/null @@ -1,2 +0,0 @@ -# https://www.robotstxt.org/robotstxt.html -User-agent: * diff --git a/airbyte-webapp/public/rocket.png b/airbyte-webapp/public/rocket.png deleted file mode 100644 index c5cf200f9d46..000000000000 Binary files a/airbyte-webapp/public/rocket.png and /dev/null differ diff --git a/airbyte-webapp/public/videoCover.png b/airbyte-webapp/public/videoCover.png deleted file mode 100644 index 402cdfb7e16d..000000000000 Binary files a/airbyte-webapp/public/videoCover.png and /dev/null differ diff --git a/airbyte-webapp/public/welcome.svg b/airbyte-webapp/public/welcome.svg deleted file mode 100644 index 655b61528438..000000000000 --- a/airbyte-webapp/public/welcome.svg +++ /dev/null @@ -1,52 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/airbyte-webapp/scripts/dev-overwrites.js b/airbyte-webapp/scripts/dev-overwrites.js deleted file mode 100644 index 6ec83d1df1b2..000000000000 --- a/airbyte-webapp/scripts/dev-overwrites.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require("fs"); -const { isMainThread } = require("node:worker_threads"); -const path = require("path"); - -const chalk = require("chalk"); - -const EXPERIMENTS_FILE = path.resolve(__dirname, "../.experiments.json"); - -if (fs.existsSync(EXPERIMENTS_FILE)) { - const overwrites = require(EXPERIMENTS_FILE); - - if (Object.keys(overwrites).length) { - if (isMainThread) { - // Only print the message in the main thread, so it's not showing up in all the worker threads of vite-plugin-checker - console.log(chalk.bold(`🧪 
Overwriting experiments via ${chalk.green(".experiments.json")}`)); - Object.entries(overwrites).forEach(([key, value]) => { - console.log(` ➜ ${chalk.cyan(key)}: ${JSON.stringify(value)}`); - }); - } - process.env.REACT_APP_EXPERIMENT_OVERWRITES = JSON.stringify(overwrites); - } -} diff --git a/airbyte-webapp/scripts/environment.js b/airbyte-webapp/scripts/environment.js deleted file mode 100644 index 4fa12398a6fb..000000000000 --- a/airbyte-webapp/scripts/environment.js +++ /dev/null @@ -1,25 +0,0 @@ -const fs = require("fs"); -const path = require("path"); - -const dotenv = require("dotenv"); - -if (!process.env.AB_ENV) { - return; -} - -const envFile = path.resolve( - __dirname, - "../../../airbyte-platform-internal/cloud-webapp/development", - `.env.${process.env.AB_ENV}` -); - -if (!fs.existsSync(envFile)) { - console.error( - `~~~ This mode is for Airbyte employees only. ~~~\n` + - `Could not find .env file for environment ${process.env.AB_ENV} (looking at ${envFile}).\n` + - `Make sure you have the latest airbyte-platform-internal repository checked out in a directory directly next to the airbyte OSS repository.\n` - ); - process.exit(42); -} - -dotenv.config({ path: envFile }); diff --git a/airbyte-webapp/scripts/license-check.js b/airbyte-webapp/scripts/license-check.js deleted file mode 100644 index 938e95275301..000000000000 --- a/airbyte-webapp/scripts/license-check.js +++ /dev/null @@ -1,111 +0,0 @@ -const path = require("path"); -const { promisify } = require("util"); - -const checker = require("license-checker"); - -const { version } = require("../package.json"); - -/** - * A list of all the allowed licenses that production dependencies can have. 
- */ -const ALLOWED_LICENSES = [ - "(Apache-2.0 OR MPL-1.1)", - "(AFL-2.1 OR BSD-3-Clause)", - "(AFLv2.1 OR BSD)", - "(BSD-2-Clause OR MIT OR Apache-2.0)", - "(BSD-3-Clause AND Apache-2.0)", - "(BSD-3-Clause OR GPL-2.0)", - "(CC-BY-4.0 AND MIT)", - "(MIT OR Apache-2.0)", - "(MIT OR CC0-1.0)", - "(MIT OR GPL-3.0)", - "(MIT OR GPL-3.0-or-later)", - "(MIT OR WTFPL)", - "(MIT AND CC-BY-3.0)", - "(MIT AND BSD-3-Clause)", - "(MIT AND Zlib)", - "(WTFPL OR MIT)", - "BSD-3-Clause OR MIT", - "0BSD", - "Apache", - "Apache-2.0", - "BSD", - "BSD-2-Clause", - "BSD-3-Clause", - "CC0-1.0", - "CC-BY-3.0", - "CC-BY-4.0", - "ISC", - "MIT", - "MPL-2.0", - "Public Domain", - "Python-2.0", - "Unlicense", - "WTFPL", -]; - -/** - * Licenses that should be allowed only for dev dependencies. - */ -const ALLOWED_DEV_LICENSES = [...ALLOWED_LICENSES, "ODC-By-1.0", "MPL-2.0"]; - -/** - * A list of all packages that should be excluded from license checking. - */ -const IGNORED_PACKAGES = [`airbyte-webapp@${version}`]; - -/** - * Overwrite licenses for specific packages manually, e.g. because they can't be detected properly. - */ -const LICENSE_OVERWRITES = { - "glob-to-regexp@0.3.0": "BSD-3-Clause", - "trim@0.0.1": "MIT", - "backslash@0.2.0": "MIT", - "browser-assert@1.2.1": "MIT", // via README (https://github.com/socialally/browser-assert/tree/v1.2.1) -}; - -const checkLicenses = promisify(checker.init); -const params = { - start: path.join(__dirname, ".."), - excludePackages: IGNORED_PACKAGES.join(";"), - unknown: true, -}; - -function validateLicenes(licenses, allowedLicenes, usedOverwrites) { - let licensesValid = true; - for (const [pkg, info] of Object.entries(licenses)) { - let license = Array.isArray(info.licenses) ? 
`(${info.licenses.join(" OR ")})` : info.licenses; - if (LICENSE_OVERWRITES[pkg]) { - license = LICENSE_OVERWRITES[pkg]; - usedOverwrites.add(pkg); - } - if (license.endsWith("*")) { - license = license.substr(0, license.length - 1); - console.log(`Guessed license for package ${pkg}: ${license}`); - } - if (!license || !allowedLicenes.includes(license)) { - licensesValid = false; - console.error(`Package ${pkg} has incompatible license: ${license}`); - } - } - - return licensesValid; -} - -Promise.all([checkLicenses({ ...params, production: true }), checkLicenses({ ...params, development: true })]).then( - ([prod, dev]) => { - const usedOverwrites = new Set(); - const prodLicensesValid = validateLicenes(prod, ALLOWED_LICENSES, usedOverwrites); - const devLicensesValid = validateLicenes(dev, ALLOWED_DEV_LICENSES, usedOverwrites); - - for (const overwrite of Object.keys(LICENSE_OVERWRITES)) { - if (!usedOverwrites.has(overwrite)) { - console.warn(`License overwrite for ${overwrite} is no longer needed and can be deleted.`); - } - } - - if (!prodLicensesValid || !devLicensesValid) { - process.exit(1); - } - } -); diff --git a/airbyte-webapp/scripts/load-declarative-schema.sh b/airbyte-webapp/scripts/load-declarative-schema.sh deleted file mode 100755 index 84e256e5d580..000000000000 --- a/airbyte-webapp/scripts/load-declarative-schema.sh +++ /dev/null @@ -1,28 +0,0 @@ -# This script makes sure the json schema for the low code connector manifest is provided for orval to build the Typescript types -# used by the connector builder UI. It either downloads a released version from PyPI or copies it over from a specified file path. - -set -e -mkdir -p build - -DEFAULT_CDK_VERSION=`cat ../airbyte-connector-builder-server/CDK_VERSION` - -if [ -z "$CDK_VERSION" ] -then - CDK_VERSION=$DEFAULT_CDK_VERSION -fi - - -if [ -z "$CDK_MANIFEST_PATH" ] -then - TARGET_FILE="build/declarative_component_schema-${CDK_VERSION}.yaml" - if [ ! 
-f "$TARGET_FILE" ]; then - echo "Downloading CDK manifest schema $CDK_VERSION from pypi" - curl -L https://pypi.python.org/packages/source/a/airbyte-cdk/airbyte-cdk-${CDK_VERSION}.tar.gz | tar -xzO airbyte-cdk-${CDK_VERSION}/airbyte_cdk/sources/declarative/declarative_component_schema.yaml > ${TARGET_FILE} - else - echo "Found cached CDK manifest schema $CDK_VERSION" - fi - cp ${TARGET_FILE} build/declarative_component_schema.yaml -else - echo "Copying local CDK manifest version from $CDK_MANIFEST_PATH" - cp ${CDK_MANIFEST_PATH} build/declarative_component_schema.yaml -fi \ No newline at end of file diff --git a/airbyte-webapp/scripts/validate-links.ts b/airbyte-webapp/scripts/validate-links.ts deleted file mode 100644 index 674627c27186..000000000000 --- a/airbyte-webapp/scripts/validate-links.ts +++ /dev/null @@ -1,39 +0,0 @@ -#! /usr/bin/env ts-node - -import fetch from "node-fetch"; - -import { links } from "../src/utils/links"; - -async function run() { - // Query all domains and wait for results - const results = await Promise.allSettled( - Object.entries(links).map(([key, url]) => { - return fetch(url, { headers: { "user-agent": "ValidateLinksCheck" } }) - .then((resp) => { - if (resp.status >= 200 && resp.status < 300) { - // Only URLs returning a 200 status code are considered okay - console.log(`✓ [${key}] ${url} returned HTTP ${resp.status}`); - } else { - // Everything else should fail this test - console.error(`X [${key}] ${url} returned HTTP ${resp.status}`); - return Promise.reject({ key, url }); - } - }) - .catch((reason) => { - console.error(`X [${key}] ${url} error fetching: ${String(reason)}`); - return Promise.reject({ key, url }); - }); - }) - ); - - const failures = results.filter((result): result is PromiseRejectedResult => result.status === "rejected"); - - if (failures.length > 0) { - console.log(`\nThe following URLs were not successful: ${failures.map((r) => r.reason.key).join(", ")}`); - process.exit(1); - } else { - console.log("\n✓ 
All URLs have been checked successfully."); - } -} - -run(); diff --git a/airbyte-webapp/src/App.tsx b/airbyte-webapp/src/App.tsx deleted file mode 100644 index b32f4ca06e04..000000000000 --- a/airbyte-webapp/src/App.tsx +++ /dev/null @@ -1,80 +0,0 @@ -import React, { Suspense } from "react"; -import { HelmetProvider } from "react-helmet-async"; -import { BrowserRouter as Router } from "react-router-dom"; -import { ThemeProvider } from "styled-components"; - -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; - -import { config } from "config"; -import { ApiServices } from "core/ApiServices"; -import { I18nProvider } from "core/i18n"; -import { ServicesProvider } from "core/servicesProvider"; -import { AppMonitoringServiceProvider } from "hooks/services/AppMonitoringService"; -import { ConfirmationModalService } from "hooks/services/ConfirmationModal"; -import { defaultOssFeatures, FeatureService } from "hooks/services/Feature"; -import { FormChangeTrackerService } from "hooks/services/FormChangeTracker"; -import { ModalServiceProvider } from "hooks/services/Modal"; -import { NotificationService } from "hooks/services/Notification"; -import { AnalyticsProvider } from "views/common/AnalyticsProvider"; -import { StoreProvider } from "views/common/StoreProvider"; - -import LoadingPage from "./components/LoadingPage"; -import { ConfigServiceProvider } from "./config"; -import en from "./locales/en.json"; -import { Routing } from "./pages/routes"; -import { WorkspaceServiceProvider } from "./services/workspaces/WorkspacesService"; -import { theme } from "./theme"; - -const StyleProvider: React.FC> = ({ children }) => ( - {children} -); - -const Services: React.FC> = ({ children }) => ( - - - - - - - - - - - {children} - - - - - - - - - - -); - -const App: React.FC = () => { - return ( - - - - - - }> - - - - - - - - - - - - - - ); -}; - -export default App; diff --git 
a/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.module.scss b/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.module.scss deleted file mode 100644 index a8dd2d449d09..000000000000 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.module.scss +++ /dev/null @@ -1,11 +0,0 @@ -@use "scss/colors"; -@use "scss/variables"; - -.container { - margin-bottom: variables.$spacing-xl; -} - -.list { - background-color: colors.$grey-50; - border-radius: variables.$border-radius-xs; -} diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx deleted file mode 100644 index 97cc2587e3b6..000000000000 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/ArrayOfObjectsEditor.tsx +++ /dev/null @@ -1,96 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { Modal, ModalProps } from "components/ui/Modal"; - -import { ConnectionFormMode } from "hooks/services/ConnectionForm/ConnectionFormService"; - -import styles from "./ArrayOfObjectsEditor.module.scss"; -import { EditorHeader } from "./components/EditorHeader"; -import { EditorRow } from "./components/EditorRow"; - -interface ItemBase { - name?: string; - description?: string; -} - -export interface ArrayOfObjectsEditorProps { - items: T[]; - editableItemIndex?: number | string | null; - mainTitle?: React.ReactNode; - addButtonText?: React.ReactNode; - renderItemName?: (item: T, index: number) => React.ReactNode | undefined; - renderItemDescription?: (item: T, index: number) => React.ReactNode | undefined; - renderItemEditorForm: (item?: T) => React.ReactNode; - onStartEdit: (n: number) => void; - onRemove: (index: number) => void; - onCancel?: () => void; - mode?: ConnectionFormMode; - disabled?: boolean; - editModalSize?: ModalProps["size"]; -} - -export const ArrayOfObjectsEditor = ({ - onStartEdit, - 
onRemove, - onCancel, - renderItemName = (item) => item.name, - renderItemDescription = (item) => item.description, - renderItemEditorForm, - items, - editableItemIndex, - mainTitle, - addButtonText, - mode, - disabled, - editModalSize, -}: ArrayOfObjectsEditorProps): JSX.Element => { - const onAddItem = React.useCallback(() => onStartEdit(items.length), [onStartEdit, items]); - const isEditable = editableItemIndex !== null && editableItemIndex !== undefined; - - const renderEditModal = () => { - const item = typeof editableItemIndex === "number" ? items[editableItemIndex] : undefined; - - return ( - } - size={editModalSize} - testId="arrayOfObjects-editModal" - onClose={onCancel} - > - {renderItemEditorForm(item)} - - ); - }; - - return ( - <> -
    - - {items.length ? ( -
    - {items.map((item, index) => ( - - ))} -
    - ) : null} -
    - {mode !== "readonly" && isEditable && renderEditModal()} - - ); -}; diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.module.scss b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.module.scss deleted file mode 100644 index 834c58a237ea..000000000000 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.module.scss +++ /dev/null @@ -1,13 +0,0 @@ -@use "scss/colors"; - -.editorHeader { - display: flex; - justify-content: space-between; - align-items: center; - flex-direction: row; - color: colors.$dark-blue-900; - font-weight: 500; - font-size: 14px; - line-height: 17px; - margin: 5px 0 10px; -} diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.test.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.test.tsx deleted file mode 100644 index e34452d3bb87..000000000000 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.test.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import { render } from "test-utils/testutils"; - -import { EditorHeader } from "./EditorHeader"; - -describe("", () => { - let container: HTMLElement; - describe("edit mode", () => { - it("renders only relevant items for the mode", async () => { - const renderResult = await render( - "This is the main title"} - addButtonText={
    "button text"
    } - itemsCount={0} - onAddItem={() => { - return null; - }} - mode="edit" - /> - ); - container = renderResult.container; - const mainTitle = container.querySelector("div[data-testid='mainTitle']"); - const addButtonText = container.querySelector("div[data-testid='addButtonText']"); - expect(mainTitle).toBeInTheDocument(); - expect(addButtonText).toBeInTheDocument(); - }); - }); - describe("readonly mode", () => { - it("renders only relevant items for the mode", async () => { - const renderResult = await render( - "This is the main title"} - addButtonText={
    "button text"
    } - itemsCount={0} - onAddItem={() => { - return null; - }} - mode="readonly" - /> - ); - container = renderResult.container; - const mainTitle = container.querySelector("div[data-testid='mainTitle']"); - expect(mainTitle).toBeInTheDocument(); - expect(container.querySelector("div[data-testid='addButtonText']")).not.toBeInTheDocument(); - }); - }); -}); diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.tsx deleted file mode 100644 index f6990cd45702..000000000000 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorHeader.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { Button } from "components/ui/Button"; - -import { ConnectionFormMode } from "hooks/services/ConnectionForm/ConnectionFormService"; - -import styles from "./EditorHeader.module.scss"; - -interface EditorHeaderProps { - mainTitle?: React.ReactNode; - addButtonText?: React.ReactNode; - itemsCount: number; - onAddItem: () => void; - mode?: ConnectionFormMode; - disabled?: boolean; -} - -const EditorHeader: React.FC = ({ - itemsCount, - onAddItem, - mainTitle, - addButtonText, - mode, - disabled, -}) => { - return ( -
    - {mainTitle || } - {mode !== "readonly" && ( - - )} -
    - ); -}; - -export { EditorHeader }; diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.module.scss b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.module.scss deleted file mode 100644 index 8aa25e5ab820..000000000000 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.module.scss +++ /dev/null @@ -1,30 +0,0 @@ -@use "../../../scss/colors"; -@use "../../../scss/variables"; - -.container + .container { - border-top: 1px solid colors.$white; -} - -.body { - display: flex; - justify-content: space-between; - align-items: center; - flex-direction: row; - color: colors.$dark-blue; - font-weight: 400; - font-size: 12px; - line-height: 17px; - padding: variables.$spacing-xs variables.$spacing-xs variables.$spacing-xs variables.$spacing-md; - gap: variables.$spacing-xs; -} - -.name { - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; -} - -.actions { - display: flex; - flex-direction: row; -} diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.tsx deleted file mode 100644 index 2184263b3383..000000000000 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.tsx +++ /dev/null @@ -1,60 +0,0 @@ -import React from "react"; -import { useIntl } from "react-intl"; - -import { CrossIcon } from "components/icons/CrossIcon"; -import { PencilIcon } from "components/icons/PencilIcon"; -import { Button } from "components/ui/Button"; -import { Tooltip } from "components/ui/Tooltip"; - -import styles from "./EditorRow.module.scss"; - -interface EditorRowProps { - name?: React.ReactNode; - description?: React.ReactNode; - id: number; - onEdit: (id: number) => void; - onRemove: (id: number) => void; - disabled?: boolean; -} - -export const EditorRow: React.FC = ({ name, id, description, onEdit, onRemove, disabled }) => { - const { formatMessage } = 
useIntl(); - - const body = ( -
    -
    {name || id}
    -
    -
    -
    - ); - - return ( -
    - {description ? ( - - {description} - - ) : ( - body - )} -
    - ); -}; diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/index.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/index.tsx deleted file mode 100644 index b4a645f01601..000000000000 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/index.tsx +++ /dev/null @@ -1,4 +0,0 @@ -import { ArrayOfObjectsEditor } from "./ArrayOfObjectsEditor"; - -export default ArrayOfObjectsEditor; -export { ArrayOfObjectsEditor }; diff --git a/airbyte-webapp/src/components/CloudInviteUsersHint/CloudInviteUsersHint.tsx b/airbyte-webapp/src/components/CloudInviteUsersHint/CloudInviteUsersHint.tsx deleted file mode 100644 index 9f5b5b885c4a..000000000000 --- a/airbyte-webapp/src/components/CloudInviteUsersHint/CloudInviteUsersHint.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import { lazy, Suspense } from "react"; - -import { InviteUsersHintProps } from "packages/cloud/views/users/InviteUsersHint/types"; -import { isCloudApp } from "utils/app"; - -const LazyInviteUsersHint = lazy(() => - import("packages/cloud/views/users/InviteUsersHint").then(({ InviteUsersHint }) => ({ default: InviteUsersHint })) -); - -export const CloudInviteUsersHint: React.VFC = (props) => - isCloudApp() ? 
( - - - - ) : null; diff --git a/airbyte-webapp/src/components/CloudInviteUsersHint/index.ts b/airbyte-webapp/src/components/CloudInviteUsersHint/index.ts deleted file mode 100644 index bfe0be13e8d1..000000000000 --- a/airbyte-webapp/src/components/CloudInviteUsersHint/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./CloudInviteUsersHint"; diff --git a/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.module.scss b/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.module.scss deleted file mode 100644 index 524972b7ce19..000000000000 --- a/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.module.scss +++ /dev/null @@ -1,15 +0,0 @@ -@use "scss/variables"; - -.container { - margin: 13px auto 0; - padding-bottom: variables.$spacing-xl; - - &.cloud { - padding-bottom: variables.$spacing-page-bottom-cloud; - } - - &:not(.big) { - width: 80%; - max-width: 813px; - } -} diff --git a/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.tsx b/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.tsx deleted file mode 100644 index 120e1a2e0890..000000000000 --- a/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.tsx +++ /dev/null @@ -1,27 +0,0 @@ -import classNames from "classnames"; -import { PropsWithChildren } from "react"; - -import { FlexContainer } from "components/ui/Flex"; - -import { isCloudApp } from "utils/app"; - -import styles from "./FormPageContent.module.scss"; - -interface FormPageContentProps { - big?: boolean; -} - -const FormPageContent: React.FC> = ({ big, children }) => ( - - {children} - -); - -export default FormPageContent; diff --git a/airbyte-webapp/src/components/ConnectorBlocks/ItemTabs.tsx b/airbyte-webapp/src/components/ConnectorBlocks/ItemTabs.tsx deleted file mode 100644 index e4d955e71f0d..000000000000 --- a/airbyte-webapp/src/components/ConnectorBlocks/ItemTabs.tsx +++ /dev/null @@ -1,31 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; - 
-import { StepsMenu } from "components/ui/StepsMenu"; - -export enum StepsTypes { - OVERVIEW = "overview", - SETTINGS = "settings", -} - -interface IProps { - currentStep?: string; - setCurrentStep: (step: string) => void; -} - -const steps = [ - { - id: StepsTypes.OVERVIEW, - name: , - }, - { - id: StepsTypes.SETTINGS, - name: , - }, -]; - -const ItemTabs: React.FC = ({ currentStep, setCurrentStep }) => { - return ; -}; - -export default ItemTabs; diff --git a/airbyte-webapp/src/components/ConnectorBlocks/TableItemTitle.module.scss b/airbyte-webapp/src/components/ConnectorBlocks/TableItemTitle.module.scss deleted file mode 100644 index c5d06898e0d2..000000000000 --- a/airbyte-webapp/src/components/ConnectorBlocks/TableItemTitle.module.scss +++ /dev/null @@ -1,36 +0,0 @@ -@use "../../scss/colors"; -@use "../../scss/variables"; - -.content, -.entityInfo { - display: flex; - flex-direction: row; - justify-content: space-between; - align-items: center; -} - -.content { - padding: 0 variables.$spacing-xl variables.$spacing-xl; -} - -.entityInfo { - justify-content: left; - padding: variables.$spacing-lg variables.$spacing-xl variables.$spacing-2xl; - gap: variables.$spacing-lg; -} - -.entityType { - display: flex; - gap: variables.$spacing-sm; - align-items: center; - color: colors.$grey-400; -} - -.entityIcon { - height: 40px; - width: 40px; -} - -.primary p { - color: colors.$blue; -} diff --git a/airbyte-webapp/src/components/ConnectorBlocks/TableItemTitle.tsx b/airbyte-webapp/src/components/ConnectorBlocks/TableItemTitle.tsx deleted file mode 100644 index 8527278982bc..000000000000 --- a/airbyte-webapp/src/components/ConnectorBlocks/TableItemTitle.tsx +++ /dev/null @@ -1,77 +0,0 @@ -import React from "react"; -import { FormattedMessage, useIntl } from "react-intl"; - -import { ReleaseStageBadge } from "components/ReleaseStageBadge"; -import { DropdownMenu, DropdownMenuOptionType } from "components/ui/DropdownMenu"; -import { Heading } from "components/ui/Heading"; 
-import { Text } from "components/ui/Text"; - -import { ReleaseStage } from "core/request/AirbyteClient"; -import { getIcon } from "utils/imageUtils"; - -import styles from "./TableItemTitle.module.scss"; -import { Button } from "../ui/Button"; - -interface TableItemTitleProps { - type: "source" | "destination"; - dropdownOptions: DropdownMenuOptionType[]; - onSelect: (data: DropdownMenuOptionType) => void; - entity: string; - entityName: string; - entityIcon?: string; - releaseStage?: ReleaseStage; -} - -const TableItemTitle: React.FC = ({ - type, - dropdownOptions, - onSelect, - entity, - entityName, - entityIcon, - releaseStage, -}) => { - const { formatMessage } = useIntl(); - - return ( - <> -
    - {entityIcon &&
    {getIcon(entityIcon)}
    } -
    - {entityName} - - {entity} - - -
    -
    -
    - - - - - {() => ( - - )} - -
    - - ); -}; - -export default TableItemTitle; diff --git a/airbyte-webapp/src/components/ConnectorBlocks/index.tsx b/airbyte-webapp/src/components/ConnectorBlocks/index.tsx deleted file mode 100644 index a2b50d51ba4d..000000000000 --- a/airbyte-webapp/src/components/ConnectorBlocks/index.tsx +++ /dev/null @@ -1,5 +0,0 @@ -import FormPageContent from "./FormPageContent"; -import ItemTabs, { StepsTypes } from "./ItemTabs"; -import TableItemTitle from "./TableItemTitle"; - -export { ItemTabs, StepsTypes, TableItemTitle, FormPageContent }; diff --git a/airbyte-webapp/src/components/ConnectorCard/ConnectorCard.module.scss b/airbyte-webapp/src/components/ConnectorCard/ConnectorCard.module.scss deleted file mode 100644 index 83980e04c689..000000000000 --- a/airbyte-webapp/src/components/ConnectorCard/ConnectorCard.module.scss +++ /dev/null @@ -1,55 +0,0 @@ -@use "../../scss/colors"; -@use "../../scss/variables"; - -.container { - display: flex; - padding: variables.$spacing-md; - width: 220px; - align-items: center; -} - -.details { - width: 160px; - margin-left: variables.$spacing-md; - display: flex; - flex-direction: column; - font-weight: normal; -} - -.entityIcon { - height: 30px; - width: 30px; -} - -.connectionName { - font-size: 14px; - color: colors.$dark-blue-900; - text-align: left; - margin-right: variables.$spacing-md; -} - -.connectorDetails { - display: flex; - justify-content: flex-start; - align-items: center; -} - -.connectorName { - font-size: 11px; - margin-top: 1px; - color: colors.$grey-300; - text-align: left; - word-wrap: break-word; -} - -.fullWidth { - width: 100%; - - .details { - width: 100%; - - .connectorDetails { - justify-content: space-between; - } - } -} diff --git a/airbyte-webapp/src/components/ConnectorCard/ConnectorCard.tsx b/airbyte-webapp/src/components/ConnectorCard/ConnectorCard.tsx deleted file mode 100644 index b5ecc761262c..000000000000 --- a/airbyte-webapp/src/components/ConnectorCard/ConnectorCard.tsx +++ /dev/null @@ 
-1,36 +0,0 @@ -import classnames from "classnames"; -import React from "react"; - -import { ReleaseStageBadge } from "components/ReleaseStageBadge"; - -import { ReleaseStage } from "core/request/AirbyteClient"; -import { getIcon } from "utils/imageUtils"; - -import styles from "./ConnectorCard.module.scss"; - -export interface ConnectorCardProps { - connectionName: string; - icon?: string; - connectorName?: string; - releaseStage?: ReleaseStage; - fullWidth?: boolean; -} - -export const ConnectorCard: React.FC = ({ - connectionName, - connectorName, - icon, - releaseStage, - fullWidth, -}) => ( -
    - {icon &&
    {getIcon(icon)}
    } -
    -
    -
    {connectionName}
    - {releaseStage && } -
    - {connectorName &&
    {connectorName}
    } -
    -
    -); diff --git a/airbyte-webapp/src/components/ConnectorCard/index.tsx b/airbyte-webapp/src/components/ConnectorCard/index.tsx deleted file mode 100644 index 77f1bb6271b1..000000000000 --- a/airbyte-webapp/src/components/ConnectorCard/index.tsx +++ /dev/null @@ -1 +0,0 @@ -export * from "./ConnectorCard"; diff --git a/airbyte-webapp/src/components/EntityTable/ConnectionTable.module.scss b/airbyte-webapp/src/components/EntityTable/ConnectionTable.module.scss deleted file mode 100644 index 261da7bd7e4b..000000000000 --- a/airbyte-webapp/src/components/EntityTable/ConnectionTable.module.scss +++ /dev/null @@ -1,11 +0,0 @@ -th.width30 { - width: 30%; -} - -.thEnabled { - width: 1%; -} - -.thConnectionSettings { - width: 1%; -} diff --git a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx deleted file mode 100644 index d26219a753c4..000000000000 --- a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx +++ /dev/null @@ -1,199 +0,0 @@ -import { createColumnHelper } from "@tanstack/react-table"; -import queryString from "query-string"; -import React, { useCallback } from "react"; -import { FormattedMessage } from "react-intl"; -import { useNavigate } from "react-router-dom"; - -import { SortableTableHeader } from "components/ui/Table"; - -import { ConnectionScheduleType, SchemaChange } from "core/request/AirbyteClient"; -import { FeatureItem, useFeature } from "hooks/services/Feature"; -import { useQuery } from "hooks/useQuery"; - -import ConnectionSettingsCell from "./components/ConnectionSettingsCell"; -import { ConnectionStatusCell } from "./components/ConnectionStatusCell"; -import { ConnectorNameCell } from "./components/ConnectorNameCell"; -import { FrequencyCell } from "./components/FrequencyCell"; -import { LastSyncCell } from "./components/LastSyncCell"; -import { StatusCell } from "./components/StatusCell"; -import styles from "./ConnectionTable.module.scss"; -import { 
ConnectionTableDataItem, SortOrderEnum } from "./types"; -import { NextTable } from "../ui/NextTable"; - -interface ConnectionTableProps { - data: ConnectionTableDataItem[]; - entity: "source" | "destination" | "connection"; - onClickRow?: (data: ConnectionTableDataItem) => void; -} - -const ConnectionTable: React.FC = ({ data, entity, onClickRow }) => { - const navigate = useNavigate(); - const query = useQuery<{ sortBy?: string; order?: SortOrderEnum }>(); - const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); - - const sortBy = query.sortBy || "entityName"; - const sortOrder = query.order || SortOrderEnum.ASC; - - const onSortClick = useCallback( - (field: string) => { - const order = - sortBy !== field ? SortOrderEnum.ASC : sortOrder === SortOrderEnum.ASC ? SortOrderEnum.DESC : SortOrderEnum.ASC; - navigate({ - search: queryString.stringify( - { - sortBy: field, - order, - }, - { skipNull: true } - ), - }); - }, - [navigate, sortBy, sortOrder] - ); - - const sortData = useCallback( - (a, b) => { - let result; - if (sortBy === "lastSync") { - result = b[sortBy] - a[sortBy]; - } else { - result = a[sortBy].toLowerCase().localeCompare(b[sortBy].toLowerCase()); - } - - if (sortOrder === SortOrderEnum.DESC) { - return -1 * result; - } - - return result; - }, - [sortBy, sortOrder] - ); - - const sortingData = React.useMemo(() => data.sort(sortData), [sortData, data]); - - const columnHelper = createColumnHelper(); - - const columns = React.useMemo( - () => [ - columnHelper.accessor("name", { - header: () => ( - onSortClick("name")} - isActive={sortBy === "name"} - isAscending={sortOrder === SortOrderEnum.ASC} - > - - - ), - meta: { - thClassName: styles.width30, - responsive: true, - }, - cell: (props) => ( - - ), - }), - columnHelper.accessor("entityName", { - header: () => ( - onSortClick("entityName")} - isActive={sortBy === "entityName"} - isAscending={sortOrder === SortOrderEnum.ASC} - > - - - ), - meta: { - thClassName: styles.width30, - 
responsive: true, - }, - cell: (props) => ( - - ), - }), - columnHelper.accessor("connectorName", { - header: () => ( - onSortClick("connectorName")} - isActive={sortBy === "connectorName"} - isAscending={sortOrder === SortOrderEnum.ASC} - > - - - ), - meta: { - thClassName: styles.width30, - responsive: true, - }, - cell: (props) => ( - - ), - }), - columnHelper.accessor("scheduleData", { - header: () => , - cell: (props) => ( - - ), - }), - columnHelper.accessor("lastSync", { - header: () => ( - onSortClick("lastSync")} - isActive={sortBy === "lastSync"} - isAscending={sortOrder === SortOrderEnum.ASC} - > - - - ), - cell: (props) => , - }), - columnHelper.accessor("enabled", { - header: () => , - meta: { - thClassName: styles.thEnabled, - }, - cell: (props) => ( - - ), - }), - columnHelper.accessor("connectionId", { - header: "", - meta: { - thClassName: styles.thConnectionSettings, - }, - cell: (props) => , - }), - ], - [columnHelper, sortBy, sortOrder, onSortClick, entity, allowAutoDetectSchema] - ); - - return ; -}; - -export default ConnectionTable; diff --git a/airbyte-webapp/src/components/EntityTable/ImplementationTable.module.scss b/airbyte-webapp/src/components/EntityTable/ImplementationTable.module.scss deleted file mode 100644 index f196a5fbb0db..000000000000 --- a/airbyte-webapp/src/components/EntityTable/ImplementationTable.module.scss +++ /dev/null @@ -1,9 +0,0 @@ -.content { - > table > tbody > tr > td:first-of-type { - padding-left: 32px !important; - } -} - -.thEntityName { - width: 40%; -} diff --git a/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx b/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx deleted file mode 100644 index 0f70ee0daa76..000000000000 --- a/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx +++ /dev/null @@ -1,142 +0,0 @@ -import { createColumnHelper } from "@tanstack/react-table"; -import queryString from "query-string"; -import React, { useCallback } from "react"; -import 
{ FormattedMessage } from "react-intl"; -import { useNavigate } from "react-router-dom"; - -import { NextTable } from "components/ui/NextTable"; -import { SortableTableHeader } from "components/ui/Table"; - -import { useQuery } from "hooks/useQuery"; - -import AllConnectionsStatusCell from "./components/AllConnectionsStatusCell"; -import ConnectEntitiesCell from "./components/ConnectEntitiesCell"; -import { ConnectorNameCell } from "./components/ConnectorNameCell"; -import { EntityNameCell } from "./components/EntityNameCell"; -import { LastSyncCell } from "./components/LastSyncCell"; -import styles from "./ImplementationTable.module.scss"; -import { EntityTableDataItem, SortOrderEnum } from "./types"; - -interface IProps { - data: EntityTableDataItem[]; - entity: "source" | "destination"; - onClickRow?: (data: EntityTableDataItem) => void; -} - -const ImplementationTable: React.FC = ({ data, entity, onClickRow }) => { - const query = useQuery<{ sortBy?: string; order?: SortOrderEnum }>(); - const navigate = useNavigate(); - const sortBy = query.sortBy || "entity"; - const sortOrder = query.order || SortOrderEnum.ASC; - - const onSortClick = useCallback( - (field: string) => { - const order = - sortBy !== field ? SortOrderEnum.ASC : sortOrder === SortOrderEnum.ASC ? 
SortOrderEnum.DESC : SortOrderEnum.ASC; - navigate({ - search: queryString.stringify( - { - sortBy: field, - order, - }, - { skipNull: true } - ), - }); - }, - [navigate, sortBy, sortOrder] - ); - - const sortData = useCallback( - (a, b) => { - let result; - if (sortBy === "lastSync") { - result = b[sortBy] - a[sortBy]; - } else { - result = a[`${sortBy}Name`].toLowerCase().localeCompare(b[`${sortBy}Name`].toLowerCase()); - } - - if (sortOrder === SortOrderEnum.DESC) { - return -1 * result; - } - - return result; - }, - [sortBy, sortOrder] - ); - - const sortingData = React.useMemo(() => data.sort(sortData), [sortData, data]); - - const columnHelper = createColumnHelper(); - - const columns = React.useMemo( - () => [ - columnHelper.accessor("entityName", { - header: () => ( - onSortClick("entity")} - isActive={sortBy === "entity"} - isAscending={sortOrder === SortOrderEnum.ASC} - > - - - ), - meta: { - thClassName: styles.thEntityName, - }, - cell: (props) => , - }), - columnHelper.accessor("connectorName", { - header: () => ( - onSortClick("connector")} - isActive={sortBy === "connector"} - isAscending={sortOrder === SortOrderEnum.ASC} - > - - - ), - cell: (props) => ( - - ), - }), - columnHelper.accessor("connectEntities", { - header: () => , - cell: (props) => ( - - ), - }), - columnHelper.accessor("lastSync", { - header: () => ( - onSortClick("lastSync")} - isActive={sortBy === "lastSync"} - isAscending={sortOrder === SortOrderEnum.ASC} - > - - - ), - cell: (props) => ( - - ), - }), - columnHelper.accessor("connectEntities", { - header: () => , - id: "status", - cell: (props) => , - }), - ], - [columnHelper, entity, onSortClick, sortBy, sortOrder] - ); - - return ( -
    - -
    - ); -}; - -export default ImplementationTable; diff --git a/airbyte-webapp/src/components/EntityTable/components/AllConnectionsStatusCell.tsx b/airbyte-webapp/src/components/EntityTable/components/AllConnectionsStatusCell.tsx deleted file mode 100644 index e37d11e5ebf7..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/AllConnectionsStatusCell.tsx +++ /dev/null @@ -1,52 +0,0 @@ -import React, { useMemo } from "react"; -import { useIntl } from "react-intl"; - -import { StatusIcon } from "components/ui/StatusIcon"; -import { StatusIconStatus } from "components/ui/StatusIcon/StatusIcon"; - -import { Status } from "../types"; - -const _statusConfig: Array<{ status: Status; statusIconStatus?: StatusIconStatus; titleId: string }> = [ - { status: Status.ACTIVE, statusIconStatus: "success", titleId: "connection.successSync" }, - { status: Status.INACTIVE, statusIconStatus: "inactive", titleId: "connection.disabledConnection" }, - { status: Status.FAILED, titleId: "connection.failedSync" }, - { status: Status.EMPTY, statusIconStatus: "sleep", titleId: "connection.noSyncData" }, -]; - -interface AllConnectionStatusConnectEntity { - name: string; - connector: string; - status: string; - lastSyncStatus: string | null; -} - -interface AllConnectionsStatusCellProps { - connectEntities: AllConnectionStatusConnectEntity[]; -} - -const AllConnectionsStatusCell: React.FC = ({ connectEntities }) => { - const { formatMessage } = useIntl(); - - const statusIconProps = useMemo(() => { - if (connectEntities.length) { - for (const { status, statusIconStatus, titleId } of _statusConfig) { - const filteredEntities = connectEntities.filter((entity) => entity.lastSyncStatus === status); - if (filteredEntities.length) { - return { - status: statusIconStatus, - value: filteredEntities.length, - title: titleId, - }; - } - } - } - - return undefined; - }, [connectEntities]); - - return statusIconProps ? 
( - - ) : null; -}; - -export default AllConnectionsStatusCell; diff --git a/airbyte-webapp/src/components/EntityTable/components/ChangesStatusIcon.module.scss b/airbyte-webapp/src/components/EntityTable/components/ChangesStatusIcon.module.scss deleted file mode 100644 index 7e5209120745..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ChangesStatusIcon.module.scss +++ /dev/null @@ -1,21 +0,0 @@ -@use "scss/colors"; - -.tooltipContainer { - display: flex !important; - align-items: center !important; -} - -.changesIcon { - width: 15px; - height: 15px; - display: flex; - align-items: center; - - &.breaking { - color: colors.$red; - } - - &.nonBreaking { - color: colors.$yellow; - } -} diff --git a/airbyte-webapp/src/components/EntityTable/components/ChangesStatusIcon.tsx b/airbyte-webapp/src/components/EntityTable/components/ChangesStatusIcon.tsx deleted file mode 100644 index 53c3278c1370..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ChangesStatusIcon.tsx +++ /dev/null @@ -1,42 +0,0 @@ -import { faExclamationCircle } from "@fortawesome/free-solid-svg-icons"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import classnames from "classnames"; -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { Tooltip } from "components/ui/Tooltip"; - -import { SchemaChange } from "core/request/AirbyteClient"; -import { convertSnakeToCamel } from "utils/strings"; - -import styles from "./ChangesStatusIcon.module.scss"; - -interface ChangesStatusIconProps { - schemaChange?: SchemaChange; -} - -export const ChangesStatusIcon: React.FC = ({ schemaChange = "no_change" }) => { - if (schemaChange === "no_change") { - return null; - } - const iconStyle = classnames(styles.changesIcon, { - [styles.breaking]: schemaChange === "breaking", - [styles.nonBreaking]: schemaChange === "non_breaking", - }); - return ( - - } - > - - - ); -}; diff --git 
a/airbyte-webapp/src/components/EntityTable/components/ConnectEntitiesCell.tsx b/airbyte-webapp/src/components/EntityTable/components/ConnectEntitiesCell.tsx deleted file mode 100644 index dbff24eeb6e2..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectEntitiesCell.tsx +++ /dev/null @@ -1,65 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; -import styled from "styled-components"; - -import { NumberBadge } from "components/ui/NumberBadge"; - -interface IProps { - values: Array<{ - name: string; - connector: string; - }>; - enabled?: boolean; - entity: "source" | "destination"; -} - -const Number = styled(NumberBadge)` - margin-right: 6px; -`; - -const Content = styled.div<{ enabled?: boolean }>` - display: flex; - align-items: center; - color: ${({ theme, enabled }) => (!enabled ? theme.greyColor40 : "inheret")}; -`; - -const Connector = styled.div` - font-weight: normal; - font-size: 12px; - line-height: 15px; - color: ${({ theme }) => theme.greyColor40}; -`; - -const ConnectEntitiesCell: React.FC = ({ values, enabled, entity }) => { - if (values.length === 1) { - return ( - - -
    - {values[0].name} - {values[0].connector} -
    -
    - ); - } - - if (!values.length) { - return ( - - - - ); - } - - return ( - - -
    - - {`${values[0].connector}, ${values[1].connector}${values.length > 2 ? ",..." : ""}`} -
    -
    - ); -}; - -export default ConnectEntitiesCell; diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx b/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx deleted file mode 100644 index d70099f8e8ce..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx +++ /dev/null @@ -1,51 +0,0 @@ -import { faCog } from "@fortawesome/free-solid-svg-icons"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import React from "react"; -import styled from "styled-components"; - -import { Link } from "components"; - -import { useCurrentWorkspace } from "hooks/services/useWorkspace"; -import { ConnectionRoutePaths } from "pages/connections/types"; -import { RoutePaths } from "pages/routePaths"; - -interface IProps { - id: string; -} - -const Content = styled.div` - color: ${({ theme }) => theme.greyColor60}; - font-size: 17px; - min-width: 17px; -`; - -const Icon = styled(FontAwesomeIcon)` - display: none; - color: ${({ theme }) => theme.greyColor60}; - - tr:hover & { - display: block; - } - &:hover { - color: ${({ theme }) => theme.greyColor70}; - } -`; - -const ConnectorCell: React.FC = ({ id }) => { - const { workspaceId } = useCurrentWorkspace(); - - const openSettings = (event: React.MouseEvent) => { - event.stopPropagation(); - }; - - const settingPath = `/${RoutePaths.Workspaces}/${workspaceId}/${RoutePaths.Connections}/${id}/${ConnectionRoutePaths.Replication}`; - return ( - - - - - - ); -}; - -export default ConnectorCell; diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectionStatusCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/ConnectionStatusCell.module.scss deleted file mode 100644 index 62b219e37320..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectionStatusCell.module.scss +++ /dev/null @@ -1,11 +0,0 @@ -$connector-icon-width: 20px; -$connector-icon-margin: 10px; 
- -.content { - display: flex; - align-items: center; -} - -.text { - width: calc(100% - #{$connector-icon-width + $connector-icon-margin}); -} diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectionStatusCell.tsx b/airbyte-webapp/src/components/EntityTable/components/ConnectionStatusCell.tsx deleted file mode 100644 index 02338b14c486..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectionStatusCell.tsx +++ /dev/null @@ -1,61 +0,0 @@ -import React, { useMemo } from "react"; -import { useIntl } from "react-intl"; - -import { StatusIcon } from "components/ui/StatusIcon"; -import { StatusIconStatus } from "components/ui/StatusIcon/StatusIcon"; - -import styles from "./ConnectionStatusCell.module.scss"; -import { EntityNameCell } from "./EntityNameCell"; -import { Status } from "../types"; - -interface ConnectionStatusCellProps { - status: string | null; - value: string; - enabled: boolean; -} - -export const ConnectionStatusCell: React.FC = ({ status, value, enabled }) => { - const { formatMessage } = useIntl(); - const statusIconStatus = useMemo( - () => - status === Status.EMPTY - ? "sleep" - : status === Status.ACTIVE - ? "success" - : status === Status.INACTIVE - ? "inactive" - : status === Status.PENDING - ? "loading" - : status === Status.CANCELLED - ? "cancelled" - : undefined, - [status] - ); - const title = - status === Status.EMPTY - ? formatMessage({ - id: "connection.noSyncData", - }) - : status === Status.INACTIVE - ? formatMessage({ - id: "connection.disabledConnection", - }) - : status === Status.ACTIVE - ? formatMessage({ - id: "connection.successSync", - }) - : status === Status.PENDING - ? formatMessage({ - id: "connection.pendingSync", - }) - : formatMessage({ - id: "connection.failedSync", - }); - - return ( -
    - - -
    - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectorNameCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/ConnectorNameCell.module.scss deleted file mode 100644 index 6d0dbbc11b72..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectorNameCell.module.scss +++ /dev/null @@ -1,13 +0,0 @@ -@use "scss/variables"; - -$status-icon-width: 25px; - -.content { - display: flex; - align-items: center; -} - -.text { - width: calc(100% - #{$status-icon-width}); - margin-left: variables.$spacing-md; -} diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectorNameCell.tsx b/airbyte-webapp/src/components/EntityTable/components/ConnectorNameCell.tsx deleted file mode 100644 index c8ecffe918d9..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectorNameCell.tsx +++ /dev/null @@ -1,21 +0,0 @@ -import React from "react"; - -import { ConnectorIcon } from "components/common/ConnectorIcon"; - -import styles from "./ConnectorNameCell.module.scss"; -import { EntityNameCell } from "./EntityNameCell"; - -interface ConnectorNameCellProps { - enabled: boolean; - value: string; - icon: string | undefined; -} - -export const ConnectorNameCell: React.FC = ({ value, enabled, icon }) => { - return ( -
    - - -
    - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/EntityNameCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/EntityNameCell.module.scss deleted file mode 100644 index f54b4281cdff..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/EntityNameCell.module.scss +++ /dev/null @@ -1,14 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.text { - overflow: hidden; - white-space: nowrap; - text-overflow: ellipsis; - font-weight: 500; - color: colors.$grey; - - &.enabled { - color: colors.$dark-blue; - } -} diff --git a/airbyte-webapp/src/components/EntityTable/components/EntityNameCell.tsx b/airbyte-webapp/src/components/EntityTable/components/EntityNameCell.tsx deleted file mode 100644 index 99c73743f0f0..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/EntityNameCell.tsx +++ /dev/null @@ -1,20 +0,0 @@ -import classNames from "classnames"; -import React from "react"; - -import { Text } from "components/ui/Text"; - -import styles from "./EntityNameCell.module.scss"; - -interface EntityNameCellProps { - value: string; - enabled: boolean; - className?: string; -} - -export const EntityNameCell: React.FC = ({ value, enabled, className }) => { - return ( - - {value} - - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/FrequencyCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/FrequencyCell.module.scss deleted file mode 100644 index d46751de1928..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/FrequencyCell.module.scss +++ /dev/null @@ -1,10 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.text { - color: colors.$grey; - - &.enabled { - color: colors.$dark-blue; - } -} diff --git a/airbyte-webapp/src/components/EntityTable/components/FrequencyCell.tsx b/airbyte-webapp/src/components/EntityTable/components/FrequencyCell.tsx deleted file mode 100644 index 337581be2c3b..000000000000 --- 
a/airbyte-webapp/src/components/EntityTable/components/FrequencyCell.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import classNames from "classnames"; -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { Text } from "components/ui/Text"; - -import { ConnectionScheduleData, ConnectionScheduleType } from "core/request/AirbyteClient"; - -import styles from "./FrequencyCell.module.scss"; - -interface FrequencyCellProps { - value?: ConnectionScheduleData; - enabled?: boolean; - scheduleType?: ConnectionScheduleType; -} - -export const FrequencyCell: React.FC = ({ value, enabled, scheduleType }) => { - if (scheduleType === ConnectionScheduleType.cron || scheduleType === ConnectionScheduleType.manual) { - return ( - - - - ); - } - - return ( - - - - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/LastSyncCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/LastSyncCell.module.scss deleted file mode 100644 index d46751de1928..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/LastSyncCell.module.scss +++ /dev/null @@ -1,10 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.text { - color: colors.$grey; - - &.enabled { - color: colors.$dark-blue; - } -} diff --git a/airbyte-webapp/src/components/EntityTable/components/LastSyncCell.tsx b/airbyte-webapp/src/components/EntityTable/components/LastSyncCell.tsx deleted file mode 100644 index 5bff2c57eddb..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/LastSyncCell.tsx +++ /dev/null @@ -1,24 +0,0 @@ -import classNames from "classnames"; -import React from "react"; -import { FormattedRelativeTime } from "react-intl"; - -import { Text } from "components/ui/Text"; - -import styles from "./LastSyncCell.module.scss"; - -interface LastSyncCellProps { - timeInSeconds?: number | null; - enabled?: boolean; -} - -export const LastSyncCell: React.FC = ({ timeInSeconds, enabled }) => { - return ( - <> - {timeInSeconds ? 
( - - - - ) : null} - - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss deleted file mode 100644 index 663c904fc6d3..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss +++ /dev/null @@ -1,7 +0,0 @@ -.container { - display: flex; - flex-direction: row; - align-content: center; - justify-content: space-between; - width: 120px; -} diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx deleted file mode 100644 index c49601ac024f..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx +++ /dev/null @@ -1,82 +0,0 @@ -import { render, waitFor } from "@testing-library/react"; -import { TestWrapper, TestSuspenseBoundary, mockConnection } from "test-utils"; - -import { StatusCell } from "./StatusCell"; - -jest.mock("hooks/services/useConnectionHook", () => ({ - useConnectionList: jest.fn(() => ({ - connections: [], - })), - useEnableConnection: jest.fn(() => ({ - mutateAsync: jest.fn(), - })), - useSyncConnection: jest.fn(() => ({ - mutateAsync: jest.fn(), - })), -})); - -const mockId = "mock-id"; - -jest.doMock("hooks/services/useConnectionHook", () => ({ - useEnableConnection: () => ({ - mutateAsync: jest.fn(), - isLoading: false, - }), -})); - -describe("", () => { - it("renders switch when connection has schedule", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - const switchElement = getByTestId("enable-connection-switch"); - - expect(switchElement).toBeEnabled(); - expect(switchElement).toBeChecked(); - }); - - it("renders button when connection does not have schedule", async () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - await waitFor(() => 
expect(getByTestId("manual-sync-button")).toBeEnabled()); - }); - - it("disables switch when hasBreakingChange is true", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - expect(getByTestId("enable-connection-switch")).toBeDisabled(); - }); - - it("disables manual sync button when hasBreakingChange is true", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - expect(getByTestId("manual-sync-button")).toBeDisabled(); - }); -}); diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx deleted file mode 100644 index 133166016ed0..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx +++ /dev/null @@ -1,44 +0,0 @@ -import React from "react"; - -import { SchemaChange, WebBackendConnectionListItem } from "core/request/AirbyteClient"; -import { FeatureItem, useFeature } from "hooks/services/Feature"; - -import { ChangesStatusIcon } from "./ChangesStatusIcon"; -import styles from "./StatusCell.module.scss"; -import { StatusCellControl } from "./StatusCellControl"; - -interface StatusCellProps { - hasBreakingChange?: boolean; - enabled?: boolean; - isSyncing?: boolean; - isManual?: boolean; - id: string; - schemaChange?: SchemaChange; - connection: WebBackendConnectionListItem; -} - -export const StatusCell: React.FC = ({ - enabled, - isManual, - id, - isSyncing, - schemaChange, - hasBreakingChange, - connection, -}) => { - const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); - - return ( -
    - - {allowAutoDetectSchema && } -
    - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.module.scss b/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.module.scss deleted file mode 100644 index 26b1b9ecb9c9..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.module.scss +++ /dev/null @@ -1,7 +0,0 @@ -@use "scss/variables"; - -.inProgressLabel { - height: variables.$button-height-xs; - display: flex; - align-items: center; -} diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx deleted file mode 100644 index fbda0dc04f0d..000000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx +++ /dev/null @@ -1,85 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { Button } from "components/ui/Button"; -import { Switch } from "components/ui/Switch"; - -import { WebBackendConnectionListItem } from "core/request/AirbyteClient"; -import { useEnableConnection, useSyncConnection } from "hooks/services/useConnectionHook"; - -import styles from "./StatusCellControl.module.scss"; - -interface StatusCellControlProps { - hasBreakingChange?: boolean; - enabled?: boolean; - isSyncing?: boolean; - isManual?: boolean; - id: string; - connection: WebBackendConnectionListItem; -} - -export const StatusCellControl: React.FC = ({ - enabled, - isManual, - id, - isSyncing, - hasBreakingChange, - connection, -}) => { - const { mutateAsync: enableConnection, isLoading } = useEnableConnection(); - const { mutateAsync: syncConnection, isLoading: isSyncStarting } = useSyncConnection(); - - const onRunManualSync = (event: React.SyntheticEvent) => { - event.stopPropagation(); - - if (connection) { - syncConnection(connection); - } - }; - - if (!isManual) { - const onSwitchChange = async (event: React.SyntheticEvent) => { - event.stopPropagation(); 
- await enableConnection({ - connectionId: id, - enable: !enabled, - }); - }; - - return ( - // this is so we can stop event propagation so the row doesn't receive the click and redirect - // eslint-disable-next-line jsx-a11y/no-static-element-interactions -
    event.stopPropagation()} - onKeyPress={(event: React.SyntheticEvent) => event.stopPropagation()} - > - -
    - ); - } - - if (isSyncing) { - return ( -
    - -
    - ); - } - - return ( - - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/index.tsx b/airbyte-webapp/src/components/EntityTable/index.tsx deleted file mode 100644 index 39e5c1fe1d29..000000000000 --- a/airbyte-webapp/src/components/EntityTable/index.tsx +++ /dev/null @@ -1,4 +0,0 @@ -import ConnectionTable from "./ConnectionTable"; -import ImplementationTable from "./ImplementationTable"; - -export { ImplementationTable, ConnectionTable }; diff --git a/airbyte-webapp/src/components/EntityTable/types.ts b/airbyte-webapp/src/components/EntityTable/types.ts deleted file mode 100644 index 7ac43bf70b3f..000000000000 --- a/airbyte-webapp/src/components/EntityTable/types.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { - ConnectionScheduleData, - ConnectionScheduleType, - SchemaChange, - WebBackendConnectionListItem, -} from "../../core/request/AirbyteClient"; - -interface EntityTableDataItem { - entityId: string; - entityName: string; - connectorName: string; - connectEntities: Array<{ - name: string; - connector: string; - status: string; - lastSyncStatus: string | null; - }>; - enabled: boolean; - lastSync?: number | null; - connectorIcon?: string; -} - -interface ConnectionTableDataItem { - connectionId: string; - name: string; - entityName: string; - connectorName: string; - enabled: boolean; - isSyncing?: boolean; - status?: string; - lastSync?: number | null; - scheduleData?: ConnectionScheduleData; - scheduleType?: ConnectionScheduleType; - schemaChange: SchemaChange; - lastSyncStatus: string | null; - connectorIcon?: string; - entityIcon?: string; - connection: WebBackendConnectionListItem; -} - -const enum Status { - ACTIVE = "active", - INACTIVE = "inactive", - FAILED = "failed", - CANCELLED = "cancelled", - EMPTY = "empty", - PENDING = "pending", -} - -enum SortOrderEnum { - DESC = "desc", - ASC = "asc", -} - -export type { ConnectionTableDataItem, EntityTableDataItem }; -export { Status, SortOrderEnum }; diff --git 
a/airbyte-webapp/src/components/EntityTable/utils.tsx b/airbyte-webapp/src/components/EntityTable/utils.tsx deleted file mode 100644 index 9c12c3d267ba..000000000000 --- a/airbyte-webapp/src/components/EntityTable/utils.tsx +++ /dev/null @@ -1,130 +0,0 @@ -import { - ConnectionStatus, - DestinationRead, - DestinationSnippetRead, - JobStatus, - SourceRead, - SourceSnippetRead, - WebBackendConnectionListItem, -} from "core/request/AirbyteClient"; - -import { EntityTableDataItem, ConnectionTableDataItem, Status as ConnectionSyncStatus } from "./types"; - -const getConnectorTypeName = (connectorSpec: DestinationSnippetRead | SourceSnippetRead) => { - return "sourceName" in connectorSpec ? connectorSpec.sourceName : connectorSpec.destinationName; -}; - -const getConnectorTypeId = (connectorSpec: DestinationSnippetRead | SourceSnippetRead) => { - return "sourceId" in connectorSpec ? connectorSpec.sourceId : connectorSpec.destinationId; -}; - -// TODO: types in next methods look a bit ugly -export function getEntityTableData< - S extends "source" | "destination", - SoD extends S extends "source" ? SourceRead : DestinationRead ->(entities: SoD[], connections: WebBackendConnectionListItem[], type: S): EntityTableDataItem[] { - const connectType = type === "source" ? 
"destination" : "source"; - - const mappedEntities = entities.map((entityItem) => { - const entitySoDId = entityItem[`${type}Id` as keyof SoD] as unknown as string; - const entitySoDName = entityItem[`${type}Name` as keyof SoD] as unknown as string; - const entityConnections = connections.filter( - (connectionItem) => getConnectorTypeId(connectionItem[type]) === entitySoDId - ); - - if (!entityConnections.length) { - return { - entityId: entitySoDId, - entityName: entityItem.name, - enabled: true, - connectorName: entitySoDName, - connectorIcon: entityItem.icon, - lastSync: null, - connectEntities: [], - }; - } - - const connectEntities = entityConnections.map((connection) => ({ - name: connection[connectType]?.name || "", - connector: getConnectorTypeName(connection[connectType]), - status: connection.status, - lastSyncStatus: getConnectionSyncStatus(connection.status, connection.latestSyncJobStatus), - })); - - const sortBySync = entityConnections.sort((item1, item2) => - item1.latestSyncJobCreatedAt && item2.latestSyncJobCreatedAt - ? item2.latestSyncJobCreatedAt - item1.latestSyncJobCreatedAt - : 0 - ); - - return { - entityId: entitySoDId, - entityName: entityItem.name, - enabled: true, - connectorName: entitySoDName, - lastSync: sortBySync?.[0].latestSyncJobCreatedAt, - connectEntities, - connectorIcon: entityItem.icon, - }; - }); - - return mappedEntities; -} - -export const getConnectionTableData = ( - connections: WebBackendConnectionListItem[], - type: "source" | "destination" | "connection" -): ConnectionTableDataItem[] => { - const connectType = type === "source" ? "destination" : "source"; - - return connections.map((connection) => ({ - connectionId: connection.connectionId, - name: connection.name, - entityName: - type === "connection" - ? `${connection.source?.sourceName} - ${connection.source?.name}` - : connection[connectType]?.name || "", - connectorName: - type === "connection" - ? 
`${connection.destination?.destinationName} - ${connection.destination?.name}` - : getConnectorTypeName(connection[connectType]), - lastSync: connection.latestSyncJobCreatedAt, - enabled: connection.status === ConnectionStatus.active, - schemaChange: connection.schemaChange, - scheduleData: connection.scheduleData, - scheduleType: connection.scheduleType, - status: connection.status, - isSyncing: connection.isSyncing, - lastSyncStatus: getConnectionSyncStatus(connection.status, connection.latestSyncJobStatus), - connectorIcon: type === "destination" ? connection.source.icon : connection.destination.icon, - entityIcon: type === "destination" ? connection.destination.icon : connection.source.icon, - connection, - })); -}; - -export const getConnectionSyncStatus = ( - status: ConnectionStatus, - lastSyncJobStatus: JobStatus | undefined -): ConnectionSyncStatus => { - if (status === ConnectionStatus.inactive) { - return ConnectionSyncStatus.INACTIVE; - } - - switch (lastSyncJobStatus) { - case JobStatus.succeeded: - return ConnectionSyncStatus.ACTIVE; - - case JobStatus.failed: - return ConnectionSyncStatus.FAILED; - - case JobStatus.cancelled: - return ConnectionSyncStatus.CANCELLED; - - case JobStatus.pending: - case JobStatus.running: - return ConnectionSyncStatus.PENDING; - - default: - return ConnectionSyncStatus.EMPTY; - } -}; diff --git a/airbyte-webapp/src/components/GroupControls/GroupControls.module.scss b/airbyte-webapp/src/components/GroupControls/GroupControls.module.scss deleted file mode 100644 index dd0761d0e766..000000000000 --- a/airbyte-webapp/src/components/GroupControls/GroupControls.module.scss +++ /dev/null @@ -1,63 +0,0 @@ -@use "scss/colors"; -@use "scss/variables"; - -$title-height: 34px; -$group-spacing: variables.$spacing-xl; -$border-width: variables.$border-thick; - -.container { - min-height: $title-height; - position: relative; - padding-top: calc($title-height / 2); -} - -.title { - padding-right: $group-spacing; - display: flex; - 
align-items: center; - height: $title-height; - position: absolute; - left: 0; - right: 0; - top: 0; - - label { - padding-bottom: 0; - } -} - -.label { - width: auto; - height: 100%; - padding-right: variables.$spacing-md; - display: flex; - align-items: center; - background-color: colors.$white; - white-space: nowrap; -} - -.control { - margin-left: auto; - padding: 0 variables.$spacing-xs; - background-color: colors.$white; -} - -.content { - border-color: colors.$grey-100; - border-style: solid; - border-width: 0 $border-width $border-width; - border-radius: variables.$border-radius-lg; - - // box-shadow is used for the top border, so that it overlaps with bottom border when height is 0 - box-shadow: 0 $border-width colors.$grey-100 inset; - padding: 0 $group-spacing; - - // only apply padding when there are children, so that empty group sections border is just a single line - > :first-child { - padding-top: calc($group-spacing + $title-height/2); - } - - > div { - margin-bottom: $group-spacing; - } -} diff --git a/airbyte-webapp/src/components/GroupControls/GroupControls.tsx b/airbyte-webapp/src/components/GroupControls/GroupControls.tsx deleted file mode 100644 index c69eb8bcbf6d..000000000000 --- a/airbyte-webapp/src/components/GroupControls/GroupControls.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import classNames from "classnames"; -import React from "react"; - -import styles from "./GroupControls.module.scss"; - -interface GroupControlsProps { - label: React.ReactNode; - control?: React.ReactNode; - controlClassName?: string; - name?: string; -} - -const GroupControls: React.FC> = ({ - label, - control, - children, - name, - controlClassName, -}) => { - return ( - // This outer div is necessary for .content > :first-child padding to be properly applied in the case of nested GroupControls -
    -
    -
    -
    {label}
    -
    {control}
    -
    -
    - {children} -
    -
    -
    - ); -}; - -export default GroupControls; diff --git a/airbyte-webapp/src/components/GroupControls/index.stories.tsx b/airbyte-webapp/src/components/GroupControls/index.stories.tsx deleted file mode 100644 index 41c5266f1188..000000000000 --- a/airbyte-webapp/src/components/GroupControls/index.stories.tsx +++ /dev/null @@ -1,94 +0,0 @@ -import { ComponentStory, ComponentMeta } from "@storybook/react"; - -import { Button } from "components/ui/Button"; -import { Card } from "components/ui/Card"; - -import { FormBlock, FormConditionItem } from "core/form/types"; -import { GroupLabel } from "views/Connector/ConnectorForm/components/Sections/GroupLabel"; -import { SectionContainer } from "views/Connector/ConnectorForm/components/Sections/SectionContainer"; - -import GroupControls from "./GroupControls"; - -export default { - title: "UI/GroupControls", - component: GroupControls, -} as ComponentMeta; - -const Template: ComponentStory = (args) => ( - - - -); - -const propOneFormBlock: FormBlock = { - title: "propOne", - type: "string", - isRequired: true, - _type: "formItem", - fieldKey: "propOneKey", - path: "section.conditional.choice_one.prop_one", -}; - -const propTwoFormBlock: FormBlock = { - title: "propTwo", - type: "string", - isRequired: false, - _type: "formItem", - fieldKey: "propTwoKey", - path: "section.conditional.choice_one.prop_two", -}; - -const conditionFormField: FormConditionItem = { - conditions: [ - { - isRequired: true, - _type: "formGroup", - fieldKey: "choice_one_key", - path: "section.conditional.choice_one", - properties: [propOneFormBlock, propTwoFormBlock], - }, - ], - selectionPath: "section.conditional.choice_one.type", - selectionKey: "type", - selectionConstValues: ["one"], - isRequired: true, - _type: "formCondition", - fieldKey: "field_key", - path: "section.conditional", -}; - -const label = ; - -export const Empty = Template.bind({}); -Empty.args = { - label, -}; - -export const WithContent = Template.bind({}); -WithContent.args = 
{ - label, - children: ( - <> - Content part 1 - Content part 2 - - ), -}; - -export const EmptyWithControl = Template.bind({}); -EmptyWithControl.args = { - label, - control: , -}; - -export const ControlAndContent = Template.bind({}); -ControlAndContent.args = { - label, - control: , - children: ( - <> - Content part 1 - Content part 2 - - ), -}; diff --git a/airbyte-webapp/src/components/GroupControls/index.tsx b/airbyte-webapp/src/components/GroupControls/index.tsx deleted file mode 100644 index ec345149199e..000000000000 --- a/airbyte-webapp/src/components/GroupControls/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -import GroupControls from "./GroupControls"; - -export default GroupControls; diff --git a/airbyte-webapp/src/components/Indicator/Indicator.module.scss b/airbyte-webapp/src/components/Indicator/Indicator.module.scss deleted file mode 100644 index 5ecd4ae36e4d..000000000000 --- a/airbyte-webapp/src/components/Indicator/Indicator.module.scss +++ /dev/null @@ -1,12 +0,0 @@ -@use "scss/colors"; - -.indicator { - height: 10px; - width: 10px; - border-radius: 50%; - background: colors.$red; -} - -.hidden { - background-color: transparent; -} diff --git a/airbyte-webapp/src/components/Indicator/Indicator.tsx b/airbyte-webapp/src/components/Indicator/Indicator.tsx deleted file mode 100644 index 6b684037f417..000000000000 --- a/airbyte-webapp/src/components/Indicator/Indicator.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import classNames from "classnames"; - -import styles from "./Indicator.module.scss"; - -export interface IndicatorProps { - /** - * Set to true to render an invisible indicator so reserve the space in the UI - */ - hidden?: boolean; - className?: string; -} - -export const Indicator: React.FC = ({ hidden, className }) => ( - - - - ); -}; diff --git a/airbyte-webapp/src/components/JobItem/attemptLinkUtils.ts b/airbyte-webapp/src/components/JobItem/attemptLinkUtils.ts deleted file mode 100644 index 8b7dc4d27d09..000000000000 --- 
a/airbyte-webapp/src/components/JobItem/attemptLinkUtils.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { useLocation } from "react-router-dom"; - -import { AttemptRead } from "core/request/AirbyteClient"; - -const PARSE_REGEXP = /^#(?\w*)::(?\w*)$/; - -/** - * Create and returns a link for a specific job and (optionally) attempt. - * The returned string is the hash part of a URL. - */ -export const buildAttemptLink = (jobId: number | string, attemptId?: AttemptRead["id"]): string => { - return `#${jobId}::${attemptId ?? ""}`; -}; - -/** - * Parses a hash part of the URL into a jobId and attemptId. - * This is the reverse function of {@link buildAttemptLink}. - */ -export const parseAttemptLink = (link: string): { jobId?: string; attemptId?: string } => { - const match = link.match(PARSE_REGEXP); - if (!match) { - return {}; - } - return { - jobId: match.groups?.jobId, - attemptId: match.groups?.attemptId, - }; -}; - -/** - * Returns the information about which attempt was linked to from the hash if available. 
- */ -export const useAttemptLink = () => { - const { hash } = useLocation(); - return parseAttemptLink(hash); -}; diff --git a/airbyte-webapp/src/components/JobItem/components/AttemptDetails.module.scss b/airbyte-webapp/src/components/JobItem/components/AttemptDetails.module.scss deleted file mode 100644 index b7c0d860a266..000000000000 --- a/airbyte-webapp/src/components/JobItem/components/AttemptDetails.module.scss +++ /dev/null @@ -1,28 +0,0 @@ -@use "scss/colors"; -@use "scss/variables"; - -.container { - font-size: 12px; - line-height: 15px; - color: colors.$grey; -} - -.details > *:not(:last-child, .lastAttempt)::after { - content: "|"; - padding: 0 variables.$spacing-sm; -} - -.failedMessage { - color: colors.$red-300; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; -} - -.lastAttempt { - margin-right: variables.$spacing-sm; - - &.failed { - color: colors.$red; - } -} diff --git a/airbyte-webapp/src/components/JobItem/components/AttemptDetails.tsx b/airbyte-webapp/src/components/JobItem/components/AttemptDetails.tsx deleted file mode 100644 index b07feea2c10b..000000000000 --- a/airbyte-webapp/src/components/JobItem/components/AttemptDetails.tsx +++ /dev/null @@ -1,95 +0,0 @@ -import classNames from "classnames"; -import dayjs from "dayjs"; -import React from "react"; -import { FormattedMessage, useIntl } from "react-intl"; - -import { AttemptRead, AttemptStatus } from "core/request/AirbyteClient"; -import { formatBytes } from "utils/numberHelper"; - -import styles from "./AttemptDetails.module.scss"; -import { getFailureFromAttempt, isCancelledAttempt } from "../utils"; - -interface AttemptDetailsProps { - className?: string; - attempt: AttemptRead; - hasMultipleAttempts?: boolean; -} - -export const AttemptDetails: React.FC = ({ attempt, className, hasMultipleAttempts }) => { - const { formatMessage } = useIntl(); - - if (attempt.status !== AttemptStatus.succeeded && attempt.status !== AttemptStatus.failed) { - return null; - } - - 
const getFailureOrigin = (attempt: AttemptRead) => { - const failure = getFailureFromAttempt(attempt); - const failureOrigin = failure?.failureOrigin ?? formatMessage({ id: "errorView.unknown" }); - - return `${formatMessage({ - id: "sources.failureOrigin", - })}: ${failureOrigin}`; - }; - - const getExternalFailureMessage = (attempt: AttemptRead) => { - const failure = getFailureFromAttempt(attempt); - const failureMessage = failure?.externalMessage ?? formatMessage({ id: "errorView.unknown" }); - - return `${formatMessage({ - id: "sources.message", - })}: ${failureMessage}`; - }; - - const date1 = dayjs(attempt.createdAt * 1000); - const date2 = dayjs(attempt.updatedAt * 1000); - const hours = Math.abs(date2.diff(date1, "hour")); - const minutes = Math.abs(date2.diff(date1, "minute")) - hours * 60; - const seconds = Math.abs(date2.diff(date1, "second")) - minutes * 60 - hours * 3600; - const isCancelled = isCancelledAttempt(attempt); - const isFailed = attempt.status === AttemptStatus.failed && !isCancelled; - - return ( -
    - {!isCancelled && ( -
    - {hasMultipleAttempts && ( - - - - )} - {formatBytes(attempt?.totalStats?.bytesEmitted)} - - - - - - - - {hours ? : null} - {hours || minutes ? : null} - - -
    - )} - {isFailed && ( -
    - {formatMessage( - { - id: "ui.keyValuePairV3", - }, - { - key: getFailureOrigin(attempt), - value: getExternalFailureMessage(attempt), - } - )} -
    - )} -
    - ); -}; diff --git a/airbyte-webapp/src/components/JobItem/components/ContentWrapper.module.scss b/airbyte-webapp/src/components/JobItem/components/ContentWrapper.module.scss deleted file mode 100644 index 475385588f69..000000000000 --- a/airbyte-webapp/src/components/JobItem/components/ContentWrapper.module.scss +++ /dev/null @@ -1,3 +0,0 @@ -.container { - overflow: hidden; -} diff --git a/airbyte-webapp/src/components/JobItem/components/ContentWrapper.tsx b/airbyte-webapp/src/components/JobItem/components/ContentWrapper.tsx deleted file mode 100644 index 6a43ac76ebc5..000000000000 --- a/airbyte-webapp/src/components/JobItem/components/ContentWrapper.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import { motion } from "framer-motion"; -import React from "react"; - -import styles from "./ContentWrapper.module.scss"; - -interface IProps { - children?: React.ReactNode; - isOpen?: boolean; - onToggled?: () => void; -} - -const ContentWrapper: React.FC> = ({ children, isOpen, onToggled }) => { - return ( - - {children} - - ); -}; - -export default ContentWrapper; diff --git a/airbyte-webapp/src/components/JobItem/components/DebugInfoDetailsModal.module.scss b/airbyte-webapp/src/components/JobItem/components/DebugInfoDetailsModal.module.scss deleted file mode 100644 index d1566a69a9f9..000000000000 --- a/airbyte-webapp/src/components/JobItem/components/DebugInfoDetailsModal.module.scss +++ /dev/null @@ -1,3 +0,0 @@ -.buttonWithMargin { - margin-right: 9px; -} diff --git a/airbyte-webapp/src/components/JobItem/components/DownloadButton.tsx b/airbyte-webapp/src/components/JobItem/components/DownloadButton.tsx deleted file mode 100644 index 892e0464637a..000000000000 --- a/airbyte-webapp/src/components/JobItem/components/DownloadButton.tsx +++ /dev/null @@ -1,40 +0,0 @@ -import { faFileDownload } from "@fortawesome/free-solid-svg-icons"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import React from "react"; -import { useIntl } from "react-intl"; - 
-import { Button } from "components/ui/Button"; - -import { JobDebugInfoRead } from "core/request/AirbyteClient"; -import { useCurrentWorkspaceId, useGetWorkspace } from "services/workspaces/WorkspacesService"; -import { downloadFile, fileizeString } from "utils/file"; - -interface DownloadButtonProps { - jobDebugInfo: JobDebugInfoRead; - fileName: string; -} - -const DownloadButton: React.FC = ({ jobDebugInfo, fileName }) => { - const { formatMessage } = useIntl(); - const { name } = useGetWorkspace(useCurrentWorkspaceId()); - - const downloadFileWithLogs = () => { - const file = new Blob([jobDebugInfo.attempts.flatMap((info) => info.logs.logLines).join("\n")], { - type: "text/plain;charset=utf-8", - }); - downloadFile(file, fileizeString(`${name}-${fileName}.txt`)); - }; - - return ( - - - - - - ); -}; diff --git a/airbyte-webapp/src/components/common/ConfirmationModal/index.ts b/airbyte-webapp/src/components/common/ConfirmationModal/index.ts deleted file mode 100644 index 13a5a10e7dd6..000000000000 --- a/airbyte-webapp/src/components/common/ConfirmationModal/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { ConfirmationModal } from "./ConfirmationModal"; diff --git a/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.module.scss b/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.module.scss deleted file mode 100644 index 7e0592687e48..000000000000 --- a/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.module.scss +++ /dev/null @@ -1,5 +0,0 @@ -.content { - height: 25px; - width: 25px; - overflow: hidden; -} diff --git a/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.story.module.scss b/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.story.module.scss deleted file mode 100644 index 9d57d455817f..000000000000 --- a/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.story.module.scss +++ /dev/null @@ -1,25 +0,0 @@ -.wrapper { - display: flex; - flex-direction: column; - gap: 
20px; -} - -.container { - border: 1px solid tomato; -} - -.small { - width: 17px; - height: 17px; -} - -.large { - width: 40px; - height: 40px; -} - -.huge { - width: 100px; - height: 100px; - background-color: hotpink; -} diff --git a/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.tsx b/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.tsx deleted file mode 100644 index b6a3dd9bfff7..000000000000 --- a/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.tsx +++ /dev/null @@ -1,17 +0,0 @@ -import classNames from "classnames"; -import React from "react"; - -import { getIcon } from "utils/imageUtils"; - -import styles from "./ConnectorIcon.module.scss"; - -interface ConnectorIconProps { - icon?: string; - className?: string; -} - -export const ConnectorIcon: React.FC = ({ className, icon }) => ( - -); diff --git a/airbyte-webapp/src/components/common/ConnectorIcon/index.stories.tsx b/airbyte-webapp/src/components/common/ConnectorIcon/index.stories.tsx deleted file mode 100644 index 70790970f453..000000000000 --- a/airbyte-webapp/src/components/common/ConnectorIcon/index.stories.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import { ComponentStory, ComponentMeta } from "@storybook/react"; -import classNames from "classnames"; - -import { ConnectorCard } from "components/ConnectorCard"; - -import { ConnectorIcon } from "./ConnectorIcon"; -import styles from "./ConnectorIcon.story.module.scss"; - -export default { - title: "Common/ConnectorIcon", - component: ConnectorIcon, -} as ComponentMeta; - -const Template: ComponentStory = (args) => ; - -export const Primary = Template.bind({}); -Primary.args = { - icon: ` - -`, -}; - -export const ValidateIcons = ({ icon }: { icon: string }) => ( -
    -