diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml
index 012d7541d24d..fdf484ba6a9f 100644
--- a/.github/workflows/gradle.yml
+++ b/.github/workflows/gradle.yml
@@ -86,6 +86,7 @@ jobs:
             ${{ secrets.SUPERTOPHER_PAT }} \
             ${{ secrets.DAVINCHIA_PAT }}
+  # Uncomment to debug.
   # changes-output:
   #   name: "Debug Change Detection Logic"
@@ -237,6 +238,37 @@ jobs:
       - name: Ensure no file change
         run: git --no-pager diff && test -z "$(git --no-pager diff)"
+      - name: Publish Connectors Base Test Results
+        uses: EnricoMi/publish-unit-test-result-action@v2
+        id: connectors-test-results
+        if: always()
+        with:
+          junit_files: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml\n/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml"
+          comment_mode: off
+          json_file: connectors_base_results.json
+          json_test_case_results: true
+          check_name: "Connectors Base Test Results"
+
+      - name: Setup Google Cloud SDK
+        if: always()
+        uses: google-github-actions/setup-gcloud@v0
+        with:
+          service_account_key: ${{ secrets.GKE_TEST_SA_KEY }}
+          export_default_credentials: true
+
+      - name: Prep Test Results For GCS
+        if: always()
+        run: |
+          python tools/bin/prep_test_results_for_gcs.py --json connectors_base_results.json
+
+      - name: Upload Test Results to GCS
+        if: always()
+        run: |
+          gcs_bucket_name="dev-ab-ci-run-results"
+          filename=$(echo "${{ fromJSON( steps.connectors-test-results.outputs.json ).check_url }}" | sed 's@.*/@@')
+          echo "$filename"
+          gsutil -h "Cache-Control:public" cp connectors_base_results.jsonl "gs://$gcs_bucket_name/oss/$filename.jsonl"
+
       - name: Generate Test Report
         uses: dorny/test-reporter@v1
         if: always()
@@ -524,6 +556,42 @@ jobs:
       - name: Automatic Migration Acceptance Test
         run: SUB_BUILD=PLATFORM ./gradlew :airbyte-tests:automaticMigrationAcceptanceTest --scan -i
+      - uses: actions/setup-python@v2
+        if: always()
+        with:
+          python-version: "3.9"
+
+      - name: Publish Platform Test Results
+        uses: EnricoMi/publish-unit-test-result-action@v2
+        id: platform-results
+        if: always()
+        with:
+          junit_files: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml\n/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml"
+          comment_mode: off
+          json_file: platform_results.json
+          json_test_case_results: true
+          check_name: "Platform Test Results"
+
+      - name: Setup Google Cloud SDK
+        if: always()
+        uses: google-github-actions/setup-gcloud@v0
+        with:
+          service_account_key: ${{ secrets.GKE_TEST_SA_KEY }}
+          export_default_credentials: true
+
+      - name: Prep Test Results For GCS
+        if: always()
+        run: |
+          python tools/bin/prep_test_results_for_gcs.py --json platform_results.json
+
+      - name: Upload Test Results to GCS
+        if: always()
+        run: |
+          gcs_bucket_name="dev-ab-ci-run-results"
+          filename=$(echo "${{ fromJSON( steps.platform-results.outputs.json ).check_url }}" | sed 's@.*/@@')
+          echo "$filename"
+          gsutil -h "Cache-Control:public" cp platform_results.jsonl "gs://$gcs_bucket_name/oss/$filename.jsonl"
+
       - name: Generate Test Report
         uses: dorny/test-reporter@v1
         if: always() # run this step even if previous step failed
@@ -543,15 +611,6 @@ jobs:
           key: ${{ secrets.BUILDPULSE_ACCESS_KEY_ID }}
           secret: ${{ secrets.BUILDPULSE_SECRET_ACCESS_KEY }}
-      - name: Upload test results to Github for analysis
-        if: '!cancelled()' # Run this step even when the tests fail. Skip if the workflow is cancelled.
-        uses: actions/upload-artifact@v3
-        with:
-          path: |
-            /actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml
-            /actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml
-          name: test-results-build
-
   # In case of self-hosted EC2 errors, remove this block.
   stop-platform-build-runner:
     name: "Platform: Stop Build EC2 Runner"
@@ -683,6 +742,42 @@ jobs:
         run: |
           CI=true IS_MINIKUBE=true ./tools/bin/acceptance_test_kube.sh
+      - uses: actions/setup-python@v2
+        if: always()
+        with:
+          python-version: "3.9"
+
+      - name: Publish Kube Test Results
+        id: kube-results
+        uses: EnricoMi/publish-unit-test-result-action@v2
+        if: always()
+        with:
+          junit_files: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml\n/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml"
+          comment_mode: off
+          json_file: kube_results.json
+          json_test_case_results: true
+          check_name: "Kube Test Results"
+
+      - name: Setup Google Cloud SDK
+        if: always()
+        uses: google-github-actions/setup-gcloud@v0
+        with:
+          service_account_key: ${{ secrets.GKE_TEST_SA_KEY }}
+          export_default_credentials: true
+
+      - name: Prep Test Results For GCS
+        if: always()
+        run: |
+          python tools/bin/prep_test_results_for_gcs.py --json kube_results.json
+
+      - name: Upload Test Results to GCS
+        if: always()
+        run: |
+          gcs_bucket_name="dev-ab-ci-run-results"
+          filename=$(echo "${{ fromJSON( steps.kube-results.outputs.json ).check_url }}" | sed 's@.*/@@')
+          echo "$filename"
+          gsutil -h "Cache-Control:public" cp kube_results.jsonl "gs://$gcs_bucket_name/oss/$filename.jsonl"
+
       - name: Generate Test Report
         uses: dorny/test-reporter@v1
         if: always() # run this step even if previous step failed
@@ -701,20 +796,13 @@ jobs:
          key: ${{ secrets.BUILDPULSE_ACCESS_KEY_ID }}
          secret: ${{ secrets.BUILDPULSE_SECRET_ACCESS_KEY }}
-      - name: Upload test results to Github for analysis
-        if: '!cancelled()' # Run this step even when the tests fail. Skip if the workflow is cancelled.
-        uses: actions/upload-artifact@v3
-        with:
-          path: |
-            /actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml
-            /actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml
-          name: test-results-kube
-
       - uses: actions/upload-artifact@v2
         if: failure()
         with:
           name: Kubernetes Logs
           path: /tmp/kubernetes_logs/*
+
+  # In case of self-hosted EC2 errors, remove this block.
   stop-kube-acceptance-test-runner:
     name: "Platform: Stop Kube Acceptance Test EC2 Runner"
@@ -861,13 +949,6 @@ jobs:
 #        SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }}
 #      run: |
 #        CI=true IS_MINIKUBE=true ./tools/bin/acceptance_test_kube_helm.sh
-#      - name: Generate Test Report
-#        uses: dorny/test-reporter@v1
-#        if: always() # run this step even if previous step failed
-#        with:
-#          name: Platform Helm E2E Test Report
-#          path: '/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml'
-#          reporter: java-junit
 #
 #      - uses: actions/upload-artifact@v2
 #        if: failure()
diff --git a/tools/bin/prep_test_results_for_gcs.py b/tools/bin/prep_test_results_for_gcs.py
new file mode 100644
index 000000000000..7f658eb381fc
--- /dev/null
+++ b/tools/bin/prep_test_results_for_gcs.py
@@ -0,0 +1,54 @@
+import argparse
+import json
+import os
+
+
+'''
+
+This script is intended to be run in conjunction with https://github.com/EnricoMi/publish-unit-test-result-action to upload trimmed
+test results from the output to a GCS bucket for further analysis.
+
+The script takes as input the filename of the JSON file output by the aforementioned action, trims it, and writes the result out in JSONL format to the same path with a ".jsonl" extension.
+
+'''
+
+# Initialize the argument parser
+parser = argparse.ArgumentParser()
+
+# Add long and short argument
+parser.add_argument("--json", "-j", help="Path to the result json output by https://github.com/EnricoMi/publish-unit-test-result-action")
+
+def main():
+    # Read arguments from the command line
+    args = parser.parse_args()
+
+    f = open(args.json)
+    d = json.load(f)
+    out = []
+
+    check_run_id = int(d["check_url"].split("/")[-1])
+
+    for elem in d['cases']:
+        for conclusion in ('success', 'failure', 'skipped'):
+            if conclusion not in elem['states']:
+                continue
+            for i in range(len(elem['states'][conclusion])):
+                output = {
+                    "test_name": elem['states'][conclusion][i]['test_name'],
+                    "class_name": elem['states'][conclusion][i]['class_name'],
+                    "result_file": elem['states'][conclusion][i]['result_file'],
+                    "time": elem['states'][conclusion][i]['time'],
+                    "state": conclusion,
+                    "check_run_id": check_run_id,
+                    "repo": "airbytehq/airbyte"
+                }
+                out.append(output)
+
+    with open(args.json + "l", 'w') as f:
+        for o in out:
+            json.dump(o, f)
+            f.write('\n')
+
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
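For reference, the sketch below condenses what prep_test_results_for_gcs.py does to the action's JSON output. It is illustrative only: the check_url and test-case values are made-up placeholders, and only the keys the script actually reads are shown.

import json

# Hypothetical sample shaped like the publish-unit-test-result-action JSON output.
sample = {
    "check_url": "https://api.github.com/repos/airbytehq/airbyte/check-runs/1234567890",
    "cases": [
        {
            "states": {
                "success": [
                    {
                        "test_name": "testSpec",
                        "class_name": "io.airbyte.ExampleTest",
                        "result_file": "TEST-ExampleTest.xml",
                        "time": 0.42,
                    }
                ]
            }
        }
    ],
}

# The check run id is the last path segment of check_url; the workflow's sed
# expression extracts the same value to name the object uploaded to GCS.
check_run_id = int(sample["check_url"].split("/")[-1])

# One trimmed JSON object per test case per state, i.e. one line of the .jsonl file.
for case in sample["cases"]:
    for state, results in case["states"].items():
        for r in results:
            print(json.dumps({**r, "state": state, "check_run_id": check_run_id, "repo": "airbytehq/airbyte"}))

Each emitted line ends up in gs://dev-ab-ci-run-results/oss/<check run id>.jsonl via the "Upload Test Results to GCS" steps above.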