ci: use custom action to upload job results
c-p-b committed Nov 1, 2022
1 parent 804bf47 commit fc29dc1
Showing 2 changed files with 179 additions and 25 deletions.
130 changes: 105 additions & 25 deletions .github/workflows/gradle.yml
@@ -84,6 +84,7 @@ jobs:
${{ secrets.SUPERTOPHER_PAT }} \
${{ secrets.DAVINCHIA_PAT }}
# Uncomment to debug.
# changes-output:
# name: "Debug Change Detection Logic"
@@ -235,6 +236,37 @@ jobs:
      - name: Ensure no file change
        run: git --no-pager diff && test -z "$(git --no-pager diff)"

      - name: Publish Connectors Base Test Results
        uses: EnricoMi/publish-unit-test-result-action@v2
        id: connectors-test-results
        if: always()
        with:
          junit_files: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml\n/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml"
          comment_mode: off
          json_file: connectors_base_results.json
          json_test_case_results: true
          check_name: "Connectors Base Test Results"

      - name: Setup Cloud SDK
        if: always()
        uses: google-github-actions/setup-gcloud@v0
        with:
          service_account_key: ${{ secrets.GKE_TEST_SA_KEY }}
          export_default_credentials: true

      - name: Prep Test Results For GCS
        if: always()
        run: |
          python tools/bin/prep_test_results_for_gcs.py --json connectors_base_results.json
      - name: Upload Test Results to GCS
        if: always()
        run: |
          gcs_bucket_name="dev-ab-ci-run-results"
          filename=$(echo "${{ fromJSON( steps.connectors-test-results.outputs.json ).check_url }}" | sed 's@.*/@@')
          echo "$filename"
          gsutil -h "Cache-Control:public" cp connectors_base_results.jsonl "gs://$gcs_bucket_name/$filename.jsonl"
      - name: Generate Test Report
        uses: dorny/test-reporter@v1
        if: always()
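
For context: the "Upload Test Results to GCS" step above derives the uploaded object's name from the check run URL returned by the publish action; sed 's@.*/@@' strips everything up to and including the last slash, leaving the numeric check run id. A minimal Python sketch of the same transformation (the URL below is illustrative, assuming check_url ends in that id):

# Illustrative sketch, not part of the commit: the filename derivation done by the sed pipeline above.
check_url = "https://github.com/airbytehq/airbyte/runs/9000123456"  # hypothetical check_url value
filename = check_url.rsplit("/", 1)[-1]  # same effect as: sed 's@.*/@@'
print(filename)  # "9000123456" -> uploaded as gs://dev-ab-ci-run-results/9000123456.jsonl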
@@ -522,6 +554,42 @@ jobs:
      - name: Automatic Migration Acceptance Test
        run: SUB_BUILD=PLATFORM ./gradlew :airbyte-tests:automaticMigrationAcceptanceTest --scan -i

      - uses: actions/setup-python@v2
        if: always()
        with:
          python-version: "3.9"

      - name: Publish Platform Test Results
        uses: EnricoMi/publish-unit-test-result-action@v2
        id: platform-results
        if: always()
        with:
          junit_files: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml\n/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml"
          comment_mode: off
          json_file: platform_results.json
          json_test_case_results: true
          check_name: "Platform Test Results"

      - name: Setup Cloud SDK
        if: always()
        uses: google-github-actions/setup-gcloud@v0
        with:
          service_account_key: ${{ secrets.GKE_TEST_SA_KEY }}
          export_default_credentials: true

      - name: Prep Test Results For GCS
        if: always()
        run: |
          python tools/bin/prep_test_results_for_gcs.py --json platform_results.json
      - name: Upload Test Results to GCS
        if: always()
        run: |
          gcs_bucket_name="dev-ab-ci-run-results"
          filename=$(echo "${{ fromJSON( steps.platform-results.outputs.json ).check_url }}" | sed 's@.*/@@')
          echo "$filename"
          gsutil -h "Cache-Control:public" cp platform_results.jsonl "gs://$gcs_bucket_name/$filename.jsonl"
      - name: Generate Test Report
        uses: dorny/test-reporter@v1
        if: always() # run this step even if previous step failed
@@ -541,15 +609,6 @@ jobs:
key: ${{ secrets.BUILDPULSE_ACCESS_KEY_ID }}
secret: ${{ secrets.BUILDPULSE_SECRET_ACCESS_KEY }}

      - name: Upload test results to Github for analysis
        if: '!cancelled()' # Run this step even when the tests fail. Skip if the workflow is cancelled.
        uses: actions/upload-artifact@v3
        with:
          path: |
            /actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml
            /actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml
          name: test-results-build

# In case of self-hosted EC2 errors, remove this block.
stop-platform-build-runner:
name: "Platform: Stop Build EC2 Runner"
@@ -681,6 +740,41 @@ jobs:
        run: |
          CI=true IS_MINIKUBE=true ./tools/bin/acceptance_test_kube.sh
      - uses: actions/setup-python@v2
        with:
          python-version: "3.9"

      - name: Publish Kube Test Results
        id: kube-results
        uses: EnricoMi/publish-unit-test-result-action@v2
        if: always()
        with:
          junit_files: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml\n/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml"
          comment_mode: off
          json_file: kube_results.json
          json_test_case_results: true
          check_name: "Kube Test Results"

      - name: Setup Cloud SDK
        if: always()
        uses: google-github-actions/setup-gcloud@v0
        with:
          service_account_key: ${{ secrets.GKE_TEST_SA_KEY }}
          export_default_credentials: true

      - name: Prep Test Results For GCS
        if: always()
        run: |
          python tools/bin/prep_test_results_for_gcs.py --json kube_results.json
      - name: Upload Test Results to GCS
        if: always()
        run: |
          gcs_bucket_name="dev-ab-ci-run-results"
          filename=$(echo "${{ fromJSON( steps.kube-results.outputs.json ).check_url }}" | sed 's@.*/@@')
          echo "$filename"
          gsutil -h "Cache-Control:public" cp kube_results.jsonl "gs://$gcs_bucket_name/$filename.jsonl"
      - name: Generate Test Report
        uses: dorny/test-reporter@v1
        if: always() # run this step even if previous step failed
@@ -699,20 +793,13 @@ jobs:
key: ${{ secrets.BUILDPULSE_ACCESS_KEY_ID }}
secret: ${{ secrets.BUILDPULSE_SECRET_ACCESS_KEY }}

      - name: Upload test results to Github for analysis
        if: '!cancelled()' # Run this step even when the tests fail. Skip if the workflow is cancelled.
        uses: actions/upload-artifact@v3
        with:
          path: |
            /actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml
            /actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml
          name: test-results-kube

      - uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: Kubernetes Logs
          path: /tmp/kubernetes_logs/*


# In case of self-hosted EC2 errors, remove this block.
stop-kube-acceptance-test-runner:
name: "Platform: Stop Kube Acceptance Test EC2 Runner"
@@ -859,13 +946,6 @@ jobs:
# SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }}
# run: |
# CI=true IS_MINIKUBE=true ./tools/bin/acceptance_test_kube_helm.sh
# - name: Generate Test Report
# uses: dorny/test-reporter@v1
# if: always() # run this step even if previous step failed
# with:
# name: Platform Helm E2E Test Report
# path: '/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml'
# reporter: java-junit
#
# - uses: actions/upload-artifact@v2
# if: failure()
74 changes: 74 additions & 0 deletions tools/bin/prep_test_results_for_gcs.py
@@ -0,0 +1,74 @@
import argparse
import json


'''
This script is intended to be run in conjunction with https://github.com/EnricoMi/publish-unit-test-result-action.
It takes the path of the JSON file output by that action, trims each test case down to a handful of fields, and
writes the result next to the input with a ".jsonl" extension so the workflow can upload it to GCS for further analysis.
'''

# Initialize the parser
parser = argparse.ArgumentParser()

# Add long and short argument
parser.add_argument("--json", "-j", help="Path to the result JSON output by https://github.com/EnricoMi/publish-unit-test-result-action")

def main():
    # Read arguments from the command line
    args = parser.parse_args()

    with open(args.json) as f:
        d = json.load(f)

    # The last URL segment is the numeric check run id.
    check_run_id = int(d["check_url"].split("/")[-1])

    # Flatten every test case into one record per (state, case) pair.
    out = []
    for elem in d["cases"]:
        for state in ("success", "failure", "skipped"):
            for case in elem["states"].get(state, []):
                out.append({
                    "test_name": case["test_name"],
                    "class_name": case["class_name"],
                    "result_file": case["result_file"],
                    "time": case["time"],
                    "state": state,
                    "check_run_id": check_run_id,
                })

    # e.g. "platform_results.json" -> "platform_results.jsonl": one JSON object per line.
    with open(args.json + "l", "w") as f:
        for o in out:
            json.dump(o, f)
            f.write("\n")


if __name__ == '__main__':
    main()
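
As a usage reference, here is a minimal sketch (not part of the commit) of consuming the JSONL file the script writes; the file name follows the workflow above, and each record carries the six fields emitted by main():

import json

# Read back the trimmed results written by prep_test_results_for_gcs.py
# ("platform_results.json" -> "platform_results.jsonl", one JSON object per line).
with open("platform_results.jsonl") as f:
    for line in f:
        case = json.loads(line)
        print(case["state"], case["class_name"], case["test_name"], case["time"])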
