Auto merge of #136977 - Kobzol:citool-datadog, r=<try>
[WIP] Upload Datadog metrics with citool

Opening as a draft for testing.

r? `@ghost`
bors committed Feb 14, 2025
2 parents 905b1bf + 38ad38d commit a4e8dc6
Showing 19 changed files with 1,877 additions and 5,462 deletions.
89 changes: 47 additions & 42 deletions .github/workflows/ci.yml
@@ -1,8 +1,8 @@
 # This file defines our primary CI workflow that runs on pull requests
 # and also on pushes to special branches (auto, try).
 #
-# The actual definition of the executed jobs is calculated by a Python
-# script located at src/ci/github-actions/ci.py, which
+# The actual definition of the executed jobs is calculated by the
+# `src/ci/citool` crate, which
 # uses job definition data from src/ci/github-actions/jobs.yml.
 # You should primarily modify the `jobs.yml` file if you want to modify
 # what jobs are executed in CI.
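The citool crate itself is not shown on this page, only the workflow that calls it. As a rough sketch of what a `calculate-job-matrix` command plausibly does (deserialize `jobs.yml`, pick a job set, and print a `jobs=<json>` line for `$GITHUB_OUTPUT`); all type and field names below are assumptions, not the crate's real API:

```rust
// Hypothetical sketch of a `calculate-job-matrix` command. The real
// src/ci/citool crate is not part of this diff, so every name below
// (JobDatabase, Job, the jobs.yml field layout) is an assumption.
use std::collections::BTreeMap;

use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Deserialize, Serialize)]
struct Job {
    name: String,
    #[serde(default)]
    env: BTreeMap<String, String>,
}

#[allow(dead_code)]
#[derive(Deserialize)]
struct JobDatabase {
    pr: Vec<Job>,
    auto: Vec<Job>,
    #[serde(rename = "try")]
    try_: Vec<Job>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let yaml = std::fs::read_to_string("../github-actions/jobs.yml")?;
    let db: JobDatabase = serde_yaml::from_str(&yaml)?;

    // Select the job set for the current run type (pr/try/auto);
    // hard-coded to `pr` to keep the sketch short.
    let jobs = db.pr;

    // GitHub Actions parses `key=value` lines appended to $GITHUB_OUTPUT,
    // which is what `>> $GITHUB_OUTPUT` captures in the workflow step.
    println!("jobs={}", serde_json::to_string(&jobs)?);
    Ok(())
}
```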
@@ -56,7 +56,10 @@ jobs:
       - name: Calculate the CI job matrix
         env:
           COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
-        run: python3 src/ci/github-actions/ci.py calculate-job-matrix >> $GITHUB_OUTPUT
+        run: |
+          cd src/ci/citool
+          cargo test
+          cargo run calculate-job-matrix >> $GITHUB_OUTPUT
         id: jobs
   job:
     name: ${{ matrix.full_name }}
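The new step also runs `cargo test` before computing the matrix, so a malformed `jobs.yml` fails CI early. A hypothetical test in that spirit, reusing the `JobDatabase` sketch above (the crate's real tests are not shown on this page):

```rust
// Hypothetical test, reusing the JobDatabase sketch above. Running
// `cargo test` in the workflow step means a jobs.yml that no longer
// parses fails CI before any matrix is emitted.
#[cfg(test)]
mod tests {
    use super::JobDatabase;

    #[test]
    fn jobs_yml_parses() {
        let yaml = std::fs::read_to_string("../github-actions/jobs.yml").unwrap();
        let db: JobDatabase = serde_yaml::from_str(&yaml).unwrap();
        assert!(!db.pr.is_empty(), "the PR job set must not be empty");
    }
}
```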
@@ -173,52 +176,54 @@ jobs:
       - name: ensure the stable version number is correct
         run: src/ci/scripts/verify-stable-version-number.sh
 
-      - name: run the build
-        # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
-        run: src/ci/scripts/run-build-from-ci.sh 2>&1
-        env:
-          AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
-
-      - name: create github artifacts
-        run: src/ci/scripts/create-doc-artifacts.sh
-
-      - name: print disk usage
+      # Prebuild citool before the following step uninstalls rustup
+      - name: Build citool
         run: |
-          echo "disk usage:"
-          df -h
-
-      - name: upload artifacts to github
-        uses: actions/upload-artifact@v4
-        with:
-          # name is set in previous step
-          name: ${{ env.DOC_ARTIFACT_NAME }}
-          path: obj/artifacts/doc
-          if-no-files-found: ignore
-          retention-days: 5
-
-      - name: upload artifacts to S3
-        run: src/ci/scripts/upload-artifacts.sh
-        env:
-          AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
-        # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
-        # builders *should* have the AWS credentials available. Still, explicitly
-        # adding the condition is helpful as this way CI will not silently skip
-        # deploying artifacts from a dist builder if the variables are misconfigured,
-        # erroring about invalid credentials instead.
-        if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
+          cd src/ci/citool
+          cargo build
+
+#      - name: run the build
+#        # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
+#        run: src/ci/scripts/run-build-from-ci.sh 2>&1
+#        env:
+#          AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
+#          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
+#
+#      - name: create github artifacts
+#        run: src/ci/scripts/create-doc-artifacts.sh
+#
+#      - name: print disk usage
+#        run: |
+#          echo "disk usage:"
+#          df -h
+#
+#      - name: upload artifacts to github
+#        uses: actions/upload-artifact@v4
+#        with:
+#          # name is set in previous step
+#          name: ${{ env.DOC_ARTIFACT_NAME }}
+#          path: obj/artifacts/doc
+#          if-no-files-found: ignore
+#          retention-days: 5
+#
+#      - name: upload artifacts to S3
+#        run: src/ci/scripts/upload-artifacts.sh
+#        env:
+#          AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
+#          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
+#        # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
+#        # builders *should* have the AWS credentials available. Still, explicitly
+#        # adding the condition is helpful as this way CI will not silently skip
+#        # deploying artifacts from a dist builder if the variables are misconfigured,
+#        # erroring about invalid credentials instead.
+#        if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
 
       - name: upload job metrics to DataDog
         if: needs.calculate_matrix.outputs.run_type != 'pr'
         env:
           DATADOG_SITE: datadoghq.com
           DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
           DD_GITHUB_JOB_NAME: ${{ matrix.full_name }}
-        run: |
-          cd src/ci
-          npm ci
-          python3 scripts/upload-build-metrics.py ../../build/cpu-usage.csv
+        run: ./src/ci/citool/target/debug/citool upload-build-metrics build/cpu-usage.csv

   # This job is used to tell bors the final status of the build, as there is no practical way to detect
   # when a workflow is successful listening to webhooks only in our current bors implementation (homu).
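The last change swaps the Node-plus-Python metrics uploader (`npm ci` followed by `upload-build-metrics.py`) for a single `citool upload-build-metrics` subcommand, invoked as the debug binary produced by the earlier Build citool step. Its implementation is not on this page; the following is a minimal sketch of the idea, assuming a header row plus `timestamp,value` CSV layout, the `ureq` crate for HTTP, and Datadog's documented `POST /api/v2/series` intake endpoint:

```rust
// Hypothetical sketch of an upload-build-metrics subcommand; the real
// citool implementation is not in this diff. The CSV layout and metric
// name are assumptions; the endpoint is Datadog's documented v2 series
// intake API.
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let csv_path = std::env::args().nth(1).expect("path to cpu-usage.csv");
    let api_key = std::env::var("DATADOG_API_KEY")?;
    let job_name = std::env::var("DD_GITHUB_JOB_NAME")?;

    // Assumed CSV layout: a header row, then `<unix-timestamp>,<cpu-percent>`
    // rows, one per sample; skip(1) drops the header.
    let mut points = Vec::new();
    for line in std::fs::read_to_string(&csv_path)?.lines().skip(1) {
        let (ts, value) = line.split_once(',').ok_or("malformed row")?;
        points.push(json!({
            "timestamp": ts.trim().parse::<i64>()?,
            "value": value.trim().parse::<f64>()?,
        }));
    }

    // One gauge series per CI job, tagged so dashboards can filter on it.
    let body = json!({
        "series": [{
            "metric": "ci.cpu_usage", // assumed metric name
            "type": 3,                // 3 = gauge in the v2 series API
            "points": points,
            "tags": [format!("job:{job_name}")],
        }]
    });

    ureq::post("https://api.datadoghq.com/api/v2/series")
        .set("DD-API-KEY", &api_key)
        .send_json(body)?;
    Ok(())
}
```

Since the step reuses `./src/ci/citool/target/debug/citool`, no toolchain is needed at upload time, which is why the binary is built earlier: per the comment in the diff, a later step uninstalls rustup.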