diff --git a/.gitattributes b/.gitattributes index 9bd26362b0e87..1762f1a04d802 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,3 @@ Cargo.lock linguist-generated=true +/.gitlab-ci.yml filter=ci-prettier +/scripts/ci/gitlab/pipeline/*.yml filter=ci-prettier diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ab3fd84d6e9ee..12a1ff825d162 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -43,57 +43,57 @@ workflow: - if: $CI_COMMIT_BRANCH variables: - GIT_STRATEGY: fetch - GIT_DEPTH: 100 - CARGO_INCREMENTAL: 0 - DOCKER_OS: "debian:stretch" - ARCH: "x86_64" - CI_IMAGE: "paritytech/ci-linux:production" - BUILDAH_IMAGE: "quay.io/buildah/stable:v1.27" - RUSTY_CACHIER_SINGLE_BRANCH: master + GIT_STRATEGY: fetch + GIT_DEPTH: 100 + CARGO_INCREMENTAL: 0 + DOCKER_OS: "debian:stretch" + ARCH: "x86_64" + CI_IMAGE: "paritytech/ci-linux:production" + BUILDAH_IMAGE: "quay.io/buildah/stable:v1.27" + RUSTY_CACHIER_SINGLE_BRANCH: master RUSTY_CACHIER_DONT_OPERATE_ON_MAIN_BRANCH: "true" RUSTY_CACHIER_COMPRESSION_METHOD: zstd - NEXTEST_FAILURE_OUTPUT: immediate-final - NEXTEST_SUCCESS_OUTPUT: final - ZOMBIENET_IMAGE: "docker.io/paritytech/zombienet:v1.3.34" + NEXTEST_FAILURE_OUTPUT: immediate-final + NEXTEST_SUCCESS_OUTPUT: final + ZOMBIENET_IMAGE: "docker.io/paritytech/zombienet:v1.3.34" -.shared-default: &shared-default +.shared-default: &shared-default retry: max: 2 when: - runner_system_failure - unknown_failure - api_failure - cache: {} + cache: {} .default-pipeline-definitions: default: - <<: *shared-default - interruptible: true + <<: *shared-default + interruptible: true .crate-publishing-pipeline-definitions: default: - <<: *shared-default + <<: *shared-default # The crate-publishing pipeline defaults to `interruptible: false` so that we'll be able to # reach and run the publishing jobs despite the "Auto-cancel redundant pipelines" CI setting. # The setting is relevant because the crate-publishing pipeline runs on `master`, thus future # pipelines on `master` (e.g. created for new commits or other schedules) might unintendedly # cancel the publishing jobs or its dependencies before we get to actually publish the crates. - interruptible: false + interruptible: false .collect-artifacts: artifacts: - name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" - when: on_success - expire_in: 7 days + name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" + when: on_success + expire_in: 7 days paths: - artifacts/ .collect-artifacts-short: artifacts: - name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" - when: on_success - expire_in: 3 hours + name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" + when: on_success + expire_in: 3 hours paths: - artifacts/ @@ -111,7 +111,7 @@ variables: - if echo "$CI_DISABLED_JOBS" | grep -xF "$CI_JOB_NAME"; then echo "The job has been cancelled in CI settings"; exit 0; fi .kubernetes-env: - image: "${CI_IMAGE}" + image: "${CI_IMAGE}" before_script: - !reference [.job-switcher, before_script] - !reference [.prepare-env, before_script] @@ -139,7 +139,7 @@ variables: dotenv: pipeline-stopper.env .docker-env: - image: "${CI_IMAGE}" + image: "${CI_IMAGE}" before_script: - !reference [.job-switcher, before_script] - !reference [.prepare-env, before_script] @@ -167,8 +167,8 @@ variables: - if: $CI_PIPELINE_SOURCE == "web" - if: $CI_PIPELINE_SOURCE == "schedule" - if: $CI_COMMIT_REF_NAME == "master" - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. 
v1.0, v2.1rc1 + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 # handle the specific case where benches could store incorrect bench data because of the downstream staging runs # exclude cargo-check-benches from such runs @@ -179,8 +179,8 @@ variables: - if: $CI_PIPELINE_SOURCE == "web" - if: $CI_PIPELINE_SOURCE == "schedule" - if: $CI_COMMIT_REF_NAME == "master" - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 .test-refs-no-trigger: rules: @@ -189,8 +189,8 @@ variables: - if: $CI_PIPELINE_SOURCE == "web" - if: $CI_PIPELINE_SOURCE == "schedule" - if: $CI_COMMIT_REF_NAME == "master" - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 - if: $CI_COMMIT_REF_NAME =~ /^ci-release-.*$/ .test-refs-no-trigger-prs-only: @@ -199,7 +199,7 @@ variables: when: never - if: $CI_PIPELINE_SOURCE == "web" - if: $CI_PIPELINE_SOURCE == "schedule" - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs .publish-refs: rules: @@ -208,7 +208,7 @@ variables: - if: $CI_PIPELINE_SOURCE == "web" - if: $CI_PIPELINE_SOURCE == "schedule" - if: $CI_COMMIT_REF_NAME == "master" - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 .build-refs: # publish-refs + PRs @@ -218,8 +218,8 @@ variables: - if: $CI_PIPELINE_SOURCE == "web" - if: $CI_PIPELINE_SOURCE == "schedule" - if: $CI_COMMIT_REF_NAME == "master" - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. 
v1.0, v2.1rc1 + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs .zombienet-refs: extends: .build-refs @@ -231,12 +231,12 @@ variables: .crates-publishing-variables: variables: - CRATESIO_CRATES_OWNER: parity-crate-owner - REPO: substrate - REPO_OWNER: paritytech + CRATESIO_CRATES_OWNER: parity-crate-owner + REPO: substrate + REPO_OWNER: paritytech .crates-publishing-pipeline: - extends: .crates-publishing-variables + extends: .crates-publishing-variables rules: - if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_REF_NAME == "master" && $PIPELINE == "automatic-crate-publishing" @@ -247,40 +247,40 @@ variables: # collect artifacts even on failure so that we know how the crates were generated (they'll be # generated to the artifacts folder according to SPUB_TMP further down) artifacts: - name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" - when: always - expire_in: 7 days + name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" + when: always + expire_in: 7 days paths: - artifacts/ variables: - SPUB_TMP: artifacts + SPUB_TMP: artifacts #### stage: .pre skip-if-draft: - extends: .kubernetes-env + extends: .kubernetes-env variables: - CI_IMAGE: "paritytech/tools:latest" - stage: .pre + CI_IMAGE: "paritytech/tools:latest" + stage: .pre rules: - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs script: - - echo "Commit message is ${CI_COMMIT_MESSAGE}" - - echo "Ref is ${CI_COMMIT_REF_NAME}" - - echo "pipeline source is ${CI_PIPELINE_SOURCE}" - - ./scripts/ci/gitlab/skip_if_draft.sh - allow_failure: true + - echo "Commit message is ${CI_COMMIT_MESSAGE}" + - echo "Ref is ${CI_COMMIT_REF_NAME}" + - echo "pipeline source is ${CI_PIPELINE_SOURCE}" + - ./scripts/ci/gitlab/skip_if_draft.sh + allow_failure: true check-crates-publishing-pipeline: - stage: .pre + stage: .pre extends: - .kubernetes-env - .crates-publishing-pipeline script: - git clone - --depth 1 - --branch "$RELENG_SCRIPTS_BRANCH" - https://github.com/paritytech/releng-scripts.git + --depth 1 + --branch "$RELENG_SCRIPTS_BRANCH" + https://github.com/paritytech/releng-scripts.git - ONLY_CHECK_PIPELINE=true ./releng-scripts/publish-crates include: @@ -314,17 +314,17 @@ include: #### stage: deploy deploy-prometheus-alerting-rules: - stage: deploy + stage: deploy needs: - - job: test-prometheus-alerting-rules - artifacts: false - allow_failure: true + - job: test-prometheus-alerting-rules + artifacts: false + allow_failure: true trigger: - project: parity/infrastructure/cloud-infra + project: parity/infrastructure/cloud-infra variables: - SUBSTRATE_CI_COMMIT_NAME: "${CI_COMMIT_REF_NAME}" - SUBSTRATE_CI_COMMIT_REF: "${CI_COMMIT_SHORT_SHA}" - UPSTREAM_TRIGGER_PROJECT: "${CI_PROJECT_PATH}" + SUBSTRATE_CI_COMMIT_NAME: "${CI_COMMIT_REF_NAME}" + SUBSTRATE_CI_COMMIT_REF: "${CI_COMMIT_SHORT_SHA}" + UPSTREAM_TRIGGER_PROJECT: "${CI_PROJECT_PATH}" rules: - if: $CI_PIPELINE_SOURCE == "pipeline" when: never @@ -339,12 +339,12 @@ deploy-prometheus-alerting-rules: # This info is later used for the cache distribution and an overlay creation. # Note that we don't use any .rusty-cachier references as we assume that a pipeline has reached this stage with working rusty-cachier. 
rusty-cachier-notify: - stage: notify - extends: .kubernetes-env + stage: notify + extends: .kubernetes-env variables: - CI_IMAGE: paritytech/rusty-cachier-env:latest - GIT_STRATEGY: none - dependencies: [] + CI_IMAGE: paritytech/rusty-cachier-env:latest + GIT_STRATEGY: none + dependencies: [] script: - curl -s https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.parity.io/parity/infrastructure/ci_cd/rusty-cachier/client/-/raw/release/util/install.sh | bash - rusty-cachier cache notify @@ -355,83 +355,83 @@ rusty-cachier-notify: # In a DAG, every jobs chain is executed independently of others. The `fail_fast` principle suggests # to fail the pipeline as soon as possible to shorten the feedback loop. .cancel-pipeline-template: - stage: .post + stage: .post rules: - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs when: on_failure variables: - PROJECT_ID: "${CI_PROJECT_ID}" - PROJECT_NAME: "${CI_PROJECT_NAME}" - PIPELINE_ID: "${CI_PIPELINE_ID}" - FAILED_JOB_URL: "${FAILED_JOB_URL}" - FAILED_JOB_NAME: "${FAILED_JOB_NAME}" - PR_NUM: "${PR_NUM}" + PROJECT_ID: "${CI_PROJECT_ID}" + PROJECT_NAME: "${CI_PROJECT_NAME}" + PIPELINE_ID: "${CI_PIPELINE_ID}" + FAILED_JOB_URL: "${FAILED_JOB_URL}" + FAILED_JOB_NAME: "${FAILED_JOB_NAME}" + PR_NUM: "${PR_NUM}" trigger: - project: "parity/infrastructure/ci_cd/pipeline-stopper" + project: "parity/infrastructure/ci_cd/pipeline-stopper" remove-cancel-pipeline-message: stage: .post rules: - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs variables: - PROJECT_ID: "${CI_PROJECT_ID}" - PROJECT_NAME: "${CI_PROJECT_NAME}" - PIPELINE_ID: "${CI_PIPELINE_ID}" - FAILED_JOB_URL: "https://gitlab.com" - FAILED_JOB_NAME: "nope" - PR_NUM: "${CI_COMMIT_REF_NAME}" + PROJECT_ID: "${CI_PROJECT_ID}" + PROJECT_NAME: "${CI_PROJECT_NAME}" + PIPELINE_ID: "${CI_PIPELINE_ID}" + FAILED_JOB_URL: "https://gitlab.com" + FAILED_JOB_NAME: "nope" + PR_NUM: "${CI_COMMIT_REF_NAME}" trigger: - project: "parity/infrastructure/ci_cd/pipeline-stopper" - branch: "as-improve" + project: "parity/infrastructure/ci_cd/pipeline-stopper" + branch: "as-improve" # need to copy jobs this way because otherwise gitlab will wait # for all 3 jobs to finish instead of cancelling if one fails cancel-pipeline-test-linux-stable1: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: "test-linux-stable 1/3" + - job: "test-linux-stable 1/3" cancel-pipeline-test-linux-stable2: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: "test-linux-stable 2/3" + - job: "test-linux-stable 2/3" cancel-pipeline-test-linux-stable3: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: "test-linux-stable 3/3" + - job: "test-linux-stable 3/3" cancel-pipeline-cargo-check-benches1: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: "cargo-check-benches 1/2" + - job: "cargo-check-benches 1/2" cancel-pipeline-cargo-check-benches2: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: "cargo-check-benches 2/2" + - job: "cargo-check-benches 2/2" cancel-pipeline-test-linux-stable-int: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: test-linux-stable-int + - job: test-linux-stable-int cancel-pipeline-cargo-check-each-crate-1: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: "cargo-check-each-crate 
1/2" + - job: "cargo-check-each-crate 1/2" cancel-pipeline-cargo-check-each-crate-2: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: "cargo-check-each-crate 2/2" + - job: "cargo-check-each-crate 2/2" cancel-pipeline-cargo-check-each-crate-macos: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: cargo-check-each-crate-macos + - job: cargo-check-each-crate-macos cancel-pipeline-check-tracing: - extends: .cancel-pipeline-template + extends: .cancel-pipeline-template needs: - - job: check-tracing + - job: check-tracing diff --git a/scripts/ci/gitlab/pipeline/build.yml b/scripts/ci/gitlab/pipeline/build.yml index ba529569d0fc1..02f25a02a2c22 100644 --- a/scripts/ci/gitlab/pipeline/build.yml +++ b/scripts/ci/gitlab/pipeline/build.yml @@ -4,63 +4,63 @@ # PIPELINE_SCRIPTS_TAG can be found in the project variables .check-dependent-project: - stage: build + stage: build # DAG: this is artificial dependency needs: - - job: cargo-clippy - artifacts: false + - job: cargo-clippy + artifacts: false extends: - .docker-env - .test-refs-no-trigger-prs-only script: - git clone - --depth=1 - --branch="$PIPELINE_SCRIPTS_TAG" - https://github.com/paritytech/pipeline-scripts + --depth=1 + --branch="$PIPELINE_SCRIPTS_TAG" + https://github.com/paritytech/pipeline-scripts - ./pipeline-scripts/check_dependent_project.sh - --org paritytech - --dependent-repo "$DEPENDENT_REPO" - --github-api-token "$GITHUB_PR_TOKEN" - --extra-dependencies "$EXTRA_DEPENDENCIES" - --companion-overrides "$COMPANION_OVERRIDES" + --org paritytech + --dependent-repo "$DEPENDENT_REPO" + --github-api-token "$GITHUB_PR_TOKEN" + --extra-dependencies "$EXTRA_DEPENDENCIES" + --companion-overrides "$COMPANION_OVERRIDES" # Individual jobs are set up for each dependent project so that they can be ran in parallel. # Arguably we could generate a job for each companion in the PR's description using Gitlab's # parent-child pipelines but that's more complicated. check-dependent-polkadot: - extends: .check-dependent-project + extends: .check-dependent-project variables: - DEPENDENT_REPO: polkadot + DEPENDENT_REPO: polkadot COMPANION_OVERRIDES: | substrate: polkadot-v* polkadot: release-v* rules: - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ #PRs + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ #PRs check-dependent-cumulus: - extends: .check-dependent-project + extends: .check-dependent-project variables: - DEPENDENT_REPO: cumulus - EXTRA_DEPENDENCIES: polkadot + DEPENDENT_REPO: cumulus + EXTRA_DEPENDENCIES: polkadot COMPANION_OVERRIDES: | substrate: polkadot-v* polkadot: release-v* rules: - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ #PRs + - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ #PRs build-linux-substrate: - stage: build + stage: build extends: - .collect-artifacts - .docker-env - .build-refs variables: # this variable gets overriden by "rusty-cachier environment inject", use the value as default - CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" + CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" needs: - - job: test-linux-stable - artifacts: false + - job: test-linux-stable + artifacts: false before_script: - !reference [.job-switcher, before_script] - mkdir -p ./artifacts/substrate/ @@ -74,10 +74,10 @@ build-linux-substrate: - mv $CARGO_TARGET_DIR/release/substrate ./artifacts/substrate/. 
- echo -n "Substrate version = " - if [ "${CI_COMMIT_TAG}" ]; then - echo "${CI_COMMIT_TAG}" | tee ./artifacts/substrate/VERSION; + echo "${CI_COMMIT_TAG}" | tee ./artifacts/substrate/VERSION; else - ./artifacts/substrate/substrate --version | - cut -d ' ' -f 2 | tee ./artifacts/substrate/VERSION; + ./artifacts/substrate/substrate --version | + cut -d ' ' -f 2 | tee ./artifacts/substrate/VERSION; fi - sha256sum ./artifacts/substrate/substrate | tee ./artifacts/substrate/substrate.sha256 - cp -r ./scripts/ci/docker/substrate.Dockerfile ./artifacts/substrate/ @@ -86,14 +86,14 @@ build-linux-substrate: - rusty-cachier cache upload .build-subkey: - stage: build + stage: build extends: - .collect-artifacts - .docker-env - .publish-refs variables: # this variable gets overriden by "rusty-cachier environment inject", use the value as default - CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" + CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" before_script: - !reference [.job-switcher, before_script] - mkdir -p ./artifacts/subkey @@ -106,17 +106,17 @@ build-linux-substrate: - mv $CARGO_TARGET_DIR/release/subkey ./artifacts/subkey/. - echo -n "Subkey version = " - ./artifacts/subkey/subkey --version | - sed -n -E 's/^subkey ([0-9.]+.*)/\1/p' | - tee ./artifacts/subkey/VERSION; + sed -n -E 's/^subkey ([0-9.]+.*)/\1/p' | + tee ./artifacts/subkey/VERSION; - sha256sum ./artifacts/subkey/subkey | tee ./artifacts/subkey/subkey.sha256 - cp -r ./scripts/ci/docker/subkey.Dockerfile ./artifacts/subkey/ - rusty-cachier cache upload build-subkey-linux: - extends: .build-subkey + extends: .build-subkey build-subkey-macos: - extends: .build-subkey + extends: .build-subkey # duplicating before_script & script sections from .build-subkey hidden job # to overwrite rusty-cachier integration as it doesn't work on macos before_script: @@ -129,8 +129,8 @@ build-subkey-macos: - mv ./target/release/subkey ./artifacts/subkey/. 
- echo -n "Subkey version = " - ./artifacts/subkey/subkey --version | - sed -n -E 's/^subkey ([0-9.]+.*)/\1/p' | - tee ./artifacts/subkey/VERSION; + sed -n -E 's/^subkey ([0-9.]+.*)/\1/p' | + tee ./artifacts/subkey/VERSION; - sha256sum ./artifacts/subkey/subkey | tee ./artifacts/subkey/subkey.sha256 - cp -r ./scripts/ci/docker/subkey.Dockerfile ./artifacts/subkey/ after_script: [""] @@ -138,26 +138,26 @@ build-subkey-macos: - osx build-rustdoc: - stage: build + stage: build extends: - .docker-env - .test-refs variables: - SKIP_WASM_BUILD: 1 - DOC_INDEX_PAGE: "sc_service/index.html" # default redirected page - RUSTY_CACHIER_TOOLCHAIN: nightly + SKIP_WASM_BUILD: 1 + DOC_INDEX_PAGE: "sc_service/index.html" # default redirected page + RUSTY_CACHIER_TOOLCHAIN: nightly # this variable gets overriden by "rusty-cachier environment inject", use the value as default - CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" + CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" artifacts: - name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}-doc" - when: on_success - expire_in: 7 days + name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}-doc" + when: on_success + expire_in: 7 days paths: - - ./crate-docs/ + - ./crate-docs/ # DAG: this is artificial dependency needs: - - job: cargo-clippy - artifacts: false + - job: cargo-clippy + artifacts: false script: - rusty-cachier snapshot create - time cargo +nightly doc --locked --workspace --all-features --verbose --no-deps diff --git a/scripts/ci/gitlab/pipeline/check.yml b/scripts/ci/gitlab/pipeline/check.yml index 55f0061501076..a29f31d4aa3ba 100644 --- a/scripts/ci/gitlab/pipeline/check.yml +++ b/scripts/ci/gitlab/pipeline/check.yml @@ -2,56 +2,56 @@ # Here are all jobs that are executed during "check" stage check-runtime: - stage: check + stage: check extends: - .kubernetes-env - .test-refs-no-trigger-prs-only variables: - CI_IMAGE: "paritytech/tools:latest" - GITLAB_API: "https://gitlab.parity.io/api/v4" - GITHUB_API_PROJECT: "parity%2Finfrastructure%2Fgithub-api" + CI_IMAGE: "paritytech/tools:latest" + GITLAB_API: "https://gitlab.parity.io/api/v4" + GITHUB_API_PROJECT: "parity%2Finfrastructure%2Fgithub-api" script: - ./scripts/ci/gitlab/check_runtime.sh - allow_failure: true + allow_failure: true check-signed-tag: - stage: check - extends: .kubernetes-env + stage: check + extends: .kubernetes-env variables: - CI_IMAGE: "paritytech/tools:latest" + CI_IMAGE: "paritytech/tools:latest" rules: - if: $CI_COMMIT_REF_NAME =~ /^ci-release-.*$/ - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 script: - ./scripts/ci/gitlab/check_signed.sh test-dependency-rules: - stage: check + stage: check extends: - .kubernetes-env - .test-refs-no-trigger-prs-only variables: - CI_IMAGE: "paritytech/tools:latest" + CI_IMAGE: "paritytech/tools:latest" script: - ./scripts/ci/gitlab/ensure-deps.sh test-rust-features: - stage: check + stage: check extends: - .kubernetes-env - .test-refs-no-trigger-prs-only script: - git clone - --depth=1 - --branch="$PIPELINE_SCRIPTS_TAG" - https://github.com/paritytech/pipeline-scripts + --depth=1 + --branch="$PIPELINE_SCRIPTS_TAG" + https://github.com/paritytech/pipeline-scripts - bash ./pipeline-scripts/rust-features.sh . 
test-prometheus-alerting-rules: - stage: check - extends: .kubernetes-env + stage: check + extends: .kubernetes-env variables: - CI_IMAGE: "paritytech/tools:latest" + CI_IMAGE: "paritytech/tools:latest" rules: - if: $CI_PIPELINE_SOURCE == "pipeline" when: never @@ -62,4 +62,4 @@ test-prometheus-alerting-rules: script: - promtool check rules ./scripts/ci/monitoring/alerting-rules/alerting-rules.yaml - cat ./scripts/ci/monitoring/alerting-rules/alerting-rules.yaml | - promtool test rules ./scripts/ci/monitoring/alerting-rules/alerting-rule-tests.yaml + promtool test rules ./scripts/ci/monitoring/alerting-rules/alerting-rule-tests.yaml diff --git a/scripts/ci/gitlab/pipeline/publish.yml b/scripts/ci/gitlab/pipeline/publish.yml index 3c280dec7325c..188a093864cc0 100644 --- a/scripts/ci/gitlab/pipeline/publish.yml +++ b/scripts/ci/gitlab/pipeline/publish.yml @@ -1,16 +1,15 @@ - # This file is part of .gitlab-ci.yml # Here are all jobs that are executed during "publish" stage .build-push-docker-image-common: extends: - .kubernetes-env - stage: publish + stage: publish variables: - CI_IMAGE: $BUILDAH_IMAGE - GIT_STRATEGY: none - DOCKERFILE: $PRODUCT.Dockerfile - IMAGE_NAME: docker.io/$IMAGE_PATH + CI_IMAGE: $BUILDAH_IMAGE + GIT_STRATEGY: none + DOCKERFILE: $PRODUCT.Dockerfile + IMAGE_NAME: docker.io/$IMAGE_PATH before_script: - !reference [.kubernetes-env, before_script] - cd ./artifacts/$PRODUCT/ @@ -19,17 +18,17 @@ - test -z "${VERSION}" && exit 1 script: - test "$DOCKER_USER" -a "$DOCKER_PASS" || - ( echo "no docker credentials provided"; exit 1 ) + ( echo "no docker credentials provided"; exit 1 ) - buildah bud - --format=docker - --build-arg VCS_REF="${CI_COMMIT_SHA}" - --build-arg BUILD_DATE="$(date -u '+%Y-%m-%dT%H:%M:%SZ')" - --build-arg IMAGE_NAME="${IMAGE_PATH}" - --tag "$IMAGE_NAME:$VERSION" - --tag "$IMAGE_NAME:latest" - --file "$DOCKERFILE" . + --format=docker + --build-arg VCS_REF="${CI_COMMIT_SHA}" + --build-arg BUILD_DATE="$(date -u '+%Y-%m-%dT%H:%M:%SZ')" + --build-arg IMAGE_NAME="${IMAGE_PATH}" + --tag "$IMAGE_NAME:$VERSION" + --tag "$IMAGE_NAME:latest" + --file "$DOCKERFILE" . 
- echo "$DOCKER_PASS" | - buildah login --username "$DOCKER_USER" --password-stdin docker.io + buildah login --username "$DOCKER_USER" --password-stdin docker.io - buildah info - buildah push --format=v2s2 "$IMAGE_NAME:$VERSION" - buildah push --format=v2s2 "$IMAGE_NAME:latest" @@ -45,25 +44,25 @@ - .publish-refs - .build-push-docker-image-common variables: - IMAGE_PATH: parity/$PRODUCT - DOCKER_USER: $Docker_Hub_User_Parity - DOCKER_PASS: $Docker_Hub_Pass_Parity + IMAGE_PATH: parity/$PRODUCT + DOCKER_USER: $Docker_Hub_User_Parity + DOCKER_PASS: $Docker_Hub_Pass_Parity .push-docker-image-description: - stage: publish + stage: publish extends: - .kubernetes-env - .publish-refs variables: - CI_IMAGE: paritytech/dockerhub-description - DOCKERHUB_REPOSITORY: parity/$PRODUCT - DOCKER_USERNAME: $Docker_Hub_User_Parity - DOCKER_PASSWORD: $Docker_Hub_Pass_Parity - README_FILEPATH: $CI_PROJECT_DIR/scripts/ci/docker/$PRODUCT.Dockerfile.README.md + CI_IMAGE: paritytech/dockerhub-description + DOCKERHUB_REPOSITORY: parity/$PRODUCT + DOCKER_USERNAME: $Docker_Hub_User_Parity + DOCKER_PASSWORD: $Docker_Hub_Pass_Parity + README_FILEPATH: $CI_PROJECT_DIR/scripts/ci/docker/$PRODUCT.Dockerfile.README.md rules: - - if: $CI_COMMIT_REF_NAME == "master" - changes: - - scripts/ci/docker/$PRODUCT.Dockerfile.README.md + - if: $CI_COMMIT_REF_NAME == "master" + changes: + - scripts/ci/docker/$PRODUCT.Dockerfile.README.md script: - cd / && sh entrypoint.sh @@ -73,94 +72,94 @@ - .build-refs - .build-push-docker-image-common variables: - IMAGE_PATH: paritypr/$PRODUCT - DOCKER_USER: $PARITYPR_USER - DOCKER_PASS: $PARITYPR_PASS + IMAGE_PATH: paritypr/$PRODUCT + DOCKER_USER: $PARITYPR_USER + DOCKER_PASS: $PARITYPR_PASS publish-docker-substrate: - extends: .build-push-docker-image + extends: .build-push-docker-image needs: - - job: build-linux-substrate - artifacts: true + - job: build-linux-substrate + artifacts: true variables: - PRODUCT: substrate + PRODUCT: substrate publish-docker-description-substrate: - extends: .push-docker-image-description + extends: .push-docker-image-description variables: - PRODUCT: substrate - SHORT_DESCRIPTION: "Substrate Docker Image." + PRODUCT: substrate + SHORT_DESCRIPTION: "Substrate Docker Image." publish-docker-substrate-temporary: - extends: .build-push-image-temporary + extends: .build-push-image-temporary needs: - - job: build-linux-substrate - artifacts: true + - job: build-linux-substrate + artifacts: true variables: - PRODUCT: substrate + PRODUCT: substrate artifacts: reports: # this artifact is used in zombienet-tests job # https://docs.gitlab.com/ee/ci/multi_project_pipelines.html#with-variable-inheritance dotenv: ./artifacts/$PRODUCT/build.env - expire_in: 24h + expire_in: 24h publish-docker-subkey: - extends: .build-push-docker-image + extends: .build-push-docker-image needs: - - job: build-subkey-linux - artifacts: true + - job: build-subkey-linux + artifacts: true variables: - PRODUCT: subkey + PRODUCT: subkey publish-docker-description-subkey: - extends: .push-docker-image-description + extends: .push-docker-image-description variables: - PRODUCT: subkey - SHORT_DESCRIPTION: "The subkey program is a key management utility for Substrate-based blockchains." + PRODUCT: subkey + SHORT_DESCRIPTION: "The subkey program is a key management utility for Substrate-based blockchains." 
publish-s3-release: - stage: publish + stage: publish extends: - .publish-refs - .kubernetes-env needs: - - job: build-linux-substrate - artifacts: true - - job: build-subkey-linux - artifacts: true - image: paritytech/awscli:latest + - job: build-linux-substrate + artifacts: true + - job: build-subkey-linux + artifacts: true + image: paritytech/awscli:latest variables: - GIT_STRATEGY: none - BUCKET: "releases.parity.io" - PREFIX: "substrate/${ARCH}-${DOCKER_OS}" + GIT_STRATEGY: none + BUCKET: "releases.parity.io" + PREFIX: "substrate/${ARCH}-${DOCKER_OS}" script: - aws s3 sync ./artifacts/ s3://${BUCKET}/${PREFIX}/$(cat ./artifacts/substrate/VERSION)/ - echo "update objects in latest path" - aws s3 sync s3://${BUCKET}/${PREFIX}/$(cat ./artifacts/substrate/VERSION)/ s3://${BUCKET}/${PREFIX}/latest/ after_script: - aws s3 ls s3://${BUCKET}/${PREFIX}/latest/ - --recursive --human-readable --summarize + --recursive --human-readable --summarize publish-rustdoc: - stage: publish - extends: .kubernetes-env + stage: publish + extends: .kubernetes-env variables: - CI_IMAGE: node:16 - GIT_DEPTH: 100 - RUSTDOCS_DEPLOY_REFS: "master" + CI_IMAGE: node:16 + GIT_DEPTH: 100 + RUSTDOCS_DEPLOY_REFS: "master" rules: - if: $CI_PIPELINE_SOURCE == "pipeline" when: never - if: $CI_PIPELINE_SOURCE == "web" && $CI_COMMIT_REF_NAME == "master" - if: $CI_COMMIT_REF_NAME == "master" - - if: $CI_COMMIT_REF_NAME =~ /^monthly-20[0-9]{2}-[0-9]{2}.*$/ # to support: monthly-2021-09+1 - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 + - if: $CI_COMMIT_REF_NAME =~ /^monthly-20[0-9]{2}-[0-9]{2}.*$/ # to support: monthly-2021-09+1 + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 # `needs:` can be removed after CI image gets nonroot. In this case `needs:` stops other # artifacts from being dowloaded by this job. needs: - - job: build-rustdoc - artifacts: true + - job: build-rustdoc + artifacts: true script: # If $CI_COMMIT_REF_NAME doesn't match one of $RUSTDOCS_DEPLOY_REFS space-separated values, we # exit immediately. @@ -196,75 +195,75 @@ publish-rustdoc: # We don't want to mark the entire job failed if there's nothing to # publish though, hence the `|| true`. - git commit -m "___Updated docs for ${CI_COMMIT_REF_NAME}___" || - echo "___Nothing to commit___" + echo "___Nothing to commit___" - git push origin gh-pages --force after_script: - rm -rf .git/ ./* publish-draft-release: - stage: publish - image: paritytech/tools:latest + stage: publish + image: paritytech/tools:latest rules: - if: $CI_COMMIT_REF_NAME =~ /^ci-release-.*$/ - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 script: - ./scripts/ci/gitlab/publish_draft_release.sh - allow_failure: true + allow_failure: true .publish-crates-template: - stage: publish + stage: publish extends: - .crates-publishing-template - .crates-publishing-pipeline # We don't want multiple jobs racing to publish crates as it's redundant and they might overwrite # the releases of one another. Use resource_group to ensure that at most one instance of this job # is running at any given time. 
- resource_group: crates-publishing + resource_group: crates-publishing # crates.io currently rate limits crate publishing at 1 per minute: # https://github.com/paritytech/release-engineering/issues/123#issuecomment-1335509748 # Taking into account the 202 (as of Dec 07, 2022) publishable Substrate crates, in the worst # case, due to the rate limits alone, we'd have to wait through at least 202 minutes of delay. # Taking into account also the verification steps and extra synchronization delays after # publishing the crate, the job needs to have a much higher timeout than average. - timeout: 9h + timeout: 9h # A custom publishing environment is used for us to be able to set up protected secrets # specifically for it - environment: publish-crates + environment: publish-crates script: - rusty-cachier snapshot create - git clone - --depth 1 - --branch "$RELENG_SCRIPTS_BRANCH" - https://github.com/paritytech/releng-scripts.git + --depth 1 + --branch "$RELENG_SCRIPTS_BRANCH" + https://github.com/paritytech/releng-scripts.git - CRATESIO_TARGET_INSTANCE=default ./releng-scripts/publish-crates - rusty-cachier cache upload publish-crates: - extends: .publish-crates-template + extends: .publish-crates-template # publish-crates should only be run if publish-crates-locally passes needs: - - job: check-crate-publishing - artifacts: false + - job: check-crate-publishing + artifacts: false publish-crates-manual: - extends: .publish-crates-template - when: manual - interruptible: false + extends: .publish-crates-template + when: manual + interruptible: false check-crate-publishing: - stage: publish + stage: publish extends: - .test-refs - .crates-publishing-template # When lots of crates are taken into account (for example on master where all crates are tested) # the job might take a long time, as evidenced by: # https://gitlab.parity.io/parity/mirrors/substrate/-/jobs/2269364 - timeout: 4h + timeout: 4h script: - rusty-cachier snapshot create - git clone - --depth 1 - --branch "$RELENG_SCRIPTS_BRANCH" - https://github.com/paritytech/releng-scripts.git + --depth 1 + --branch "$RELENG_SCRIPTS_BRANCH" + https://github.com/paritytech/releng-scripts.git - CRATESIO_TARGET_INSTANCE=local ./releng-scripts/publish-crates - rusty-cachier cache upload diff --git a/scripts/ci/gitlab/pipeline/test.yml b/scripts/ci/gitlab/pipeline/test.yml index 732b8c0fd699c..49dbb194fb651 100644 --- a/scripts/ci/gitlab/pipeline/test.yml +++ b/scripts/ci/gitlab/pipeline/test.yml @@ -1,29 +1,28 @@ # This file is part of .gitlab-ci.yml # Here are all jobs that are executed during "test" stage - # It's more like a check and it belongs to the previous stage, but we want to run this job with real tests in parallel find-fail-ci-phrase: - stage: test + stage: test variables: - CI_IMAGE: "paritytech/tools:latest" - ASSERT_REGEX: "FAIL-CI" - GIT_DEPTH: 1 + CI_IMAGE: "paritytech/tools:latest" + ASSERT_REGEX: "FAIL-CI" + GIT_DEPTH: 1 extends: - .kubernetes-env script: - set +e - rg --line-number --hidden --type rust --glob '!{.git,target}' "$ASSERT_REGEX" .; exit_status=$? 
- if [ $exit_status -eq 0 ]; then - echo "$ASSERT_REGEX was found, exiting with 1"; - exit 1; + echo "$ASSERT_REGEX was found, exiting with 1"; + exit 1; else - echo "No $ASSERT_REGEX was found, exiting with 0"; - exit 0; + echo "No $ASSERT_REGEX was found, exiting with 0"; + exit 0; fi cargo-deny: - stage: test + stage: test extends: - .docker-env - .nightly-pipeline @@ -36,16 +35,16 @@ cargo-deny: - echo "___The complete log is in the artifacts___" - cargo deny check -c ./scripts/ci/deny.toml 2> deny.log artifacts: - name: $CI_COMMIT_SHORT_SHA - expire_in: 3 days - when: always + name: $CI_COMMIT_SHORT_SHA + expire_in: 3 days + when: always paths: - deny.log # FIXME: Temporarily allow to fail. - allow_failure: true + allow_failure: true cargo-fmt: - stage: test + stage: test variables: RUSTY_CACHIER_TOOLCHAIN: nightly extends: @@ -57,11 +56,11 @@ cargo-fmt: - rusty-cachier cache upload cargo-clippy: - stage: test + stage: test # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs needs: - - job: cargo-fmt - artifacts: false + - job: cargo-fmt + artifacts: false variables: RUSTY_CACHIER_TOOLCHAIN: nightly extends: @@ -73,11 +72,11 @@ cargo-clippy: - rusty-cachier cache upload cargo-check-benches: - stage: test + stage: test variables: # Override to use nightly toolchain - RUSTY_CACHIER_TOOLCHAIN: "nightly" - CI_JOB_NAME: "cargo-check-benches" + RUSTY_CACHIER_TOOLCHAIN: "nightly" + CI_JOB_NAME: "cargo-check-benches" extends: - .docker-env - .test-refs-check-benches @@ -93,11 +92,11 @@ cargo-check-benches: - | export BASE=$(curl -s -H "Authorization: Bearer ${GITHUB_PR_TOKEN}" https://api.github.com/repos/paritytech/substrate/pulls/${$CI_COMMIT_REF_NAME} | jq .base.ref) - if [ $CI_COMMIT_REF_NAME != "master" ]; then - git fetch origin +${BASE}:${BASE}; - git fetch origin +$CI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME; - git checkout ${BASE}; - git config user.email "ci@gitlab.parity.io"; - git merge $CI_COMMIT_REF_NAME --verbose --no-edit; + git fetch origin +${BASE}:${BASE}; + git fetch origin +$CI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME; + git checkout ${BASE}; + git config user.email "ci@gitlab.parity.io"; + git merge $CI_COMMIT_REF_NAME --verbose --no-edit; fi parallel: 2 script: @@ -106,18 +105,18 @@ cargo-check-benches: # this job is executed in parallel on two runners - echo "___Running benchmarks___"; - case ${CI_NODE_INDEX} in - 1) - SKIP_WASM_BUILD=1 time cargo +nightly check --locked --benches --all; - cargo run --locked --release -p node-bench -- ::trie::read::small --json - | tee ./artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA/::trie::read::small.json; - echo "___Uploading cache for rusty-cachier___"; - rusty-cachier cache upload - ;; - 2) - cargo run --locked --release -p node-bench -- ::node::import::native::sr25519::transfer_keep_alive::paritydb::small --json - | tee ./artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA/::node::import::native::sr25519::transfer_keep_alive::paritydb::small.json - ;; - esac + 1) + SKIP_WASM_BUILD=1 time cargo +nightly check --locked --benches --all; + cargo run --locked --release -p node-bench -- ::trie::read::small --json + | tee ./artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA/::trie::read::small.json; + echo "___Uploading cache for rusty-cachier___"; + rusty-cachier cache upload + ;; + 2) + cargo run --locked --release -p node-bench -- ::node::import::native::sr25519::transfer_keep_alive::paritydb::small --json + | tee 
./artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA/::node::import::native::sr25519::transfer_keep_alive::paritydb::small.json + ;; + esac tags: - linux-docker-benches @@ -125,38 +124,38 @@ node-bench-regression-guard: # it's not belong to `build` semantically, but dag jobs can't depend on each other # within the single stage - https://gitlab.com/gitlab-org/gitlab/-/issues/30632 # more: https://github.com/paritytech/substrate/pull/8519#discussion_r608012402 - stage: build + stage: build extends: - .docker-env - .test-refs-no-trigger-prs-only needs: # this is a DAG - - job: cargo-check-benches - artifacts: true + - job: cargo-check-benches + artifacts: true # polls artifact from master to compare with current result # need to specify both parallel jobs from master because of the bug # https://gitlab.com/gitlab-org/gitlab/-/issues/39063 - - project: $CI_PROJECT_PATH - job: "cargo-check-benches 1/2" - ref: master - artifacts: true - - project: $CI_PROJECT_PATH - job: "cargo-check-benches 2/2" - ref: master - artifacts: true + - project: $CI_PROJECT_PATH + job: "cargo-check-benches 1/2" + ref: master + artifacts: true + - project: $CI_PROJECT_PATH + job: "cargo-check-benches 2/2" + ref: master + artifacts: true variables: - CI_IMAGE: "paritytech/node-bench-regression-guard:latest" + CI_IMAGE: "paritytech/node-bench-regression-guard:latest" before_script: [""] script: - echo "------- IMPORTANT -------" - echo "node-bench-regression-guard depends on the results of a cargo-check-benches job" - echo "In case of this job failure, check your pipeline's cargo-check-benches" - - 'node-bench-regression-guard --reference artifacts/benches/master-* - --compare-with artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA' + - "node-bench-regression-guard --reference artifacts/benches/master-* + --compare-with artifacts/benches/$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA" after_script: [""] cargo-check-try-runtime: - stage: test + stage: test extends: - .docker-env - .test-refs @@ -166,18 +165,18 @@ cargo-check-try-runtime: - rusty-cachier cache upload test-deterministic-wasm: - stage: test + stage: test # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs needs: - - job: cargo-check-try-runtime - artifacts: false + - job: cargo-check-try-runtime + artifacts: false extends: - .docker-env - .test-refs variables: - WASM_BUILD_NO_COLOR: 1 + WASM_BUILD_NO_COLOR: 1 # this variable gets overriden by "rusty-cachier environment inject", use the value as default - CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" + CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target" script: - rusty-cachier snapshot create # build runtime @@ -195,7 +194,7 @@ test-deterministic-wasm: - rusty-cachier cache upload test-linux-stable: - stage: test + stage: test extends: - .docker-env - .test-refs @@ -203,14 +202,14 @@ test-linux-stable: variables: # Enable debug assertions since we are running optimized builds for testing # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - RUST_BACKTRACE: 1 - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUST_BACKTRACE: 1 + WASM_BUILD_NO_COLOR: 1 + WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" # Ensure we run the UI tests. 
- RUN_UI_TESTS: 1 + RUN_UI_TESTS: 1 # needed for rusty-cachier to keep cache in test-linux-stable folder and not in test-linux-stable-1/3 - CI_JOB_NAME: "test-linux-stable" + CI_JOB_NAME: "test-linux-stable" parallel: 3 script: - rusty-cachier snapshot create @@ -219,30 +218,30 @@ test-linux-stable: # node-cli is excluded until https://github.com/paritytech/substrate/issues/11321 fixed - echo "Node index - ${CI_NODE_INDEX}. Total amount - ${CI_NODE_TOTAL}" - time cargo nextest run --workspace - --locked - --release - --verbose - --features runtime-benchmarks - --manifest-path ./bin/node/cli/Cargo.toml - --exclude node-cli - --partition count:${CI_NODE_INDEX}/${CI_NODE_TOTAL} + --locked + --release + --verbose + --features runtime-benchmarks + --manifest-path ./bin/node/cli/Cargo.toml + --exclude node-cli + --partition count:${CI_NODE_INDEX}/${CI_NODE_TOTAL} # we need to update cache only from one job - if [ ${CI_NODE_INDEX} == 1 ]; then rusty-cachier cache upload; fi test-frame-support: - stage: test + stage: test extends: - .docker-env - .test-refs variables: # Enable debug assertions since we are running optimized builds for testing # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - RUST_BACKTRACE: 1 - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUST_BACKTRACE: 1 + WASM_BUILD_NO_COLOR: 1 + WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" # Ensure we run the UI tests. - RUN_UI_TESTS: 1 + RUN_UI_TESTS: 1 script: - rusty-cachier snapshot create - time cargo test --locked -p frame-support-test --features=frame-feature-testing,no-metadata-docs --manifest-path ./frame/support/test/Cargo.toml --test pallet # does not reuse cache 1 min 44 sec @@ -252,19 +251,19 @@ test-frame-support: # This job runs tests that don't work with cargo-nextest in test-linux-stable test-linux-stable-extra: - stage: test + stage: test extends: - .docker-env - .test-refs variables: # Enable debug assertions since we are running optimized builds for testing # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - RUST_BACKTRACE: 1 - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUST_BACKTRACE: 1 + WASM_BUILD_NO_COLOR: 1 + WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" # Ensure we run the UI tests. - RUN_UI_TESTS: 1 + RUN_UI_TESTS: 1 script: - rusty-cachier snapshot create # Run node-cli tests @@ -277,17 +276,17 @@ test-linux-stable-extra: # This job runs all benchmarks defined in the `/bin/node/runtime` once to check that there are no errors. quick-benchmarks: - stage: test + stage: test extends: - .docker-env - .test-refs variables: # Enable debug assertions since we are running optimized builds for testing # but still want to have debug assertions. 
- RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - RUST_BACKTRACE: "full" - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUST_BACKTRACE: "full" + WASM_BUILD_NO_COLOR: 1 + WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" script: - rusty-cachier snapshot create - time cargo run --locked --release --features runtime-benchmarks -- benchmark pallet --execution wasm --wasm-execution compiled --chain dev --pallet "*" --extrinsic "*" --steps 2 --repeat 1 @@ -295,16 +294,15 @@ quick-benchmarks: test-frame-examples-compile-to-wasm: # into one job - stage: test - variables: - RUSTY_CACHIER_TOOLCHAIN: nightly + stage: test extends: - .docker-env - .test-refs variables: + RUSTY_CACHIER_TOOLCHAIN: nightly # Enable debug assertions since we are running optimized builds for testing # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y" + RUSTFLAGS: "-Cdebug-assertions=y" RUST_BACKTRACE: 1 script: - rusty-cachier snapshot create @@ -315,7 +313,7 @@ test-frame-examples-compile-to-wasm: - rusty-cachier cache upload test-linux-stable-int: - stage: test + stage: test extends: - .docker-env - .test-refs @@ -323,27 +321,27 @@ test-linux-stable-int: variables: # Enable debug assertions since we are running optimized builds for testing # but still want to have debug assertions. - RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" - RUST_BACKTRACE: 1 - WASM_BUILD_NO_COLOR: 1 - WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" + RUST_BACKTRACE: 1 + WASM_BUILD_NO_COLOR: 1 + WASM_BUILD_RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings" # Ensure we run the UI tests. - RUN_UI_TESTS: 1 + RUN_UI_TESTS: 1 script: - rusty-cachier snapshot create - WASM_BUILD_NO_COLOR=1 RUST_LOG=sync=trace,consensus=trace,client=trace,state-db=trace,db=trace,forks=trace,state_db=trace,storage_cache=trace - time cargo test -p node-cli --release --verbose --locked -- --ignored + time cargo test -p node-cli --release --verbose --locked -- --ignored - rusty-cachier cache upload # more information about this job can be found here: # https://github.com/paritytech/substrate/pull/6916 check-tracing: - stage: test + stage: test # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs needs: - - job: test-linux-stable-int - artifacts: false + - job: test-linux-stable-int + artifacts: false variables: RUSTY_CACHIER_TOOLCHAIN: nightly extends: @@ -360,20 +358,19 @@ check-tracing: # more information about this job can be found here: # https://github.com/paritytech/substrate/pull/3778 test-full-crypto-feature: - stage: test + stage: test # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs needs: - - job: check-tracing - artifacts: false - variables: - RUSTY_CACHIER_TOOLCHAIN: nightly + - job: check-tracing + artifacts: false extends: - .docker-env - .test-refs variables: + RUSTY_CACHIER_TOOLCHAIN: nightly # Enable debug assertions since we are running optimized builds for testing # but still want to have debug assertions. 
- RUSTFLAGS: "-Cdebug-assertions=y" + RUSTFLAGS: "-Cdebug-assertions=y" RUST_BACKTRACE: 1 script: - rusty-cachier snapshot create @@ -384,22 +381,21 @@ test-full-crypto-feature: - rusty-cachier cache upload check-rustdoc: - stage: test - variables: - RUSTY_CACHIER_TOOLCHAIN: nightly + stage: test extends: - .docker-env - .test-refs variables: - SKIP_WASM_BUILD: 1 - RUSTDOCFLAGS: "-Dwarnings" + RUSTY_CACHIER_TOOLCHAIN: nightly + SKIP_WASM_BUILD: 1 + RUSTDOCFLAGS: "-Dwarnings" script: - rusty-cachier snapshot create - time cargo +nightly doc --locked --workspace --all-features --verbose --no-deps - rusty-cachier cache upload cargo-check-each-crate: - stage: test + stage: test extends: - .docker-env - .test-refs @@ -408,7 +404,7 @@ cargo-check-each-crate: variables: # $CI_JOB_NAME is set manually so that rusty-cachier can share the cache for all # "cargo-check-each-crate I/N" jobs - CI_JOB_NAME: cargo-check-each-crate + CI_JOB_NAME: cargo-check-each-crate script: - rusty-cachier snapshot create - PYTHONUNBUFFERED=x time ./scripts/ci/gitlab/check-each-crate.py "$CI_NODE_INDEX" "$CI_NODE_TOTAL" @@ -427,7 +423,7 @@ cargo-check-each-crate-macos: - !reference [.rust-info-script, script] - !reference [.pipeline-stopper-vars, script] variables: - SKIP_WASM_BUILD: 1 + SKIP_WASM_BUILD: 1 script: # TODO: enable rusty-cachier once it supports Mac # TODO: use parallel jobs, as per cargo-check-each-crate, once more Mac runners are available diff --git a/scripts/ci/gitlab/pipeline/zombienet.yml b/scripts/ci/gitlab/pipeline/zombienet.yml index 3429621e4fe22..31ee510343278 100644 --- a/scripts/ci/gitlab/pipeline/zombienet.yml +++ b/scripts/ci/gitlab/pipeline/zombienet.yml @@ -11,20 +11,20 @@ - export DEBUG=zombie,zombie::network-node - export ZOMBIENET_INTEGRATION_TEST_IMAGE=${SUBSTRATE_IMAGE_NAME}:${SUBSTRATE_IMAGE_TAG} - echo "${ZOMBIENET_INTEGRATION_TEST_IMAGE}" - stage: zombienet - image: "${ZOMBIENET_IMAGE}" + stage: zombienet + image: "${ZOMBIENET_IMAGE}" needs: - - job: publish-docker-substrate-temporary + - job: publish-docker-substrate-temporary extends: - .kubernetes-env - .zombienet-refs variables: - GH_DIR: "https://github.com/paritytech/substrate/tree/${CI_COMMIT_SHA}/zombienet" + GH_DIR: "https://github.com/paritytech/substrate/tree/${CI_COMMIT_SHA}/zombienet" FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1 artifacts: - name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" - when: always - expire_in: 2 days + name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" + when: always + expire_in: 2 days paths: - ./zombienet-logs after_script: @@ -39,32 +39,29 @@ zombienet-0000-block-building: - .zombienet-common script: - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}/0000-block-building" - --test="block-building.zndsl" - + --github-remote-dir="${GH_DIR}/0000-block-building" + --test="block-building.zndsl" zombienet-0001-basic-warp-sync: extends: - .zombienet-common script: - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}/0001-basic-warp-sync" - --test="test-warp-sync.zndsl" - + --github-remote-dir="${GH_DIR}/0001-basic-warp-sync" + --test="test-warp-sync.zndsl" zombienet-0002-validators-warp-sync: extends: - .zombienet-common script: - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}/0002-validators-warp-sync" - --test="test-validators-warp-sync.zndsl" - + --github-remote-dir="${GH_DIR}/0002-validators-warp-sync" + --test="test-validators-warp-sync.zndsl" 
zombienet-0003-block-building-warp-sync: extends: - .zombienet-common script: - /home/nonroot/zombie-net/scripts/ci/run-test-env-manager.sh - --github-remote-dir="${GH_DIR}/0003-block-building-warp-sync" - --test="test-block-building-warp-sync.zndsl" + --github-remote-dir="${GH_DIR}/0003-block-building-warp-sync" + --test="test-block-building-warp-sync.zndsl" diff --git a/scripts/ci/gitlab/prettier.sh b/scripts/ci/gitlab/prettier.sh new file mode 100755 index 0000000000000..299bbee179dca --- /dev/null +++ b/scripts/ci/gitlab/prettier.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +# meant to be installed via +# git config filter.ci-prettier.clean "scripts/ci/gitlab/prettier.sh" + +prettier --parser yaml
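
Note on how the new filter is wired up: the .gitattributes entries route .gitlab-ci.yml and scripts/ci/gitlab/pipeline/*.yml through a clean filter named `ci-prettier`, and scripts/ci/gitlab/prettier.sh is the filter body. A git clean filter receives the file content on stdin and must write the filtered content to stdout, which is what `prettier --parser yaml` does here, so the CI YAML gets normalized whenever these files are staged. The filter only takes effect once it is registered in the local git config. A minimal local setup might look like the sketch below; it assumes prettier is already installed and on PATH (for example via npm), and the git config command is the one quoted in the script's own comment.

    # install prettier if it is not available yet (one possible way)
    npm install --global prettier

    # register the clean filter referenced by .gitattributes
    git config filter.ci-prettier.clean "scripts/ci/gitlab/prettier.sh"

    # optional: check or reformat the CI files by hand with the same parser
    prettier --check .gitlab-ci.yml scripts/ci/gitlab/pipeline/*.yml
    prettier --write .gitlab-ci.yml scripts/ci/gitlab/pipeline/*.yml

With the filter in place, git runs prettier on these files at `git add` time, so hand edits to the pipeline definitions come out in the same style as the reformatting in this diff.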