diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index 965b12a64..4a562e3fa 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -9,9 +9,6 @@ export GO_VERSION GCP_SERVICE_ACCOUNT_SECRET_PATH=secret/ci/elastic-elastic-package/gcp-service-account AWS_SERVICE_ACCOUNT_SECRET_PATH=kv/ci-shared/platform-ingest/aws_account_auth GITHUB_TOKEN_VAULT_PATH=kv/ci-shared/platform-ingest/github_token -JENKINS_API_TOKEN_PATH=kv/ci-shared/platform-ingest/jenkins_api_tokens -SIGNING_PACKAGES_GCS_CREDENTIALS_PATH=kv/ci-shared/platform-ingest/signing_packages_gcs_artifacts_credentials -PACKAGE_UPLOADER_GCS_CREDENTIALS_PATH=kv/ci-shared/platform-ingest/package_storage_uploader PRIVATE_CI_GCS_CREDENTIALS_PATH=kv/ci-shared/platform-ingest/gcp-platform-ingest-ci-service-account # Secrets must be redacted @@ -50,23 +47,6 @@ if [[ "$BUILDKITE_PIPELINE_SLUG" == "elastic-package" && "$BUILDKITE_STEP_KEY" = export GITHUB_TOKEN fi -if [[ "$BUILDKITE_PIPELINE_SLUG" == "elastic-package-package-storage-publish" && "$BUILDKITE_STEP_KEY" == "sign-publish" ]]; then - JENKINS_USERNAME_SECRET=$(retry 5 vault kv get -field username ${JENKINS_API_TOKEN_PATH}) - export JENKINS_USERNAME_SECRET - JENKINS_HOST_SECRET=$(retry 5 vault kv get -field internal_ci_host ${JENKINS_API_TOKEN_PATH}) - export JENKINS_HOST_SECRET - JENKINS_TOKEN=$(retry 5 vault kv get -field internal_ci ${JENKINS_API_TOKEN_PATH}) - export JENKINS_TOKEN - - # signing job - SIGNING_PACKAGES_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field value ${SIGNING_PACKAGES_GCS_CREDENTIALS_PATH}) - export SIGNING_PACKAGES_GCS_CREDENTIALS_SECRET - - # publishing job - PACKAGE_UPLOADER_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field value ${PACKAGE_UPLOADER_GCS_CREDENTIALS_PATH}) - export PACKAGE_UPLOADER_GCS_CREDENTIALS_SECRET -fi - if [[ "$BUILDKITE_PIPELINE_SLUG" == "elastic-package-test-with-integrations" && "$BUILDKITE_STEP_KEY" == "pr-integrations" ]]; then 
GITHUB_USERNAME_SECRET=$(retry 5 vault kv get -field username ${GITHUB_TOKEN_VAULT_PATH}) export GITHUB_USERNAME_SECRET diff --git a/.buildkite/pipeline.package-storage-publish.yml b/.buildkite/pipeline.package-storage-publish.yml deleted file mode 100644 index fa55f358e..000000000 --- a/.buildkite/pipeline.package-storage-publish.yml +++ /dev/null @@ -1,26 +0,0 @@ -env: - SETUP_GVM_VERSION: 'v0.5.1' # https://github.com/andrewkroh/gvm/issues/44#issuecomment-1013231151 - LINUX_AGENT_IMAGE: "golang:${GO_VERSION}" - -steps: - - label: ":go: Build package" - key: build-package - command: - - "make install" - - "cd test/packages/package-storage/package_storage_candidate; elastic-package build -v --zip" - agents: - image: "${LINUX_AGENT_IMAGE}" - cpu: "8" - memory: "4G" - artifact_paths: - - build/packages/*.zip - - - label: "Sign and Publish package" - key: sign-publish - command: ".buildkite/scripts/signAndPublishPackage.sh" - depends_on: - - build-package - timeout_in_minutes: 90 - agents: - provider: "gcp" - image: family/core-ubuntu-2004 diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index 1c83bf0b2..1960b6a69 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -16,22 +16,6 @@ "skip_ci_on_only_changed": [ ], "always_require_ci_on_changed": [ ] }, - { - "enabled": true, - "pipelineSlug": "elastic-package-package-storage-publish", - "allow_org_users": true, - "allowed_repo_permissions": ["admin", "write"], - "allowed_list": ["dependabot[bot]", "mergify[bot]"], - "set_commit_status": true, - "build_on_commit": true, - "build_on_comment": true, - "trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$", - "always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$", - "skip_ci_labels": [ ], - "skip_target_branches": [ ], - "skip_ci_on_only_changed": [ ], - "always_require_ci_on_changed": [ ] - }, { "enabled": true, "pipelineSlug": 
"elastic-package-test-with-integrations", diff --git a/.buildkite/scripts/signAndPublishPackage.sh b/.buildkite/scripts/signAndPublishPackage.sh deleted file mode 100755 index 325aef18d..000000000 --- a/.buildkite/scripts/signAndPublishPackage.sh +++ /dev/null @@ -1,173 +0,0 @@ -#!/bin/bash -source .buildkite/scripts/install_deps.sh -source .buildkite/scripts/tooling.sh - -set -euo pipefail - -WORKSPACE="$(pwd)" -TMP_FOLDER_TEMPLATE_BASE="tmp.elastic-package" - -cleanup() { - local error_code=$? - - if [ $error_code != 0 ] ; then - # if variable is defined, run the logout - if [ -n "${GOOGLE_APPLICATION_CREDENTIALS+x}" ]; then - google_cloud_logout_active_account - fi - fi - - echo "Deleting temporal files..." - cd "${WORKSPACE}" - rm -rf ${TMP_FOLDER_TEMPLATE_BASE}.* - echo "Done." - - exit $error_code -} - -trap cleanup EXIT - -is_already_published() { - local packageZip=$1 - - if curl -s --head "https://package-storage.elastic.co/artifacts/packages/${packageZip}" | grep -q "HTTP/2 200" ; then - echo "- Already published ${packageZip}" - return 0 - fi - echo "- Not published ${packageZip}" - return 1 -} - -echo "Checking gsutil command..." -if ! 
command -v gsutil &> /dev/null ; then - echo "⚠️ gsutil is not installed" - exit 1 -fi - - -REPO_NAME=$(repo_name "${BUILDKITE_REPO}") -BUILD_TAG="buildkite-${BUILDKITE_PIPELINE_SLUG}-${BUILDKITE_BUILD_NUMBER}" - -REPO_BUILD_TAG="${REPO_NAME}/${BUILD_TAG}" - -BUILD_PACKAGES_PATH="build/packages" -TMP_FOLDER_TEMPLATE="${TMP_FOLDER_TEMPLATE_BASE}.XXXXXXXXX" -JENKINS_TRIGGER_PATH=".buildkite/scripts/triggerJenkinsJob" -GOOGLE_CREDENTIALS_FILENAME="google-cloud-credentials.json" - -## Signing -INFRA_SIGNING_BUCKET_NAME='internal-ci-artifacts' -INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_SUBFOLDER="${REPO_BUILD_TAG}/signed-artifacts" -INFRA_SIGNING_BUCKET_ARTIFACTS_PATH="gs://${INFRA_SIGNING_BUCKET_NAME}/${REPO_BUILD_TAG}" -INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_PATH="gs://${INFRA_SIGNING_BUCKET_NAME}/${INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_SUBFOLDER}" - -## Publishing -PACKAGE_STORAGE_INTERNAL_BUCKET_QUEUE_PUBLISHING_PATH="gs://elastic-bekitzur-package-storage-internal/queue-publishing/${REPO_BUILD_TAG}" - - -google_cloud_auth_signing() { - local gsUtilLocation - gsUtilLocation=$(mktemp -d -p "${WORKSPACE}" -t "${TMP_FOLDER_TEMPLATE}") - - local secretFileLocation=${gsUtilLocation}/${GOOGLE_CREDENTIALS_FILENAME} - echo "${SIGNING_PACKAGES_GCS_CREDENTIALS_SECRET}" > "${secretFileLocation}" - - google_cloud_auth "${secretFileLocation}" -} - -google_cloud_auth_publishing() { - local gsUtilLocation - gsUtilLocation=$(mktemp -d -p "${WORKSPACE}" -t "${TMP_FOLDER_TEMPLATE}") - - local secretFileLocation=${gsUtilLocation}/${GOOGLE_CREDENTIALS_FILENAME} - echo "${PACKAGE_UPLOADER_GCS_CREDENTIALS_SECRET}" > "${secretFileLocation}" - - google_cloud_auth "${secretFileLocation}" -} - -sign_package() { - local package=${1} - local packageZip - packageZip=$(basename "${package}") - - google_cloud_auth_signing - - # upload zip package (trailing forward slashes are required) - echo "Upload package .zip file for signing ${package} to ${INFRA_SIGNING_BUCKET_ARTIFACTS_PATH}" - gsutil cp 
"${package}" "${INFRA_SIGNING_BUCKET_ARTIFACTS_PATH}/" - - echo "Trigger Jenkins job for signing package ${packageZip}" - pushd ${JENKINS_TRIGGER_PATH} > /dev/null - - go run main.go \ - --jenkins-job sign \ - --folder "${INFRA_SIGNING_BUCKET_ARTIFACTS_PATH}" - - popd > /dev/null - - echo "Download signatures" - gsutil cp "${INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_PATH}/${packageZip}.asc" "${BUILD_PACKAGES_PATH}" - - echo "Rename asc to sig" - for f in $(ls ${BUILD_PACKAGES_PATH}/*.asc); do - mv "$f" "${f%.asc}.sig" - done - - ls -l "${BUILD_PACKAGES_PATH}" - - google_cloud_logout_active_account -} - -publish_package() { - local package=$1 - local packageZip - packageZip=$(basename "${package}") - - # create file with credentials - google_cloud_auth_publishing - - # upload files (trailing forward slashes are required) - echo "Upload package .zip file ${package} to ${PACKAGE_STORAGE_INTERNAL_BUCKET_QUEUE_PUBLISHING_PATH}" - gsutil cp "${package}" "${PACKAGE_STORAGE_INTERNAL_BUCKET_QUEUE_PUBLISHING_PATH}/" - echo "Upload package .sig file ${package}.sig to ${PACKAGE_STORAGE_INTERNAL_BUCKET_QUEUE_PUBLISHING_PATH}" - gsutil cp "${package}.sig" "${PACKAGE_STORAGE_INTERNAL_BUCKET_QUEUE_PUBLISHING_PATH}/" - - echo "Trigger Jenkins job for publishing package ${packageZip}" - pushd "${JENKINS_TRIGGER_PATH}" > /dev/null - - go run main.go \ - --jenkins-job publish \ - --package "${PACKAGE_STORAGE_INTERNAL_BUCKET_QUEUE_PUBLISHING_PATH}/${packageZip}" \ - --signature "${PACKAGE_STORAGE_INTERNAL_BUCKET_QUEUE_PUBLISHING_PATH}/${packageZip}.sig" - - popd > /dev/null - - google_cloud_logout_active_account -} - -add_bin_path - -# Required to trigger Jenkins job -with_go - -# download package artifact from previous step -mkdir -p "${BUILD_PACKAGES_PATH}" - -buildkite-agent artifact download "${BUILD_PACKAGES_PATH}/*.zip" --step build-package . 
-echo "Show artifacts downloaded from previous step ${BUILD_PACKAGES_PATH}" -ls -l "${BUILD_PACKAGES_PATH}" - -for package in $(ls ${BUILD_PACKAGES_PATH}/*.zip); do - echo "isAlreadyInstalled ${package}?" - packageZip=$(basename ${package}) - if is_already_published ${packageZip} ; then - echo "Skipping. ${packageZip} already published" - continue - fi - - echo "Signing package ${packageZip}" - sign_package "${package}" - - echo "Publishing package ${packageZip}" - publish_package "${package}" -done diff --git a/.buildkite/scripts/triggerJenkinsJob/go.mod b/.buildkite/scripts/triggerJenkinsJob/go.mod deleted file mode 100644 index b5a6dd6c6..000000000 --- a/.buildkite/scripts/triggerJenkinsJob/go.mod +++ /dev/null @@ -1,7 +0,0 @@ -module github.com/elastic/trigger-jenkins-buildkite-plugin - -go 1.21.0 - -require github.com/bndr/gojenkins v1.1.0 - -require golang.org/x/net v0.17.0 // indirect diff --git a/.buildkite/scripts/triggerJenkinsJob/go.sum b/.buildkite/scripts/triggerJenkinsJob/go.sum deleted file mode 100644 index cabb0799d..000000000 --- a/.buildkite/scripts/triggerJenkinsJob/go.sum +++ /dev/null @@ -1,18 +0,0 @@ -github.com/bndr/gojenkins v1.1.0 h1:TWyJI6ST1qDAfH33DQb3G4mD8KkrBfyfSUoZBHQAvPI= -github.com/bndr/gojenkins v1.1.0/go.mod h1:QeskxN9F/Csz0XV/01IC8y37CapKKWvOHa0UHLLX1fM= -github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod 
h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/.buildkite/scripts/triggerJenkinsJob/jenkins/jenkins.go b/.buildkite/scripts/triggerJenkinsJob/jenkins/jenkins.go deleted file mode 100644 index 7a8f32cdd..000000000 --- a/.buildkite/scripts/triggerJenkinsJob/jenkins/jenkins.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. 
- -package jenkins - -import ( - "context" - "fmt" - "log" - "time" - - "github.com/bndr/gojenkins" -) - -type JenkinsClient struct { - client *gojenkins.Jenkins -} - -type Options struct { - WaitingTime time.Duration - MaxWaitingTime time.Duration - GrowthFactor float64 - Retries int -} - -func NewJenkinsClient(ctx context.Context, host, user, token string) (*JenkinsClient, error) { - jenkins, err := gojenkins.CreateJenkins(nil, host, user, token).Init(ctx) - if err != nil { - return nil, fmt.Errorf("client coult not be created: %w", err) - } - - return &JenkinsClient{ - client: jenkins, - }, nil -} - -func (j *JenkinsClient) RunJob(ctx context.Context, jobName string, async bool, params map[string]string, opts Options) error { - log.Printf("Building job %s", jobName) - var queueId int64 - - r := retry(func(ctx context.Context) error { - var err error - queueId, err = j.client.BuildJob(ctx, jobName, params) - if err != nil { - return fmt.Errorf("error running job %s: %w", jobName, err) - } - - if queueId != 0 { - return nil - } - return fmt.Errorf("already running %s?", jobName) - - }, opts.Retries, opts.GrowthFactor, opts.WaitingTime, opts.MaxWaitingTime) - - if err := r(ctx); err != nil { - return err - } - - build, err := j.getBuildFromJobAndQueueID(ctx, jobName, queueId) - if err != nil { - return err - } - log.Printf("Job triggered %s/%d\n", jobName, build.GetBuildNumber()) - - if async { - return nil - } - - log.Printf("Waiting to be finished %s\n", build.GetUrl()) - err = j.waitForBuildFinished(ctx, build) - if err != nil { - return fmt.Errorf("not finished job %s/%d: %w", jobName, build.GetBuildNumber(), err) - } - - log.Printf("Build %s finished with result: %s\n", build.GetUrl(), build.GetResult()) - - if build.GetResult() != gojenkins.STATUS_SUCCESS { - return fmt.Errorf("build %s finished with result %s", build.GetUrl(), build.GetResult()) - } - return nil -} - -func (j *JenkinsClient) getBuildFromJobAndQueueID(ctx context.Context, jobName string, 
queueId int64) (*gojenkins.Build, error) { - job, err := j.client.GetJob(ctx, jobName) - if err != nil { - return nil, fmt.Errorf("not able to get job %s: %w", jobName, err) - } - - build, err := j.getBuildFromQueueID(ctx, job, queueId) - if err != nil { - return nil, fmt.Errorf("not able to get build from %s: %w", jobName, err) - } - return build, nil -} - -// based on https://github.com/bndr/gojenkins/blob/master/jenkins.go#L282 -func (j *JenkinsClient) getBuildFromQueueID(ctx context.Context, job *gojenkins.Job, queueid int64) (*gojenkins.Build, error) { - task, err := j.client.GetQueueItem(ctx, queueid) - if err != nil { - return nil, err - } - // Jenkins queue API has about 4.7second quiet period - for task.Raw.Executable.Number == 0 { - select { - case <-time.After(1000 * time.Millisecond): - case <-ctx.Done(): - return nil, ctx.Err() - } - _, err = task.Poll(ctx) - if err != nil { - return nil, err - } - } - - build, err := job.GetBuild(ctx, task.Raw.Executable.Number) - if err != nil { - return nil, fmt.Errorf("not able to retrieve build %s", task.Raw.Executable.Number, err) - } - return build, nil -} - -func (j *JenkinsClient) waitForBuildFinished(ctx context.Context, build *gojenkins.Build) error { - const waitingPeriod = 10000 * time.Millisecond - for build.IsRunning(ctx) { - log.Printf("Build still running, waiting for %s...", waitingPeriod) - select { - case <-time.After(waitingPeriod): - case <-ctx.Done(): - return ctx.Err() - } - _, err := build.Poll(ctx) - if err != nil { - return err - } - } - return nil -} diff --git a/.buildkite/scripts/triggerJenkinsJob/jenkins/retry.go b/.buildkite/scripts/triggerJenkinsJob/jenkins/retry.go deleted file mode 100644 index 7be270c93..000000000 --- a/.buildkite/scripts/triggerJenkinsJob/jenkins/retry.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. 
Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -package jenkins - -import ( - "context" - "log" - "math" - "time" -) - -type retryableFunction func(context.Context) error - -func minDuration(a, b time.Duration) time.Duration { - if a < b { - return a - } - return b -} - -func retry(f retryableFunction, retries int, growthFactor float64, delay, maxDelay time.Duration) retryableFunction { - return func(ctx context.Context) error { - delaySeconds := delay.Seconds() - for r := 0; ; r++ { - err := f(ctx) - if err == nil || r >= retries { - // Return when there is no error or the maximum amount - // of retries is reached. - return err - } - - waitingTimeSeconds := math.Pow(growthFactor, float64(r)) * delaySeconds - waitingTime := time.Duration(waitingTimeSeconds) * time.Second - waitingTime = minDuration(waitingTime, maxDelay) - - log.Printf("Function failed, retrying in %v -> %.2f", waitingTime, waitingTimeSeconds) - - select { - case <-time.After(waitingTime): - case <-ctx.Done(): - return ctx.Err() - } - } - return nil - } -} diff --git a/.buildkite/scripts/triggerJenkinsJob/main.go b/.buildkite/scripts/triggerJenkinsJob/main.go deleted file mode 100644 index f85866dec..000000000 --- a/.buildkite/scripts/triggerJenkinsJob/main.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. 
- -package main - -import ( - "context" - "flag" - "fmt" - "log" - "os" - "strings" - "time" - - "github.com/elastic/trigger-jenkins-buildkite-plugin/jenkins" -) - -const ( - publishingRemoteJob = "package_storage/job/publishing-job-remote" - signingJob = "elastic+unified-release+master+sign-artifacts-with-gpg" - - publishJobKey = "publish" - signJobKey = "sign" -) - -var allowedJenkinsJobs = map[string]string{ - publishJobKey: publishingRemoteJob, - signJobKey: signingJob, -} - -var ( - jenkinsHost = os.Getenv("JENKINS_HOST_SECRET") - jenkinsUser = os.Getenv("JENKINS_USERNAME_SECRET") - jenkinsToken = os.Getenv("JENKINS_TOKEN") -) - -func jenkinsJobOptions() []string { - keys := make([]string, 0, len(allowedJenkinsJobs)) - for k := range allowedJenkinsJobs { - keys = append(keys, k) - } - return keys -} - -func main() { - jenkinsJob := flag.String("jenkins-job", "", fmt.Sprintf("Jenkins job to trigger. Allowed values: %s", strings.Join(jenkinsJobOptions(), " ,"))) - waitingTime := flag.Duration("waiting-time", 5*time.Second, fmt.Sprintf("Waiting period between each retry")) - growthFactor := flag.Float64("growth-factor", 1.25, fmt.Sprintf("Growth-Factor used for exponential backoff delays")) - retries := flag.Int("retries", 20, fmt.Sprintf("Number of retries to trigger the job")) - maxWaitingTime := flag.Duration("max-waiting-time", 60*time.Minute, fmt.Sprintf("Maximum waiting time per each retry")) - - folderPath := flag.String("folder", "", "Path to artifacts folder") - zipPackagePath := flag.String("package", "", "Path to zip package file (*.zip)") - sigPackagePath := flag.String("signature", "", "Path to the signature file of the package file (*.zip.sig)") - async := flag.Bool("async", false, "Run async the Jenkins job") - flag.Parse() - - if _, ok := allowedJenkinsJobs[*jenkinsJob]; !ok { - log.Fatal("Invalid jenkins job") - } - - log.Printf("Triggering job: %s", allowedJenkinsJobs[*jenkinsJob]) - - ctx := context.Background() - client, err := 
jenkins.NewJenkinsClient(ctx, jenkinsHost, jenkinsUser, jenkinsToken) - if err != nil { - log.Fatalf("error creating jenkins client") - } - - opts := jenkins.Options{ - WaitingTime: *waitingTime, - Retries: *retries, - GrowthFactor: *growthFactor, - MaxWaitingTime: *maxWaitingTime, - } - - switch *jenkinsJob { - case publishJobKey: - err = runPublishingRemoteJob(ctx, client, *async, allowedJenkinsJobs[*jenkinsJob], *zipPackagePath, *sigPackagePath, opts) - case signJobKey: - err = runSignPackageJob(ctx, client, *async, allowedJenkinsJobs[*jenkinsJob], *folderPath, opts) - default: - log.Fatal("unsupported jenkins job") - } - - if err != nil { - log.Fatalf("Error: %s", err) - } -} - -func runSignPackageJob(ctx context.Context, client *jenkins.JenkinsClient, async bool, jobName, folderPath string, opts jenkins.Options) error { - if folderPath == "" { - return fmt.Errorf("missing parameter --gcs_input_path for") - } - params := map[string]string{ - "gcs_input_path": folderPath, - } - - return client.RunJob(ctx, jobName, async, params, opts) -} - -func runPublishingRemoteJob(ctx context.Context, client *jenkins.JenkinsClient, async bool, jobName, packagePath, signaturePath string, opts jenkins.Options) error { - if packagePath == "" { - return fmt.Errorf("missing parameter --gs_package_build_zip_path") - } - if signaturePath == "" { - return fmt.Errorf("missing parameter --gs_package_signature_path") - } - - // Run the job with some parameters - params := map[string]string{ - "dry_run": "true", - "gs_package_build_zip_path": packagePath, - "gs_package_signature_path": signaturePath, - } - - return client.RunJob(ctx, jobName, async, params, opts) -} diff --git a/catalog-info.yaml b/catalog-info.yaml index 7ad729a87..664f8a7fa 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -63,50 +63,6 @@ spec: everyone: access_level: READ_ONLY ---- -# yaml-language-server: 
$schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json -apiVersion: backstage.io/v1alpha1 -kind: Resource -metadata: - name: buildkite-pipeline-elastic-package-package-storage-publish - description: 'Minimal pipeline to exercise publishing a package to Package Storage (for testing only)' - links: - - title: Pipeline - url: https://buildkite.com/elastic/elastic-package-package-storage-publish - -spec: - type: buildkite-pipeline - owner: group:ingest-fp - system: buildkite - implementation: - apiVersion: buildkite.elastic.dev/v1 - kind: Pipeline - metadata: - name: elastic-package-package-storage-publish - description: 'Minimal pipeline to exercise publishing a package to Package Storage (for testing only)' - spec: - branch_configuration: main - pipeline_file: ".buildkite/pipeline.package-storage-publish.yml" - provider_settings: - build_pull_request_forks: false - build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot - build_tags: true - filter_enabled: true - filter_condition: >- - build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null) - repository: elastic/elastic-package - cancel_intermediate_builds: true - cancel_intermediate_builds_branch_filter: '!main' - skip_intermediate_builds: true - skip_intermediate_builds_branch_filter: '!main' - teams: - ecosystem: - access_level: MANAGE_BUILD_AND_READ - ingest-fp: - access_level: MANAGE_BUILD_AND_READ - everyone: - access_level: READ_ONLY - --- # yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json apiVersion: backstage.io/v1alpha1 diff --git a/scripts/test-build-zip.sh b/scripts/test-build-zip.sh index c76c2c3d2..b83a61648 100755 --- a/scripts/test-build-zip.sh +++ 
b/scripts/test-build-zip.sh @@ -14,7 +14,7 @@ cleanup() { # Clean used resources for d in test/packages/*/*/; do ( - cd $d + cd "$d" elastic-package clean -v ) done @@ -23,7 +23,7 @@ cleanup() { } testype() { - echo $(basename $(dirname $1)) + basename "$(dirname "$1")" } trap cleanup EXIT @@ -31,8 +31,10 @@ trap cleanup EXIT OLDPWD=$PWD # Build packages export ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE="$OLDPWD/scripts/gpg-private.asc" -export ELASTIC_PACKAGE_SIGNER_PASSPHRASE=$(cat "$OLDPWD/scripts/gpg-pass.txt") -export ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" +ELASTIC_PACKAGE_SIGNER_PASSPHRASE=$(cat "$OLDPWD/scripts/gpg-pass.txt") +export ELASTIC_PACKAGE_SIGNER_PASSPHRASE +ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" +export ELASTIC_PACKAGE_LINKS_FILE_PATH go run ./scripts/gpgkey diff --git a/scripts/test-check-false-positives.sh b/scripts/test-check-false-positives.sh index 2d904ca12..f09016441 100755 --- a/scripts/test-check-false-positives.sh +++ b/scripts/test-check-false-positives.sh @@ -14,7 +14,7 @@ function cleanup() { # Clean used resources for d in test/packages/${PACKAGE_TEST_TYPE:-false_positives}/${PACKAGE_UNDER_TEST:-*}/; do ( - cd $d + cd "$d" elastic-package clean -v ) done @@ -24,29 +24,32 @@ function cleanup() { function check_expected_errors() { local package_root=$1 - local package_name=$(basename $1) + local package_name="" + package_name=$(basename "$1") local expected_errors_file="${package_root%/}.expected_errors" local result_tests="build/test-results/${package_name}_*.xml" local results_no_spaces="build/test-results-no-spaces.xml" - if [ ! -f ${expected_errors_file} ]; then + if [ ! 
-f "${expected_errors_file}" ]; then echo "No unexpected errors file in ${expected_errors_file}" return fi rm -f ${result_tests} ( - cd $package_root + cd "$package_root" elastic-package test -v --report-format xUnit --report-output file --test-coverage --defer-cleanup 1s || true ) cat ${result_tests} | tr -d '\n' > ${results_no_spaces} # check number of expected errors - local number_errors=$(cat ${result_tests} | grep "" | wc -l) - local expected_errors=$(cat ${expected_errors_file} | wc -l) + local number_errors + number_errors=$(cat ${result_tests} | grep "" | wc -l) + local expected_errors + expected_errors=$(cat ${expected_errors_file} | wc -l) - if [ ${number_errors} -ne ${expected_errors} ]; then + if [ "${number_errors}" -ne "${expected_errors}" ]; then echo "Error: There are unexpected errors in ${package_name}" exit 1 fi @@ -54,7 +57,7 @@ function check_expected_errors() { # check whether or not the expected errors exist in the xml files while read -r line; do cat ${results_no_spaces} | grep -E "${line}" - done < ${expected_errors_file} + done < "${expected_errors_file}" rm -f ${result_tests} rm -f ${results_no_spaces} @@ -65,21 +68,21 @@ function check_build_output() { local expected_build_output="${package_root%/}.build_output" local output_file="$PWD/build/elastic-package-output" - if [ ! -f ${expected_build_output} ]; then + if [ ! 
-f "${expected_build_output}" ]; then ( - cd $package_root + cd "$package_root" elastic-package build -v ) return fi ( - cd $package_root - mkdir -p $(dirname $output_file) - elastic-package build 2>&1 | tee $output_file || true # Ignore errors here + cd "$package_root" + mkdir -p "$(dirname "$output_file")" + elastic-package build 2>&1 | tee "$output_file" || true # Ignore errors here ) - diff -w -u $expected_build_output $output_file || ( + diff -w -u "$expected_build_output" "$output_file" || ( echo "Error: Build output has differences with expected output" exit 1 ) @@ -87,7 +90,8 @@ function check_build_output() { trap cleanup EXIT -export ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" +ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" +export ELASTIC_PACKAGE_LINKS_FILE_PATH # Update the stack elastic-package stack update -v @@ -99,6 +103,6 @@ elastic-package stack status # Run package tests for d in test/packages/${PACKAGE_TEST_TYPE:-false_positives}/${PACKAGE_UNDER_TEST:-*}/; do - check_build_output $d - check_expected_errors $d + check_build_output "$d" + check_expected_errors "$d" done diff --git a/scripts/test-check-packages.sh b/scripts/test-check-packages.sh index 544fc327b..a9b116e3f 100755 --- a/scripts/test-check-packages.sh +++ b/scripts/test-check-packages.sh @@ -30,7 +30,7 @@ cleanup() { # Clean used resources for d in test/packages/${PACKAGE_TEST_TYPE:-other}/${PACKAGE_UNDER_TEST:-*}/; do ( - cd $d + cd "$d" elastic-package clean -v ) done @@ -40,13 +40,14 @@ cleanup() { trap cleanup EXIT -export ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" +ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" +export ELASTIC_PACKAGE_LINKS_FILE_PATH OLDPWD=$PWD # Build/check packages for d in test/packages/${PACKAGE_TEST_TYPE:-other}/${PACKAGE_UNDER_TEST:-*}/; do ( - cd $d + cd "$d" elastic-package check -v ) done @@ -74,17 +75,17 @@ elastic-package stack status if [ "${PACKAGE_TEST_TYPE:-other}" == 
"with-kind" ]; then # Boot up the kind cluster - kind create cluster --config $PWD/scripts/kind-config.yaml + kind create cluster --config "$PWD/scripts/kind-config.yaml" fi # Run package tests for d in test/packages/${PACKAGE_TEST_TYPE:-other}/${PACKAGE_UNDER_TEST:-*}/; do ( - cd $d + cd "$d" if [ "${PACKAGE_TEST_TYPE:-other}" == "benchmarks" ]; then # It is not used PACKAGE_UNDER_TEST, so all benchmark packages are run in the same loop - package_to_test=$(basename ${d}) + package_to_test=$(basename "${d}") if [ "${package_to_test}" == "pipeline_benchmark" ]; then rm -rf "${OLDPWD}/build/benchmark-results" elastic-package benchmark pipeline -v --report-format xUnit --report-output file --fail-on-missing @@ -95,8 +96,8 @@ for d in test/packages/${PACKAGE_TEST_TYPE:-other}/${PACKAGE_UNDER_TEST:-*}/; do elastic-package benchmark pipeline -v --report-format json --report-output file --fail-on-missing elastic-package report --fail-on-missing benchmark \ - --new ${OLDPWD}/build/benchmark-results \ - --old ${OLDPWD}/build/benchmark-results-old \ + --new "${OLDPWD}/build/benchmark-results" \ + --old "${OLDPWD}/build/benchmark-results-old" \ --threshold 1 --report-output-path="${OLDPWD}/build/benchreport" fi if [ "${package_to_test}" == "system_benchmark" ]; then diff --git a/scripts/test-check-update-version.sh b/scripts/test-check-update-version.sh index 7054f774a..cd952adf4 100755 --- a/scripts/test-check-update-version.sh +++ b/scripts/test-check-update-version.sh @@ -9,54 +9,54 @@ timeLatestDataModification() { } latestVersionFilePath="${HOME}/.elastic-package/latestVersion" -rm -rf ${latestVersionFilePath} +rm -rf "${latestVersionFilePath}" # First usage needs to write the cache file elastic-package version -if [ ! -f ${latestVersionFilePath} ]; then +if [ ! 
-f "${latestVersionFilePath}" ]; then echo "Error: Cache file with latest release info not written" exit 1 fi -LATEST_MODIFICATION_SINCE_EPOCH=$(timeLatestDataModification ${latestVersionFilePath}) +LATEST_MODIFICATION_SINCE_EPOCH=$(timeLatestDataModification "${latestVersionFilePath}") # Second elastic-package usage should not update the file elastic-package version -if [ ${LATEST_MODIFICATION_SINCE_EPOCH} != $(timeLatestDataModification ${latestVersionFilePath}) ]; then +if [ "${LATEST_MODIFICATION_SINCE_EPOCH}" != "$(timeLatestDataModification "${latestVersionFilePath}")" ]; then echo "Error: Cache file with latest release info updated - not used cached value" exit 1 fi # If latest data modification is older than the expiration time, it should be updated # Forced change latest data modification of cache file -cat < ${latestVersionFilePath} +cat < "${latestVersionFilePath}" { "tag":"v0.85.0", "html_url":"https://github.com/elastic/elastic-package/releases/tag/v0.85.0", "timestamp":"2023-08-28T17:10:31.735505212+02:00" } EOF -LATEST_MODIFICATION_SINCE_EPOCH=$(timeLatestDataModification ${latestVersionFilePath}) +LATEST_MODIFICATION_SINCE_EPOCH=$(timeLatestDataModification "${latestVersionFilePath}") # Precision of stat is in seconds, need to wait at least 1 second sleep 1 elastic-package version -if [ ${LATEST_MODIFICATION_SINCE_EPOCH} == $(timeLatestDataModification ${latestVersionFilePath}) ]; then +if [ "${LATEST_MODIFICATION_SINCE_EPOCH}" == "$(timeLatestDataModification "${latestVersionFilePath}")" ]; then echo "Error: Cache file with latest release info not updated and timestamp is older than the expiration time" exit 1 fi # If environment variable is defined, cache file should not be written export ELASTIC_PACKAGE_CHECK_UPDATE_DISABLED=true -rm -rf ${latestVersionFilePath} +rm -rf "${latestVersionFilePath}" elastic-package version -if [ -f ${latestVersionFilePath} ]; then +if [ -f "${latestVersionFilePath}" ]; then echo "Error: Cache file with latest 
release info written and ELASTIC_PACKAGE_CHECK_UPDATE_DISABLED is defined" exit 1 fi \ No newline at end of file diff --git a/scripts/test-install-zip.sh b/scripts/test-install-zip.sh index 388230c8e..be2fc24a0 100755 --- a/scripts/test-install-zip.sh +++ b/scripts/test-install-zip.sh @@ -18,7 +18,7 @@ cleanup() { for d in test/packages/*/*/; do ( - cd $d + cd "$d" elastic-package clean -v ) done @@ -27,24 +27,25 @@ cleanup() { } testype() { - echo $(basename $(dirname $1)) + basename "$(dirname "$1")" } trap cleanup EXIT installAndVerifyPackage() { local zipFile="$1" - local PACKAGE_NAME_VERSION=$(basename ${zipFile} .zip) + local PACKAGE_NAME_VERSION + PACKAGE_NAME_VERSION=$(basename "${zipFile}" .zip) - elastic-package install -v --zip ${zipFile} + elastic-package install -v --zip "${zipFile}" # check that the package is installed curl -s \ - -u ${ELASTIC_PACKAGE_ELASTICSEARCH_USERNAME}:${ELASTIC_PACKAGE_ELASTICSEARCH_PASSWORD} \ - --cacert ${ELASTIC_PACKAGE_CA_CERT} \ + -u "${ELASTIC_PACKAGE_ELASTICSEARCH_USERNAME}:${ELASTIC_PACKAGE_ELASTICSEARCH_PASSWORD}" \ + --cacert "${ELASTIC_PACKAGE_CA_CERT}" \ -H 'content-type: application/json' \ -H 'kbn-xsrf: true' \ - -f ${ELASTIC_PACKAGE_KIBANA_HOST}/api/fleet/epm/packages/${PACKAGE_NAME_VERSION} | grep -q '"status":"installed"' + -f "${ELASTIC_PACKAGE_KIBANA_HOST}/api/fleet/epm/packages/${PACKAGE_NAME_VERSION}" | grep -q '"status":"installed"' } usage() { @@ -95,21 +96,22 @@ elastic-package stack update -v ${ARG_VERSION} # Boot up the stack elastic-package stack up -d -v ${ARG_VERSION} -export ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" +ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" +export ELASTIC_PACKAGE_LINKS_FILE_PATH OLDPWD=$PWD # Build packages for d in test/packages/*/*/; do # Packages in false_positives can have issues. 
- if [ "$(testype $d)" == "false_positives" ]; then + if [ "$(testype "$d")" == "false_positives" ]; then continue fi ( - cd $d + cd "$d" elastic-package build ) done -cd $OLDPWD +cd "$OLDPWD" # Remove unzipped built packages, leave .zip files rm -r build/packages/*/ @@ -124,5 +126,5 @@ else fi for zipFile in build/packages/*.zip; do - installAndVerifyPackage ${zipFile} + installAndVerifyPackage "${zipFile}" done diff --git a/scripts/test-stack-command.sh b/scripts/test-stack-command.sh index f05af3ab0..d5f0a7690 100755 --- a/scripts/test-stack-command.sh +++ b/scripts/test-stack-command.sh @@ -9,7 +9,7 @@ cleanup() { r=$? # Dump stack logs - elastic-package stack dump -v --output build/elastic-stack-dump/stack/${VERSION} + elastic-package stack dump -v --output "build/elastic-stack-dump/stack/${VERSION}" # Take down the stack elastic-package stack down -v @@ -28,7 +28,7 @@ default_version() { clean_status_output() { local output_file="$1" - cat ${output_file} | grep "│" | tr -d ' ' + cat "${output_file}" | grep "│" | tr -d ' ' } trap cleanup EXIT @@ -56,11 +56,11 @@ OUTPUT_PATH_STATUS="build/elastic-stack-status/${VERSION}" if [ "${APM_SERVER_ENABLED}" = true ]; then OUTPUT_PATH_STATUS="build/elastic-stack-status/${VERSION}_with_apm_server" fi -mkdir -p ${OUTPUT_PATH_STATUS} +mkdir -p "${OUTPUT_PATH_STATUS}" # Initial status empty -elastic-package stack status 2> ${OUTPUT_PATH_STATUS}/initial.txt -grep "\- No service running" ${OUTPUT_PATH_STATUS}/initial.txt +elastic-package stack status 2> "${OUTPUT_PATH_STATUS}/initial.txt" +grep "\- No service running" "${OUTPUT_PATH_STATUS}/initial.txt" # Update the stack elastic-package stack update -v ${ARG_VERSION} @@ -70,10 +70,10 @@ elastic-package stack up -d -v ${ARG_VERSION} # Verify it's accessible eval "$(elastic-package stack shellinit)" -curl --cacert ${ELASTIC_PACKAGE_CA_CERT} -f ${ELASTIC_PACKAGE_KIBANA_HOST}/login | grep kbn-injected-metadata >/dev/null # healthcheck +curl --cacert "${ELASTIC_PACKAGE_CA_CERT}" 
-f "${ELASTIC_PACKAGE_KIBANA_HOST}/login" | grep kbn-injected-metadata >/dev/null # healthcheck # Check status with running services -cat <<EOF > ${OUTPUT_PATH_STATUS}/expected_running.txt +cat <<EOF > "${OUTPUT_PATH_STATUS}/expected_running.txt" Status of Elastic stack services: ╭──────────────────┬─────────┬───────────────────╮ │ SERVICE │ VERSION │ STATUS │ @@ -86,14 +86,14 @@ Status of Elastic stack services: ╰──────────────────┴─────────┴───────────────────╯ EOF -elastic-package stack status -v 2> ${OUTPUT_PATH_STATUS}/running.txt +elastic-package stack status -v 2> "${OUTPUT_PATH_STATUS}/running.txt" # Remove spaces to avoid issues with spaces between columns -clean_status_output "${OUTPUT_PATH_STATUS}/expected_running.txt" > ${OUTPUT_PATH_STATUS}/expected_no_spaces.txt -clean_status_output "${OUTPUT_PATH_STATUS}/running.txt" > ${OUTPUT_PATH_STATUS}/running_no_spaces.txt +clean_status_output "${OUTPUT_PATH_STATUS}/expected_running.txt" > "${OUTPUT_PATH_STATUS}/expected_no_spaces.txt" +clean_status_output "${OUTPUT_PATH_STATUS}/running.txt" > "${OUTPUT_PATH_STATUS}/running_no_spaces.txt" if [ "${APM_SERVER_ENABLED}" = true ]; then curl http://localhost:8200/ fi -diff -q ${OUTPUT_PATH_STATUS}/running_no_spaces.txt ${OUTPUT_PATH_STATUS}/expected_no_spaces.txt +diff -q "${OUTPUT_PATH_STATUS}/running_no_spaces.txt" "${OUTPUT_PATH_STATUS}/expected_no_spaces.txt"