diff --git a/Jenkinsfile b/Jenkinsfile index 5af2bc193c..6263b382f4 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,19 +1,8 @@ #!/usr/bin/env groovy -/* Tips -1. Keep stages focused on producing one artifact or achieving one goal. This makes stages easier to parallelize or re-structure later. -1. Stages should simply invoke a make target or a self-contained script. Do not write testing logic in this Jenkinsfile. -3. CoreOS does not ship with `make`, so Docker builds still have to use small scripts. -*/ - commonCreds = [ - file(credentialsId: 'tectonic-license', variable: 'TF_VAR_tectonic_license_path'), - file(credentialsId: 'tectonic-pull', variable: 'TF_VAR_tectonic_pull_secret_path'), - usernamePassword( - credentialsId: 'jenkins-log-analyzer-user', - passwordVariable: 'LOG_ANALYZER_PASSWORD', - usernameVariable: 'LOG_ANALYZER_USER' - ), + file(credentialsId: 'tectonic-license', variable: 'LICENSE_PATH'), + file(credentialsId: 'tectonic-pull', variable: 'PULL_SECRET_PATH'), [ $class: 'StringBinding', credentialsId: 'github-coreosbot', @@ -36,9 +25,6 @@ creds.push( credentialsId: 'TF-TECTONIC-JENKINS-NO-SESSION' ] ) -creds.push( - string(credentialsId: 'AWS-TECTONIC-TRACK-2-ROLE-NAME', variable: 'TF_VAR_tectonic_aws_installer_role') -) quayCreds = [ usernamePassword( @@ -48,17 +34,12 @@ quayCreds = [ ) ] -defaultBuilderImage = 'quay.io/coreos/tectonic-builder:v1.46' -tectonicSmokeTestEnvImage = 'quay.io/coreos/tectonic-smoke-test-env:v5.16' +tectonicSmokeTestEnvImage = 'quay.io/coreos/tectonic-smoke-test-env:v6.0' tectonicBazelImage = 'quay.io/coreos/tectonic-builder:bazel-v0.3' originalCommitId = 'UNKNOWN' pipeline { - agent none - environment { - KUBE_CONFORMANCE_IMAGE = 'quay.io/coreos/kube-conformance:v1.9.1_coreos.0' - LOGSTASH_BUCKET= "log-analyzer-tectonic-installer" - } + agent { label 'worker && ec2' } options { // Individual steps have stricter timeouts. 360 minutes should be never reached. timeout(time:6, unit:'HOURS') @@ -66,36 +47,11 @@ pipeline { buildDiscarder(logRotator(numToKeepStr:'20', artifactNumToKeepStr: '20')) } parameters { - string( - name: 'builder_image', - defaultValue: defaultBuilderImage, - description: 'tectonic-builder docker image to use for builds' - ) - string( - name: 'hyperkube_image', - defaultValue: '', - description: 'Hyperkube image. Please define the param like: {hyperkube=""}' - ) - booleanParam( - name: 'RUN_CONFORMANCE_TESTS', - defaultValue: false, - description: '' - ) booleanParam( name: 'RUN_SMOKE_TESTS', defaultValue: true, description: '' ) - booleanParam( - name: 'RUN_GUI_TESTS', - defaultValue: true, - description: '' - ) - string( - name: 'COMPONENT_TEST_IMAGES', - defaultValue: '', - description: 'List of container images for component tests to run (comma-separated)' - ) booleanParam( name: 'PLATFORM/AWS', defaultValue: true, @@ -124,121 +80,48 @@ pipeline { } stages { - stage('Build & Test') { - environment { - GO_PROJECT = "/go/src/github.com/${params.GITHUB_REPO}" - MAKEFLAGS = '-j4' - } - steps { - node('worker && ec2') { - ansiColor('xterm') { - script { - def err = null - try { - timeout(time: 20, unit: 'MINUTES') { - forcefullyCleanWorkspace() - - /* - This supports users who require builds at a specific git ref - instead of the branch tip. 
- */ - if (params.SPECIFIC_GIT_COMMIT == '') { - checkout scm - originalCommitId = sh(returnStdout: true, script: 'git rev-parse "origin/${BRANCH_NAME}"').trim() - } else { - checkout([ - $class: 'GitSCM', - branches: [[name: params.SPECIFIC_GIT_COMMIT]], - userRemoteConfigs: [[url: "https://github.com/${params.GITHUB_REPO}.git"]] - ]) - // In case params.SPECIFIC_GIT_COMMIT is a mutable tag instead - // of a sha - originalCommitId = sh(returnStdout: true, script: 'git rev-parse "${SPECIFIC_GIT_COMMIT}"').trim() - } - - echo "originalCommitId: ${originalCommitId}" - stash name: 'clean-repo', excludes: 'vendor/**,tests/smoke/vendor/**' - - withDockerContainer(tectonicBazelImage) { - sh "bazel test terraform_fmt --test_output=all" - sh "bazel test installer:cli_units --test_output=all" - sh"""#!/bin/bash -ex - bazel build tarball tests/smoke - - # Jenkins `stash` does not follow symlinks - thereby temporarily copy the files to the root dir - cp bazel-bin/tectonic-dev.tar.gz . - cp bazel-bin/tests/smoke/linux_amd64_stripped/smoke . - """ - stash name: 'tectonic-tarball', includes: 'tectonic-dev.tar.gz' - stash name: 'smoke-tests', includes: 'smoke' - archiveArtifacts allowEmptyArchive: true, artifacts: 'tectonic-dev.tar.gz' - } - - withDockerContainer(tectonicSmokeTestEnvImage) { - sh"""#!/bin/bash -ex - cd tests/rspec - rubocop --cache false spec lib - """ - } - } - } catch (error) { - err = error - throw error - } finally { - reportStatusToGithub((err == null) ? 'success' : 'failure', 'basic-tests', originalCommitId) - } - } - } - } - } - } - stage("Smoke Tests") { when { expression { - return params.RUN_SMOKE_TESTS || params.RUN_CONFORMANCE_TESTS || params.COMPONENT_TEST_IMAGES != '' + return params.RUN_SMOKE_TESTS } } - environment { - TECTONIC_INSTALLER_ROLE = 'tf-tectonic-installer-track-2' - GRAFITI_DELETER_ROLE = 'tf-grafiti' - TF_VAR_tectonic_container_images = "${params.hyperkube_image}" - TF_VAR_tectonic_kubelet_debug_config = "--minimum-container-ttl-duration=8h --maximum-dead-containers-per-container=9999 --maximum-dead-containers=9999" + options { + timeout(time: 70, unit: 'MINUTES') } steps { - script { - def builds = [:] - def aws = [ - [file: 'basic_spec.rb', args: ''], - // [file: 'vpc_internal_spec.rb', args: '--device=/dev/net/tun --cap-add=NET_ADMIN -u root'], - // [file: 'network_flannel_spec.rb', args: ''], - // [file: 'exp_spec.rb', args: ''], - // [file: 'ca_spec.rb', args: ''], - // [file: 'custom_tls_spec.rb', args: ''] - ] - - if (params."PLATFORM/AWS") { - aws.each { build -> - filepath = 'spec/aws/' + build.file - builds['aws/' + build.file] = runRSpecTest(filepath, build.args, creds) + withDockerContainer(tectonicSmokeTestEnvImage) { + withCredentials(creds) { + ansiColor('xterm') { + sh """#!/bin/bash -e + export HOME=/home/jenkins + ./tests/run.sh + cp bazel-bin/tectonic-dev.tar.gz . + """ + stash name: 'tectonic-tarball', includes: 'tectonic-dev.tar.gz' } } - - parallel builds + } + } + post { + success { + reportStatusToGithub('success', originalCommitId) + } + failure { + reportStatusToGithub('failure', originalCommitId) } } } + stage('Build docker image') { when { branch 'master' } steps { - node('worker && ec2') { forcefullyCleanWorkspace() withCredentials(quayCreds) { ansiColor('xterm') { - unstash 'clean-repo' unstash 'tectonic-tarball' sh """ docker build -t quay.io/coreos/tectonic-installer:master -f images/tectonic-installer/Dockerfile . 
@@ -249,38 +132,15 @@ pipeline { cleanWs notFailBuild: true } } - } } } } post { always { - node('worker && ec2') { - forcefullyCleanWorkspace() - echo "Starting with streaming the logfile to the S3 bucket" - withDockerContainer(params.builder_image) { - withCredentials(credsLog) { - unstash 'clean-repo' - script { - try { - sh """#!/bin/bash -xe - export BUILD_RESULT=${currentBuild.currentResult} - ./tests/jenkins-jobs/scripts/log-analyzer-copy.sh jenkins-logs - """ - } catch (Exception e) { - if (params.NOTIFY_SLACK) { - slackSend color: 'warning', channel: params.SLACK_CHANNEL, message: "Job ${env.JOB_NAME}, build no. #${BUILD_NUMBER} - cannot send jenkins logs to S3" - } - } finally { - cleanWs notFailBuild: true - } - } - } - } - } + forcefullyCleanWorkspace() + cleanWs notFailBuild: true } - failure { script { if (params.NOTIFY_SLACK) { @@ -293,7 +153,7 @@ pipeline { def forcefullyCleanWorkspace() { return withDockerContainer( - image: tectonicSmokeTestEnvImage, + image: tectonicBazelImage, args: '-u root' ) { ansiColor('xterm') { @@ -307,78 +167,8 @@ def forcefullyCleanWorkspace() { } } -def unstashCleanRepoTectonicTarGZSmokeTests() { - unstash 'clean-repo' - unstash 'tectonic-tarball' - unstash 'smoke-tests' - sh """#!/bin/bash -ex - # Jenkins `stash` does not follow symlinks - thereby temporarily copy the files to the root dir - mkdir -p bazel-bin/tests/smoke/linux_amd64_stripped/ - cp tectonic-dev.tar.gz bazel-bin/. - cp smoke bazel-bin/tests/smoke/linux_amd64_stripped/. - """ -} - -def runRSpecTest(testFilePath, dockerArgs, credentials) { - return { - node('worker && ec2') { - def err = null - try { - timeout(time: 5, unit: 'HOURS') { - forcefullyCleanWorkspace() - ansiColor('xterm') { - withCredentials(credentials + quayCreds) { - withDockerContainer( - image: tectonicSmokeTestEnvImage, - args: '-u root -v /var/run/docker.sock:/var/run/docker.sock ' + dockerArgs - ) { - unstashCleanRepoTectonicTarGZSmokeTests() - sh """#!/bin/bash -ex - mkdir -p templogfiles && chmod 777 templogfiles - cd tests/rspec - - # Directing test output both to stdout as well as a log file - rspec ${testFilePath} --format RspecTap::Formatter --format RspecTap::Formatter --out ../../templogfiles/tap.log - """ - } - } - } - } - } catch (error) { - err = error - throw error - } finally { - reportStatusToGithub((err == null) ? 'success' : 'failure', testFilePath, originalCommitId) - step([$class: "TapPublisher", testResults: "templogfiles/*", outputTapToConsole: true, planRequired: false]) - archiveArtifacts allowEmptyArchive: true, artifacts: 'bazel-bin/tectonic/**/logs/**' - withDockerContainer(params.builder_image) { - withCredentials(credsLog) { - script { - try { - sh """#!/bin/bash -xe - ./tests/jenkins-jobs/scripts/log-analyzer-copy.sh smoke-test-logs ${testFilePath} - """ - } catch (Exception e) { - if (params.NOTIFY_SLACK) { - slackSend color: 'warning', channel: params.SLACK_CHANNEL, message: "Job ${env.JOB_NAME}, build no. 
#${BUILD_NUMBER} - cannot send smoke test logs to S3" - } - } finally { - cleanWs notFailBuild: true - } - } - } - } - cleanWs notFailBuild: true - } - - } - } -} - -def reportStatusToGithub(status, context, commitId) { +def reportStatusToGithub(status, commitId) { withCredentials(creds) { - sh """#!/bin/bash -ex - ./tests/jenkins-jobs/scripts/report-status-to-github.sh ${status} ${context} ${commitId} ${params.GITHUB_REPO} - """ + sh "./tests/jenkins-jobs/scripts/report-status-to-github.sh ${status} smoke-test ${commitId} ${params.GITHUB_REPO} || true" } } diff --git a/images/tectonic-smoke-test-env/Dockerfile b/images/tectonic-smoke-test-env/Dockerfile index dbd10ddb86..2959a0be2f 100644 --- a/images/tectonic-smoke-test-env/Dockerfile +++ b/images/tectonic-smoke-test-env/Dockerfile @@ -1,53 +1,24 @@ FROM debian:stretch ENV TERRAFORM_VERSION="0.11.1" -ENV DOCKER_VERSION 1.13.1 -ENV CT_VERSION 0.5.0 -ENV BUNDLER_VERSION 1.16.0 - -COPY ./tests/rspec/Gemfile* /tmp/app/ RUN apt-get update && \ apt-get install --no-install-recommends -y -q \ - curl make unzip jq awscli wget xvfb xauth ssh openvpn build-essential ruby-full zlib1g-dev bundler gnupg && \ + curl make unzip jq awscli wget xvfb xauth ssh openvpn build-essential zlib1g-dev gnupg uuid-runtime && \ apt-get clean -# Install gcloud -RUN echo "deb http://packages.cloud.google.com/apt cloud-sdk-stretch main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ - curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \ +# Install bazel +RUN echo "deb [arch=amd64] http://storage.googleapis.com/bazel-apt stable jdk1.8" > /etc/apt/sources.list.d/bazel.list && \ + curl https://bazel.build/bazel-release.pub.gpg | apt-key add - && \ apt-get update && \ - apt-get install --no-install-recommends -y -q \ - google-cloud-sdk - -RUN cd /tmp/app && gem install bundler -v ${BUNDLER_VERSION} && bundle install && rm -r /tmp/app - -# Install kubectl -RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl && \ - chmod +x ./kubectl && \ - mv ./kubectl /bin/kubectl + apt-get install --no-install-recommends -y -q openjdk-8-jdk bazel git python python-yaml && \ + apt-get clean # Install Terraform RUN curl -L https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip | funzip > /usr/local/bin/terraform && chmod +x /usr/local/bin/terraform -# Install Chrome for installer gui tests -# Use Chrome beta because v60 or higher is needed for headless mode -RUN wget --quiet -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \ - sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list' && \ - apt-get update && \ - apt-get install --no-install-recommends -y -q \ - google-chrome-beta ca-certificates - -# Install container linux config transpiler -RUN curl -L https://github.com/coreos/container-linux-config-transpiler/releases/download/v${CT_VERSION}/ct-v${CT_VERSION}-x86_64-unknown-linux-gnu -o ct && \ -chmod +x ./ct && \ -mv ct /bin/ct - -# Install docker client to start k8s conformance test docker container -RUN curl https://get.docker.com/builds/Linux/x86_64/docker-${DOCKER_VERSION}.tgz | tar -xvz && \ - mv docker/docker /usr/local/bin/docker && \ - chmod +x /usr/local/bin/docker && \ - rm -r docker - # Add user jenkins (uid: 1000), this is needed among others for `ssh` to not # 
complain about missing user. RUN useradd -u 1000 -ms /bin/bash jenkins +RUN mkdir -p /home/jenkins/.ssh && chown jenkins:jenkins /home/jenkins/.ssh +ENV HOME /home/jenkins diff --git a/images/tectonic-smoke-test-env/Dockerfile.builder b/images/tectonic-smoke-test-env/Dockerfile.builder deleted file mode 100644 index ec54cabf23..0000000000 --- a/images/tectonic-smoke-test-env/Dockerfile.builder +++ /dev/null @@ -1,8 +0,0 @@ -FROM alpine:edge -RUN apk update && apk upgrade && apk add alpine-sdk linux-headers bash curl openssl-dev readline-dev zlib-dev && \ - adduser -s /bin/bash -D rspec -VOLUME /home/rspec/.rbenv -WORKDIR /home/rspec -USER rspec -RUN echo "export PATH=\$HOME/.rbenv/bin:\$PATH ; eval \"\$(rbenv init -)\"" >> .bashrc -ENTRYPOINT ["/bin/bash", "-c"] diff --git a/images/tectonic-smoke-test-env/Dockerfile.runtime b/images/tectonic-smoke-test-env/Dockerfile.runtime deleted file mode 100644 index 00d1b41102..0000000000 --- a/images/tectonic-smoke-test-env/Dockerfile.runtime +++ /dev/null @@ -1,13 +0,0 @@ -FROM alpine:3.6 -VOLUME /tests -ENTRYPOINT ["/bin/bash", "-lc", "bundle", "exec"] -RUN apk add --no-cache bash curl openssl readline zlib && \ - adduser -s /bin/bash -D rspec && \ - echo "export PATH=\$HOME/.rbenv/bin:\$PATH ; eval \"\$(rbenv init -)\"" >> /home/rspec/.bashrc && \ - echo ". ~/.bashrc" >> /home/rspec/.bash_profile -ADD rbenv /home/rspec/.rbenv -RUN chown -R rspec:rspec /home/rspec/.rbenv -USER rspec -SHELL ["/bin/bash", "-c"] -WORKDIR /tests/rspec -# RUN "source ~/.bashrc && bundle install" diff --git a/images/tectonic-smoke-test-env/Makefile b/images/tectonic-smoke-test-env/Makefile deleted file mode 100644 index 19d87cc2e9..0000000000 --- a/images/tectonic-smoke-test-env/Makefile +++ /dev/null @@ -1,42 +0,0 @@ -.PHONY: all deps runtime - -VERSION = latest -RUBY_VERSION = 2.4.1 -RBENV_VERSION = 1.1.1 -RUBY_BUILD_VERSION = 20170726 - -RUNTIME_IMAGE = smokes-runtime:$(VERSION) -BUILDER_IMAGE = smokes-builder:$(VERSION) -RBENV_URL = https://github.com/rbenv/rbenv/archive/v$(RBENV_VERSION).zip -RUBY_BUILD_URL = https://github.com/rbenv/ruby-build/archive/v$(RUBY_BUILD_VERSION).zip - -TMPDIR = /tmp -BUILD_ROOT = $(PWD) - -all: runtime - -clean: - rm -rf $(TMPDIR)/rbenv.zip $(TMPDIR)/ruby-build.zip rbenv - docker rmi $(RUNTIME_IMAGE) $(BUILDER_IMAGE) - -deps: $(BUILD_ROOT)/rbenv $(BUILD_ROOT)/rbenv/plugins/ruby-build - -$(BUILD_ROOT)/rbenv: - curl -#Lo "$(TMPDIR)/rbenv.zip" $(RBENV_URL) - unzip "$(TMPDIR)/rbenv.zip" - mv rbenv-$(RBENV_VERSION) rbenv - -$(BUILD_ROOT)/rbenv/plugins/ruby-build: rbenv - curl -#Lo "$(TMPDIR)/ruby-build.zip" $(RUBY_BUILD_URL) - mkdir -p ./rbenv/plugins - unzip -d ./rbenv/plugins/ "$(TMPDIR)/ruby-build.zip" - mv ./rbenv/plugins/ruby-build-$(RUBY_BUILD_VERSION) ./rbenv/plugins/ruby-build - -builder: deps - docker build -f Dockerfile.builder -t $(BUILDER_IMAGE) . - -build-ruby: builder - docker run -t --rm -v $(PWD)/rbenv:/home/rspec/.rbenv $(BUILDER_IMAGE) ". ~/.bashrc && rbenv install -s $(RUBY_VERSION)" - -runtime: build-ruby - docker build -f Dockerfile.runtime -t $(RUNTIME_IMAGE) . 
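A sketch, not taken from this patch: with the rbenv builder/runtime Dockerfiles and their Makefile deleted, the smoke-test environment is produced from the single images/tectonic-smoke-test-env/Dockerfile above. Assuming a working Docker install and the repository root as the working directory, the image could be rebuilt locally under the tag the Jenkinsfile now pins:

    # sketch only: build the bazel-based smoke-test env image with the v6.0 tag used by the Jenkinsfile
    docker build -t quay.io/coreos/tectonic-smoke-test-env:v6.0 images/tectonic-smoke-test-env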
diff --git a/tests/run.sh b/tests/run.sh new file mode 100755 index 0000000000..35ee658f21 --- /dev/null +++ b/tests/run.sh @@ -0,0 +1,85 @@ +#!/bin/bash -e +#shellcheck disable=SC2155 + +# This should be executed from top-level directory not from `tests` directory +# Script needs two variables to be set before execution +# 1) LICENSE_PATH - path to tectonic license file +# 2) PULL_SECRET_PATH - path to pull secret file + +set -eo pipefail + +SMOKE_TEST_OUTPUT="Never executed. Problem with one of previous stages" +[ -z ${LICENSE_PATH+x} ] && (echo "Please set LICENSE_PATH"; exit 1) +[ -z ${PULL_SECRET_PATH+x} ] && (echo "Please set PULL_SECRET_PATH"; exit 1) +[ -z ${DOMAIN+x} ] && DOMAIN="tectonic-ci.de" +[ -z ${AWS_REGION+x} ] && AWS_REGION="eu-west-1" +[ -z ${JOB_NAME+x} ] && PREFIX="${USER:-test}" || PREFIX="ci-${JOB_NAME#*/}" +CLUSTER_NAME=$(echo "${PREFIX}-$(uuidgen -r | cut -c1-5)" | tr '[:upper:]' '[:lower:]') +exec &> >(tee -a "$CLUSTER_NAME.log") + +function destroy() { + echo -e "\\e[34m Exiting... Destroying Tectonic and cleaning SSH keys...\\e[0m" + tectonic destroy --dir="${CLUSTER_NAME}" + aws ec2 delete-key-pair --key-name "${CLUSTER_NAME}" + echo -e "\\e[36m Finished! Smoke test output:\\e[0m ${SMOKE_TEST_OUTPUT}" + echo -e "\\e[34m So Long, and Thanks for All the Fish\\e[0m" +} + +trap destroy EXIT + +echo -e "\\e[36m Starting build process...\\e[0m" +bazel build tarball tests/smoke +# In future bazel build could be extracted to another job which could be running in docker container like this: +# docker run --rm -v $PWD:$PWD:Z -w $PWD quay.io/coreos/tectonic-builder:bazel-v0.3 bazel build tarball tests/smoke + +echo -e "\\e[36m Unpacking artifacts...\\e[0m" +tar -zxf bazel-bin/tectonic-dev.tar.gz +cp bazel-bin/tests/smoke/linux_amd64_stripped/smoke tectonic-dev/smoke +export PATH="$(pwd)/tectonic-dev/installer:${PATH}" +cd tectonic-dev + +echo -e "\\e[36m Creating Tectonic configuration...\\e[0m" +CONFIG=$(python -c 'import sys, yaml, json; json.dump(yaml.load(sys.stdin), sys.stdout)' < examples/tectonic.aws.yaml) +CONFIG=$(echo "${CONFIG}" | jq ".name = \"${CLUSTER_NAME}\"" |\ + jq ".baseDomain = \"${DOMAIN}\"" |\ + jq ".licensePath = \"${LICENSE_PATH}\"" |\ + jq ".pullSecretPath = \"${PULL_SECRET_PATH}\"" |\ + jq ".aws.region = \"${AWS_REGION}\"" |\ + jq ".aws.master.iamRoleName = \"tf-tectonic-master-node\"" |\ + jq ".aws.worker.iamRoleName = \"tf-tectonic-worker-node\"" |\ + jq ".aws.etcd.iamRoleName = \"tf-tectonic-etcd-node\"" +) +echo "${CONFIG}" | python -c 'import sys, yaml, json; yaml.safe_dump(json.load(sys.stdin), sys.stdout)' > "${CLUSTER_NAME}.yaml" + +echo -e "\\e[36m Initializing Tectonic...\\e[0m" +tectonic init --config="${CLUSTER_NAME}".yaml + +### ASSUME ROLE ### +echo -e "\\e[36m Setting up AWS credentials...\\e[0m" +export AWS_DEFAULT_REGION="${AWS_REGION}" +unset AWS_SESSION_TOKEN +ACCOUNT_ID=$(aws sts get-caller-identity | jq --raw-output '.Account') +ROLE_ARN="arn:aws:iam::${ACCOUNT_ID}:role/tf-tectonic-installer" +RES=$(aws sts assume-role --role-arn="${ROLE_ARN}" --role-session-name="jenkins-${CLUSTER_NAME}") +export AWS_SECRET_ACCESS_KEY=$(echo "${RES}" | jq --raw-output '.Credentials.SecretAccessKey') +export AWS_ACCESS_KEY_ID=$(echo "${RES}" | jq --raw-output '.Credentials.AccessKeyId') +export AWS_SESSION_TOKEN=$(echo "${RES}" | jq --raw-output '.Credentials.SessionToken') + +### HANDLE SSH KEY ### +echo -e "\\e[36m Uploading SSH key-pair to AWS...\\e[0m" +if [ ! 
-f "$HOME/.ssh/id_rsa.pub" ]; then + #shellcheck disable=SC2034 + SSH=$(ssh-keygen -b 2048 -t rsa -f "${HOME}/.ssh/id_rsa" -N "" < /dev/zero) +fi +aws ec2 import-key-pair --key-name "${CLUSTER_NAME}" --public-key-material "file://$HOME/.ssh/id_rsa.pub" +export TF_VAR_tectonic_aws_ssh_key="${CLUSTER_NAME}" + +echo -e "\\e[36m Deploying Tectonic...\\e[0m" +tectonic install --dir="${CLUSTER_NAME}" +echo -e "\\e[36m Running smoke test...\\e[0m" +export SMOKE_KUBECONFIG="$(pwd)/$CLUSTER_NAME/generated/auth/kubeconfig" +export SMOKE_NETWORKING="canal" +export SMOKE_NODE_COUNT="7" # Sum of all nodes (etcd + master + worker) +export SMOKE_MANIFEST_PATHS="$(pwd)/$CLUSTER_NAME/generated" +exec 5>&1 +SMOKE_TEST_OUTPUT=$(./smoke -test.v --cluster | tee >(cat - >&5))