diff --git a/build-docs.sh b/build-docs.sh index ff426c56ae015..585f48174f950 100755 --- a/build-docs.sh +++ b/build-docs.sh @@ -5,6 +5,6 @@ set -euo pipefail (cd docs && bash ./build-docs.sh) # deploy output to 'pack/docs' -rm -fr pack/docs -mkdir pack/docs -rsync -av docs/dist/ pack/docs/ +rm -fr dist/docs +mkdir dist/docs +rsync -av docs/dist/ dist/docs/ diff --git a/buildspec.yaml b/buildspec.yaml index ecd806c5d47dd..b23384f617e95 100644 --- a/buildspec.yaml +++ b/buildspec.yaml @@ -13,8 +13,6 @@ phases: post_build: commands: - "[ -f .BUILD_COMPLETED ] && /bin/bash ./pack.sh" - - "[ -f .BUILD_COMPLETED ] && /bin/bash ./build-docs.sh" - - "[ -f .BUILD_COMPLETED ] && /bin/bash ./bundle.sh" artifacts: files: - "**/*" diff --git a/bundle.sh b/bundle.sh deleted file mode 100755 index c7af081dbf5c9..0000000000000 --- a/bundle.sh +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/bash -# Creates the bundle for the CDK. -# Assume we have a bootstrapped and packaged repository -set -euo pipefail -root=$PWD - -# Get version from lerna -version="$(cat ${root}/lerna.json | grep version | cut -d '"' -f4)" - -# Get commit from CodePipeline (or git, if we are in CodeBuild) -# If CODEBUILD_RESOLVED_SOURCE_VERSION is not defined (i.e. local -# build or CodePipeline build), use the HEAD commit hash). -commit="${CODEBUILD_RESOLVED_SOURCE_VERSION:-}" -if [ -z "${commit}" ]; then - commit="$(git rev-parse --verify HEAD)" -fi - -full_version="${version}+${commit:0:7}" -echo "Bundling ${full_version}..." 
- -staging="$(mktemp -d)" -cleanup() { - # Clean-up after yourself - echo "Cleaning up staging directory: ${staging}" - cd ${root} - rm -rf ${staging} -} -trap cleanup EXIT -cd ${staging} - -echo "Staging: ${staging}" - -# Bundle structure -# ================ -# aws-cdk-${version}+${commit}.zip -# ├─ bin -# ├─ node_modules -# ├─ y -# │ └─ npm - y-npm repository for local installs (duplicate tarballs) -# │ -# ├─ docs - rendered docsite -# ├─ npm - npm tarballs -# ├─ dotnet - nuget packages -# ├─ java - maven repository -# │ -# └─ .version - -# Bootstrap our distribution with "pack/", which contains the collection of all -# dist/ directories in the repo (this is what ./pack.sh is doing). This includes -# 'docs', 'npm', 'java' and 'dotnet' and any other jsii language artifacts. -rsync -av ${root}/pack/ . - -# We are keeping y-npm support only for backwards compatibility reasons and until -# we publish y-npm itself and can devise instructions on how to use the self-contained .zip. -# Integration tests also depend on this behavior for now. - -y_npm_dir="${root}/tools/y-npm" -Y_NPM="${y_npm_dir}/bin/y-npm" - -# Creating a `y-npm` registry -echo "Preparing local NPM registry under y/npm" -export Y_NPM_REPOSITORY="${staging}/y/npm" -mkdir -p ${Y_NPM_REPOSITORY} - -# Publish all tarballs from the "npm" dist to this repo -# Yes, this means we will have duplicate tgz for now. -echo "Publishing CDK npm modules into y/npm" -for tarball in $PWD/js/*.tgz; do - ${Y_NPM} publish ${tarball} -done - -echo "Installing y-npm under node_modules" -y_npm_tarball=${y_npm_dir}/$(cd ${y_npm_dir} && npm pack) # produce a tarball -npm install --global-style --no-save ${y_npm_tarball} -# Because y-npm is installed on the build server (Linux), we need to bootstrap -# it on windows by manually creating the shim batch file. 
-cp ${y_npm_dir}/bin/y-npm.template.cmd node_modules/.bin/y-npm.cmd -ln -s node_modules/.bin bin - -# Create an archive under ./dist -echo "Creating ZIP bundle" - -echo ${version} > .version -dist=${root}/dist -output="${dist}/aws-cdk-${full_version}.zip" -rm -fr ${dist} -mkdir -p ${dist} -zip -y -r ${output} . -echo ${output} - -chmod +x $root/scripts/with-signing-key.sh -chmod +x $root/scripts/sign-files.sh - -# Sign the bundle -$root/scripts/with-signing-key.sh $root/scripts/sign-files.sh $output diff --git a/docs/build-docs.sh b/docs/build-docs.sh index 05307df0ee272..aea577f6d4717 100755 --- a/docs/build-docs.sh +++ b/docs/build-docs.sh @@ -17,7 +17,7 @@ export PATH=${PYTHON_DEPS}/bin:$PATH # CONFIG staging=".staging" output="dist" -refsrc="../pack/sphinx" +refsrc="../dist/sphinx" refdocs="refs" refdocsdir="${staging}/${refdocs}" refs_index="${staging}/reference.rst" diff --git a/pack-collect.sh b/pack-collect.sh deleted file mode 100755 index 8ef50d7b04c5d..0000000000000 --- a/pack-collect.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -# Merges all files from all "dist/" directories under "packages/" into "./pack" -# Executed by pack.sh as a preparation for beta-bundle.sh -set -euo pipefail -target="pack" -mkdir -p ${target} -for dir in $(find packages -name dist | grep -v node_modules); do - echo "Merging ${dir} into ${target}" - rsync -av $dir/ ${target}/ -done diff --git a/pack.sh b/pack.sh index 084f021360bd4..273d7571514f2 100755 --- a/pack.sh +++ b/pack.sh @@ -4,10 +4,11 @@ # later read by bundle-beta.sh. 
set -e export PATH=$PWD/node_modules/.bin:$PATH +root=$PWD -packdir="$PWD/pack" -rm -fr ${packdir} -mkdir -p ${packdir} +distdir="$PWD/dist" +rm -fr ${distdir} +mkdir -p ${distdir} scopes=$(lerna ls 2>/dev/null | grep -v "(private)" | cut -d" " -f1 | xargs -n1 -I{} echo "--scope {}" | tr "\n" " ") @@ -16,4 +17,38 @@ scopes=$(lerna ls 2>/dev/null | grep -v "(private)" | cut -d" " -f1 | xargs -n1 lerna run ${scopes} --sort --stream package # Collect dist/ from all modules into the root dist/ -/bin/bash ./pack-collect.sh +for dir in $(find packages -name dist | grep -v node_modules); do + echo "Merging ${dir} into ${distdir}" + rsync -av $dir/ ${distdir}/ +done + +# Build docs +/bin/bash ./build-docs.sh + +# Get version from lerna +version="$(cat ${root}/lerna.json | grep version | cut -d '"' -f4)" + +# Get commit from CodePipeline (or git, if we are in CodeBuild) +# If CODEBUILD_RESOLVED_SOURCE_VERSION is not defined (i.e. local +# build or CodePipeline build), use the HEAD commit hash). +commit="${CODEBUILD_RESOLVED_SOURCE_VERSION:-}" +if [ -z "${commit}" ]; then + commit="$(git rev-parse --verify HEAD)" +fi + +cat > ${distdir}/build.json <<HERE +{ + "name": "aws-cdk", + "version": "${version}", + "commit": "${commit}" +} +HERE diff --git a/scripts/sign-files.sh b/scripts/sign-files.sh deleted file mode 100755 --- a/scripts/sign-files.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# Signs a set of files with the signing key loaded by with-signing-key.sh. -set -euo pipefail - -if [[ "${1:-}" == "" ]]; then - echo "Usage: sign-files.sh FILE [FILE...]" >&2 - echo "">&2 - echo "Creates detached signature as FILE.sig." >&2 - exit 1 -else - if [ ! -f ${1} ]; then - echo "Asked to sign ${1}, but no such file exists." - exit 1 - fi -fi - -if [[ "${KEY_AVAILABLE:-}" == "" ]]; then - echo "Run this script using with-signing-key.sh" >&2 - exit 1 -fi - -if ! $KEY_AVAILABLE; then - echo "No key available, not signing anything." >&2 - exit 0 # Note: NOT an error -fi - -while [[ "${1:-}" != "" ]]; do - echo "Signing $1..." >&2 - echo $KEY_PASSPHRASE | gpg \ - ${GPG_PASSPHRASE_FROM_STDIN} \ - --local-user $KEY_ID \ - --batch --yes --no-tty \ - --output $1.sig \ - --detach-sign $1 - shift -done - -echo "Done!" 
>&2 diff --git a/scripts/with-signing-key.sh b/scripts/with-signing-key.sh deleted file mode 100755 index 2b6b4388fda70..0000000000000 --- a/scripts/with-signing-key.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/bash -# Run another command with the signing key for the current scope, -# if set. -# -# Upon running the subcommand, $KEY_AVAILABLE will be set to either -# 'true' or 'false'. If $KEY_AVAILABLE is 'true', the following -# variables will be set as well: -# -# $KEY_ID -# $KEY_PASSPHRASE -# $GPG_PASSPHRASE_FROM_STDIN -# -# The environment variable KEY_PASSPHRASE will be set to -# the key's passphrase, to pass in like so: -# -# echo $KEY_PASSPHRASE | gpg ${GPG_PASSPHRASE_FROM_STDIN} \ -# ...other gpg arguments... -set -euo pipefail - -if [[ "${1:-}" == "" ]]; then - echo "Usage: with-signing-key.sh CMD [ARG...]" >&2 - echo "">&2 - echo "Run another command with a preloaded GPG keyring." >&2 - exit 1 -fi - -if [[ "${SIGNING_KEY_SCOPE:-}" == "" ]]; then - echo "SIGNING_KEY_SCOPE not set, running without a key" >&2 - export KEY_AVAILABLE=false -else - tmpdir=$(mktemp -d) - trap "find $tmpdir -type f -exec rm {} \\; && rm -rf $tmpdir" EXIT - - SECRET=$SIGNING_KEY_SCOPE/SigningKey - - # Use secrets manager to obtain the key and passphrase into a JSON file - echo "Retrieving key $SECRET..." >&2 - aws secretsmanager get-secret-value --secret-id "$SECRET" --output text --query SecretString > $tmpdir/secret.txt - - value-from-secret() { - node -e "console.log(JSON.parse(require('fs').readFileSync('$tmpdir/secret.txt', { encoding: 'utf-8' })).$1)" - } - - export KEY_PASSPHRASE=$(value-from-secret Passphrase) - - # GnuPG will occasionally bail out with "gpg: failed: Inappropriate ioctl for device", the following attempts to fix - export GPG_TTY=$(tty) - export GNUPGHOME=$tmpdir - - echo "Importing key..." 
>&2 - gpg --allow-secret-key-import \ - --batch --yes --no-tty \ - --import <(value-from-secret PrivateKey) - - export KEY_ID=$(gpg --list-keys --with-colons | grep pub | cut -d: -f5) - - # Prepare environment variables with flags to GPG - # --passphrase-fd 0 \ - # ${EXTRA_GPG_OPTS} \ - GPG_PASSPHRASE_FROM_STDIN="--passphrase-fd 0" - if [[ "$(uname)" == "Darwin" ]]; then - # On Mac, we must pass this to disable a prompt for - # passphrase, but option is not recognized on Linux. - GPG_PASSPHRASE_FROM_STDIN="${GPG_PASSPHRASE_FROM_STDIN} --pinentry-mode loopback" - fi - export GPG_PASSPHRASE_FROM_STDIN - - export KEY_AVAILABLE=true -fi - -# Execute remaining commands -echo "Running: $@" >&2 -"$@"