diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..5bcd58f --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +node_modules +defaults.js +installationConstants.js +_agstate/yarn-links/ +.idea/ +ui/.cache/ +ui/dist/ +/Dockerfile +/results diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index 105462b..d80edaf 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [12.x, 14.x] + node-version: [14.x] steps: - name: Checkout dapp uses: actions/checkout@v2 diff --git a/.gitignore b/.gitignore index cb93085..096afc3 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,6 @@ installationConstants.js _agstate/yarn-links/ .idea/ ui/.cache/ -ui/dist/ \ No newline at end of file +ui/dist/ +/results/* +!/results/.keep diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..44b1b67 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,161 @@ +# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.177.0/containers/typescript-node/.devcontainer/base.Dockerfile + +# [Choice] Node.js version: 16, 14, 12 +ARG VARIANT="14-buster" +FROM mcr.microsoft.com/vscode/devcontainers/typescript-node:0-${VARIANT} as base + +# [Optional] Uncomment this section to install additional OS packages. +# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ +# && apt-get -y install --no-install-recommends + +# [Optional] Uncomment if you want to install an additional version of node using nvm +# ARG EXTRA_NODE_VERSION=10 +# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}" + +# [Optional] Uncomment if you want to install more global node packages +# RUN su node -c "npm install -g " + +############################## +# From https://github.com/docker-library/golang/blob/master/1.16/buster/Dockerfile + +ENV PATH /usr/local/go/bin:$PATH + +ENV GOLANG_VERSION 1.16.5 + +RUN set -eux; \ + \ + dpkgArch="$(dpkg --print-architecture)"; \ + url=; \ + case "${dpkgArch##*-}" in \ + 'amd64') \ + url='https://dl.google.com/go/go1.16.5.linux-amd64.tar.gz'; \ + sha256='b12c23023b68de22f74c0524f10b753e7b08b1504cb7e417eccebdd3fae49061'; \ + ;; \ + 'armel') \ + export GOARCH='arm' GOARM='5' GOOS='linux'; \ + ;; \ + 'armhf') \ + url='https://dl.google.com/go/go1.16.5.linux-armv6l.tar.gz'; \ + sha256='93cacacfbe87e3106b5bf5821de106f0f0a43c8bd1029826d44445c15df795a5'; \ + ;; \ + 'arm64') \ + url='https://dl.google.com/go/go1.16.5.linux-arm64.tar.gz'; \ + sha256='d5446b46ef6f36fdffa852f73dfbbe78c1ddf010b99fa4964944b9ae8b4d6799'; \ + ;; \ + 'i386') \ + url='https://dl.google.com/go/go1.16.5.linux-386.tar.gz'; \ + sha256='a37c6b71d0b673fe8dfeb2a8b3de78824f05d680ad32b7ac6b58c573fa6695de'; \ + ;; \ + 'mips64el') \ + export GOARCH='mips64le' GOOS='linux'; \ + ;; \ + 'ppc64el') \ + url='https://dl.google.com/go/go1.16.5.linux-ppc64le.tar.gz'; \ + sha256='fad2da6c86ede8448d2d0e66e1776e2f0ae9169714eade29b9ffbbdede7fc6cc'; \ + ;; \ + 's390x') \ + url='https://dl.google.com/go/go1.16.5.linux-s390x.tar.gz'; \ + sha256='21085f6a3568fae639edf383cce78bcb00d8f415e5e3d7feb04b6124e8e9efc1'; \ + ;; \ + *) echo >&2 "error: unsupported architecture '$dpkgArch' (likely packaging update needed)"; exit 1 ;; \ + esac; \ + build=; \ + if [ -z "$url" ]; then \ +# https://github.com/golang/go/issues/38536#issuecomment-616897960 + build=1; \ + url='https://dl.google.com/go/go1.16.5.src.tar.gz'; \ + 
sha256='7bfa7e5908c7cc9e75da5ddf3066d7cbcf3fd9fa51945851325eebc17f50ba80'; \ + echo >&2; \ + echo >&2 "warning: current architecture ($dpkgArch) does not have a corresponding Go binary release; will be building from source"; \ + echo >&2; \ + fi; \ + \ + wget -O go.tgz.asc "$url.asc" --progress=dot:giga; \ + wget -O go.tgz "$url" --progress=dot:giga; \ + echo "$sha256 *go.tgz" | sha256sum --strict --check -; \ + \ +# https://github.com/golang/go/issues/14739#issuecomment-324767697 + export GNUPGHOME="$(mktemp -d)"; \ +# https://www.google.com/linuxrepositories/ +# gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys 'EB4C 1BFD 4F04 2F6D DDCC EC91 7721 F63B D38B 4796'; \ + curl 'https://dl.google.com/linux/linux_signing_key.pub' | gpg --batch --import; \ + gpg --batch --verify go.tgz.asc go.tgz; \ + gpgconf --kill all; \ + rm -rf "$GNUPGHOME" go.tgz.asc; \ + \ + tar -C /usr/local -xzf go.tgz; \ + rm go.tgz; \ + \ + if [ -n "$build" ]; then \ + savedAptMark="$(apt-mark showmanual)"; \ + apt-get update; \ + apt-get install -y --no-install-recommends golang-go; \ + \ + ( \ + cd /usr/local/go/src; \ +# set GOROOT_BOOTSTRAP + GOHOST* such that we can build Go successfully + export GOROOT_BOOTSTRAP="$(go env GOROOT)" GOHOSTOS="$GOOS" GOHOSTARCH="$GOARCH"; \ + ./make.bash; \ + ); \ + \ + apt-mark auto '.*' > /dev/null; \ + apt-mark manual $savedAptMark > /dev/null; \ + apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \ + rm -rf /var/lib/apt/lists/*; \ + \ +# pre-compile the standard library, just like the official binary release tarballs do + go install std; \ +# go install: -race is only supported on linux/amd64, linux/ppc64le, linux/arm64, freebsd/amd64, netbsd/amd64, darwin/amd64 and windows/amd64 +# go install -race std; \ + \ +# remove a few intermediate / bootstrapping files the official binary release tarballs do not contain + rm -rf \ + /usr/local/go/pkg/*/cmd \ + /usr/local/go/pkg/bootstrap \ + /usr/local/go/pkg/obj \ + /usr/local/go/pkg/tool/*/api \ + /usr/local/go/pkg/tool/*/go_bootstrap \ + /usr/local/go/src/cmd/dist/dist \ + ; \ + fi; \ + \ + go version + +ENV GOPATH /go +ENV PATH $GOPATH/bin:$PATH +RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH" +#WORKDIR $GOPATH + +############################## +# From https://github.com/microsoft/vscode-dev-containers/blob/v0.163.1/containers/go/.devcontainer/base.Dockerfile + +# Install Go tools +ENV GO111MODULE=auto +COPY library-scripts/go-debian.sh /tmp/library-scripts/ +RUN bash /tmp/library-scripts/go-debian.sh "none" "/usr/local/go" "${GOPATH}" "node" "false" \ + && apt-get clean -y && rm -rf /tmp/library-scripts + +# Add Tini +ENV TINI_VERSION v0.19.0 +ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini +RUN chmod +x /tini + +############################## +FROM base + +ARG USER_UID=1000 +ARG USER_GID=$USER_UID + +ENV IS_DOCKER=true +ENV SDK_SRC=/src +ENV OUTPUT_DIR=/out +ENV SDK_REVISION= + +WORKDIR /app +COPY --chown=$USER_UID:$USER_GID . . + +RUN mkdir -p $SDK_SRC $OUTPUT_DIR && chown $USER_UID:$USER_GID $SDK_SRC $OUTPUT_DIR /app + +USER $USER_UID:$USER_GID + +ENTRYPOINT ["/tini", "--", "/app/start.sh"] \ No newline at end of file diff --git a/Makefile b/Makefile index 84044b5..acbd4a5 100644 --- a/Makefile +++ b/Makefile @@ -30,6 +30,11 @@ run-chain: run-client: $(MAKE) -C $(CSDIR) scenario2-run-client +build-docker: + docker build -t loadgen-runner . 
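+# daily-perf (below) continuously runs the dockerized loadgen-runner against the latest SDK revision; results land under ./results (see scripts/run-daily-perf.sh and the README).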
+ +daily-perf: + cd results && ../scripts/run-daily-perf.sh run-loadgen: yarn loadgen diff --git a/README.md b/README.md index b1a5c0d..7c6fec0 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,117 @@ # Load Generator +## Runner + +The loadgen runner automates running any number of load generation cycles on a local or testnet chain, monitoring the local chain node and vats processes. Depending on the use case, it can be run using a local Agoric SDK repo, or it can check out and set up any given revision, with multiple layers of helpers to automate the execution: + +- `loadgen-runner` executable: core tool automating loadgen cycles against an installed agoric SDK (available on PATH) +- `start.sh` script: helper to automate checking out any agoric-sdk revision, compiling and installing it in a temporary location, and running the load generator with it. Can also be used with an existing agoric-sdk repo. +- docker image: a Debian Linux environment set up with all dependencies needed to compile the agoric-sdk. The entrypoint executes the start script, and has mount points for the output directory and optionally an SDK repo. +- `run-daily-perf.sh` script: service entrypoint to continuously execute a `loadgen-runner` docker image against the latest revision with the default options. + +### `loadgen-runner` executable + +At the core, the loadgen-runner can be configured to run multiple stages of load generation, either on a local solo chain, or against an existing chain, automatically spawning an ag-solo client and deploying the loadgen tool. It captures the slog file of the local chain node, the state storage of the chain at the end of each stage, and process and disk usage information. + +#### Command + +Assuming the agoric-sdk and cosmic-swingset are built and installed, and the `agoric` CLI is available in `PATH`: + +```sh +mkdir -p $HOME/loadgen-output +./runner/bin/loadgen-runner --output-dir=$HOME/loadgen-output +``` + +#### Options + +The runner uses `yargs-parser` to parse the command line arguments and dynamically build a complex `argv` object from them. It automatically converts unary arguments into booleans (with support for the `no-` negation prefix), performs number conversion, builds nested objects from dot (`.`) notation, and converts kebab-case to camelCase. + +Currently the following options are available (a combined example invocation is sketched further below): + +- `--output-dir`: the directory where the results from the loadgen cycles are placed (`perf.jsonl`, chain node slogs, chain node storage). Defaults to `results/run-{posixtime}` in the working directory. +- `--profile`: (experimental) the chain target, either `local` (default), `testnet` or `stage`. +- `--no-monitor`: disables running a chain monitor node (for non `local` profiles). +- `--monitor-interval`: a number in minutes for the interval at which to capture process stats for the chain. +- `--no-reset`: a boolean option to control whether the local chain state directory should be checked out clean before starting. +- `--stages`: the total number of stages to run. Defaults to 6. +- `--stage.loadgen.*`: the object to use as default loadgen config for the stages. Created from multiple arguments and passed as-is to the loadgen tool. +- `--stage.duration`: the time in minutes to use as default duration for each loadgen stage (non chain-only, see below). Defaults to 360 minutes (6 hours). +- `--stage.n.*`: override config for a given stage `n` (0 <= n < `--stages`). +- `--stage.n.loadgen.*`: the object to use as loadgen config for the given stage. 
If specified and `chain-only` is not also set, the stage will not be chain-only (i.e. the loadgen will run). Created from multiple arguments and passed as-is to the loadgen tool. +- `--stage.n.chain-only`: boolean flag specifying if the stage should only run the chain node and not start a client or loadgen. Defaults to `true` for the first and last stages. Defaults to `false` for other stages, or if `--stage.n.loadgen.*` is specified. +- `--stage.n.save-storage`: boolean indicating if the storage of the chain node should be saved at the end of the stage. Defaults to `true` for non chain-only stages (where the loadgen runs), as well as for stage 0 (to capture local bootstrap). +- `--stage.n.duration`: the time in minutes for the stage duration. Defaults to the shared duration above for non chain-only stages, or 0 (immediate stop after start) otherwise. Use a negative value to run until interrupted. + +### `start.sh` script + +The start script automates checking out and setting up any revision of the Agoric SDK before launching the loadgen-runner. By default it does so without interfering with an existing SDK installation, but it can also be pointed at an existing checked-out repository to run the setup steps there. + +All command line arguments are passed through to `loadgen-runner`. + +#### Environment + +- `OUTPUT_DIR`: directory containing output artifacts. Creates a temporary folder derived from the revision if not set (`/tmp/agoric-sdk-out-{SDK_REVISION}`) +- `SDK_SRC`: directory containing the `agoric-sdk` repo. Creates a temporary folder if not set (`/tmp/agoric-sdk-src-{SDK_REVISION}`) +- `SDK_REVISION`: the agoric-sdk git revision to check out for the test, if no existing repo is found. Defaults to the remote head if not set + +#### Examples + +```sh +SDK_REVISION=fa7ff5e55e OUTPUT_DIR=$HOME/loadgen-output ./start.sh +``` + +```sh +SDK_SRC=../agoric-sdk ./start.sh --stage.duration=10 +``` + +### Docker image + +The Docker image provides a Debian Linux environment set up with all dependencies needed to compile the agoric-sdk. The entrypoint executes the start script, and has mount points for the output directory and optionally an SDK repo. + +#### Mount points + +- `/out`: directory containing output artifacts +- `/src`: directory containing the `agoric-sdk` repo. Automatically checked out if not already a git repo (i.e. empty) + +#### Environment + +- `SDK_REVISION`: the agoric-sdk git revision to check out for the test + +#### Examples + +First build the image: + +```sh +docker build . -t loadgen-runner +``` + +To perform a loadgen cycle on a given revision: + +```sh +OUTPUT_DIR=$HOME/loadgen-output +mkdir -p $OUTPUT_DIR +docker run --rm -v $OUTPUT_DIR:/out -e SDK_REVISION=fa7ff5e55e loadgen-runner --no-reset +``` + +To use an existing agoric-sdk copy (note that docker bind mounts require absolute host paths): + +```sh +OUTPUT_DIR=$HOME/loadgen-output +mkdir -p $OUTPUT_DIR +docker run --rm -v $OUTPUT_DIR:/out -v "$PWD/../agoric-sdk":/src loadgen-runner --no-reset --stage.duration=10 +``` + +### `run-daily-perf.sh` script + +The script is used as a service entrypoint to continuously execute a `loadgen-runner` docker image against the latest SDK revision with the default options. It creates output folders in the current working directory, named after the latest revision, and waits for a new revision to become available if results for it already exist. + +## Manual + +The loadgen is implemented as a dapp deploy script which runs forever, and opens an HTTP server on a local port to receive config updates. 
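+ +For reference, here is a combined `loadgen-runner` invocation exercising several of the Runner options documented above; the specific values are illustrative only: + +```sh +# hypothetical run: 5 stages, 30 minute default stage duration, +# with stage 2 running only the faucet task on a 60s interval +./runner/bin/loadgen-runner --output-dir=$HOME/loadgen-output --stages=5 --stage.duration=30 --stage.2.loadgen.faucet.interval=60 +``` + +`yargs-parser` folds the dotted, kebab-cased flags into a nested `argv` object, and the `--stage.2.loadgen.*` subtree is passed as-is to the loadgen tool as the config for stage 2.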
+ +### Example + In one terminal: ```sh @@ -42,12 +154,15 @@ curl -X PUT --data '{"faucet":{"interval":60}}' http://127.0.0.1:3352/config ``` To disable all generators: + ```sh curl -X PUT --data '{}' http://127.0.0.1:3352/config ``` +### Loadgen types + The load generators defined so far: -* `faucet`: initialize by creating a `dapp-fungible-faucet` -style mint, then each cycle requests an invitation and completes it, adding 1000 Tokens to Bob's Purse. Takes 4 round-trips to complete. -* `amm`: initialize by selling half our BLD to get RUN, then record the balances. Each cycle sells 1% of the recorded BLD to get RUN, then sells 1% of the recorded RUN to get BLD. Because of fees, the total available will drop slowly over time. -* `vault`: initialize by recording our BLD balance and the BLD/RUN price. Each cycle deposits 1% of the recorded BLD balance and borrows half its value in RUN, then pays back the loan and recovers the BLD (less fees). +- `faucet`: initialize by creating a `dapp-fungible-faucet` -style mint, then each cycle requests an invitation and completes it, adding 1000 Tokens to Bob's Purse. Takes 4 round-trips to complete. +- `amm`: initialize by selling half our BLD to get RUN, then record the balances. Each cycle sells 1% of the recorded BLD to get RUN, then sells 1% of the recorded RUN to get BLD. Because of fees, the total available will drop slowly over time. +- `vault`: initialize by recording our BLD balance and the BLD/RUN price. Each cycle deposits 1% of the recorded BLD balance and borrows half its value in RUN, then pays back the loan and recovers the BLD (less fees). diff --git a/_agstate/agoric-servers/.gitignore b/_agstate/agoric-servers/.gitignore index a8b18cd..757e71c 100644 --- a/_agstate/agoric-servers/.gitignore +++ b/_agstate/agoric-servers/.gitignore @@ -78,7 +78,7 @@ integration-test/transform-tests/output solo dev -testnet +testnet-* chain ve* -local* +local-* diff --git a/library-scripts/go-debian.sh b/library-scripts/go-debian.sh new file mode 100644 index 0000000..d0f22a9 --- /dev/null +++ b/library-scripts/go-debian.sh @@ -0,0 +1,129 @@ +#!/usr/bin/env bash +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. +#------------------------------------------------------------------------------------------------------------- +# +# Docs: https://github.com/microsoft/vscode-dev-containers/blob/master/script-library/docs/go.md +# +# Syntax: ./go-debian.sh [Go version] [GOROOT] [GOPATH] [non-root user] [Add GOPATH, GOROOT to rc files flag] [Install tools flag] + +TARGET_GO_VERSION=${1:-"latest"} +TARGET_GOROOT=${2:-"/usr/local/go"} +TARGET_GOPATH=${3:-"/go"} +USERNAME=${4:-"automatic"} +UPDATE_RC=${5:-"true"} +INSTALL_GO_TOOLS=${6:-"true"} + +set -e + +if [ "$(id -u)" -ne 0 ]; then + echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' + exit 1 +fi + +# Ensure that login shells get the correct path if the user updated the PATH using ENV. 
+rm -f /etc/profile.d/00-restore-env.sh +echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh +chmod +x /etc/profile.d/00-restore-env.sh + +# Determine the appropriate non-root user +if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then + USERNAME="" + POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") + for CURRENT_USER in ${POSSIBLE_USERS[@]}; do + if id -u ${CURRENT_USER} > /dev/null 2>&1; then + USERNAME=${CURRENT_USER} + break + fi + done + if [ "${USERNAME}" = "" ]; then + USERNAME=root + fi +elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then + USERNAME=root +fi + +function updaterc() { + if [ "${UPDATE_RC}" = "true" ]; then + echo "Updating /etc/bash.bashrc and /etc/zsh/zshrc..." + echo -e "$1" >> /etc/bash.bashrc + if [ -f "/etc/zsh/zshrc" ]; then + echo -e "$1" >> /etc/zsh/zshrc + fi + fi +} + +export DEBIAN_FRONTEND=noninteractive + +# Install curl, tar, git, other dependencies if missing +if ! dpkg -s curl ca-certificates tar git g++ gcc libc6-dev make pkg-config > /dev/null 2>&1; then + if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then + apt-get update + fi + apt-get -y install --no-install-recommends curl ca-certificates tar git g++ gcc libc6-dev make pkg-config +fi + +# Get latest version number if latest is specified +if [ "${TARGET_GO_VERSION}" = "latest" ] || [ "${TARGET_GO_VERSION}" = "current" ] || [ "${TARGET_GO_VERSION}" = "lts" ]; then + TARGET_GO_VERSION=$(curl -sSL "https://golang.org/VERSION?m=text" | sed -n '/^go/s///p' ) +fi + +# Install Go +GO_INSTALL_SCRIPT="$(cat < /dev/null 2>&1; then + mkdir -p "${TARGET_GOROOT}" "${TARGET_GOPATH}" + chown -R ${USERNAME} "${TARGET_GOROOT}" "${TARGET_GOPATH}" + su ${USERNAME} -c "${GO_INSTALL_SCRIPT}" +else + echo "Go already installed. Skipping." +fi + +# Install Go tools that are isImportant && !replacedByGopls based on +# https://github.com/golang/vscode-go/blob/0c6dce4a96978f61b022892c1376fe3a00c27677/src/goTools.ts#L188 +# exception: golangci-lint is installed using their install script below. +GO_TOOLS="\ + golang.org/x/tools/gopls \ + honnef.co/go/tools/... \ + golang.org/x/lint/golint \ + github.com/mgechev/revive \ + github.com/uudashr/gopkgs/v2/cmd/gopkgs \ + github.com/ramya-rao-a/go-outline \ + github.com/go-delve/delve/cmd/dlv \ + github.com/golangci/golangci-lint/cmd/golangci-lint" +if [ "${INSTALL_GO_TOOLS}" = "true" ]; then + echo "Installing common Go tools..." + export PATH=${TARGET_GOROOT}/bin:${PATH} + mkdir -p /tmp/gotools /usr/local/etc/vscode-dev-containers ${TARGET_GOPATH}/bin + cd /tmp/gotools + export GOPATH=/tmp/gotools + export GOCACHE=/tmp/gotools/cache + + # Go tools w/module support + export GO111MODULE=on + (echo "${GO_TOOLS}" | xargs -n 1 go get -v )2>&1 | tee -a /usr/local/etc/vscode-dev-containers/go.log + + # Move Go tools into path and clean up + mv /tmp/gotools/bin/* ${TARGET_GOPATH}/bin/ + rm -rf /tmp/gotools + chown -R ${USERNAME} "${TARGET_GOPATH}" +fi + +# Add GOPATH variable and bin directory into PATH in bashrc/zshrc files (unless disabled) +updaterc "$(cat << EOF +export GOPATH="${TARGET_GOPATH}" +if [[ "\${PATH}" != *"\${GOPATH}/bin"* ]]; then export PATH="\${PATH}:\${GOPATH}/bin"; fi +export GOROOT="${TARGET_GOROOT}" +if [[ "\${PATH}" != *"\${GOROOT}/bin"* ]]; then export PATH="\${PATH}:\${GOROOT}/bin"; fi +EOF +)" + +echo "Done!" 
diff --git a/loadgen/agent-create-vault.js b/loadgen/agent-create-vault.js index e961dd1..9c038d8 100644 --- a/loadgen/agent-create-vault.js +++ b/loadgen/agent-create-vault.js @@ -42,7 +42,7 @@ export default async function startAgent([key, home]) { // we only withdraw half the value of the collateral, giving us 200% // collateralization const collaterals = await E(treasuryPublicFacet).getCollaterals(); - const cdata = collaterals.find(c => c.brand === bldBrand); + const cdata = collaterals.find((c) => c.brand === bldBrand); const priceRate = cdata.marketPrice; const half = makeRatio(BigInt(50), runBrand); const wantedRun = multiplyBy(multiplyBy(bldToLock, priceRate), half); diff --git a/loadgen/allValues.js b/loadgen/allValues.js index 41745ef..89d01c8 100644 --- a/loadgen/allValues.js +++ b/loadgen/allValues.js @@ -1,4 +1,4 @@ const zip = (xs, ys) => xs.map((x, i) => [x, ys[i]]); const { keys, values, fromEntries } = Object; -export const allValues = async obj => +export const allValues = async (obj) => fromEntries(zip(keys(obj), await Promise.all(values(obj)))); diff --git a/loadgen/loop.js b/loadgen/loop.js index 4777093..fd01312 100644 --- a/loadgen/loop.js +++ b/loadgen/loop.js @@ -1,4 +1,6 @@ /* global setInterval clearInterval setTimeout clearTimeout */ +/* eslint-disable no-continue */ + import { performance } from 'perf_hooks'; import http from 'http'; import { prepareFaucet } from './task-tap-faucet'; @@ -53,7 +55,7 @@ function maybeStartOneCycle(name, limit) { logdata({ type: 'finish', task: name, seq, success: true }); s.succeeded += 1; }, - err => { + (err) => { console.log(`[${name}] failed:`, err); logdata({ type: 'finish', task: name, seq, success: false }); s.failed += 1; @@ -117,7 +119,7 @@ function updateConfig(config) { } } -function startServer() { +async function startServer() { const server = http.createServer((req, res) => { const url = new URL(req.url, `http://${req.headers.host}`); // console.log(`pathname ${url.pathname}, ${req.method}`); @@ -126,7 +128,7 @@ function startServer() { if (req.method === 'PUT') { let body = ''; req.setEncoding('utf8'); - req.on('data', chunk => { + req.on('data', (chunk) => { body += chunk; }); req.on('end', () => { @@ -153,6 +155,9 @@ function startServer() { } }); server.listen(3352, '127.0.0.1'); + return new Promise((resolve, reject) => { + server.on('listening', resolve).on('error', reject); + }); } export default async function runCycles(homePromise, deployPowers) { @@ -170,7 +175,8 @@ export default async function runCycles(homePromise, deployPowers) { status[name] = { active: 0, succeeded: 0, failed: 0, next: 0 }; } console.log('all tasks ready'); - startServer(); + await startServer(); + console.log('server running on 127.0.0.1:3352'); if (!checkConfig(currentConfig)) { throw Error('bad config'); diff --git a/loadgen/package.json b/loadgen/package.json index a59123a..c1d042b 100644 --- a/loadgen/package.json +++ b/loadgen/package.json @@ -28,10 +28,10 @@ "@agoric/ertp": "*", "@agoric/eventual-send": "*", "@agoric/install-ses": "*", - "@agoric/marshal": "^0.4.17", + "@agoric/marshal": "*", "@agoric/notifier": "*", "@agoric/store": "*", - "@agoric/ui-components": "^0.2.15", + "@agoric/ui-components": "*", "@agoric/zoe": "*", "esm": "^3.2.5" }, diff --git a/package.json b/package.json index 8104420..7a36da0 100644 --- a/package.json +++ b/package.json @@ -6,27 +6,30 @@ "main": "index.js", "workspaces": [ "loadgen", - "_agstate/agoric-servers" + "_agstate/agoric-servers", + "runner" ], "devDependencies": { 
"@typescript-eslint/parser": "^4.18.0", "eslint": "^7.23.0", "eslint-config-airbnb-base": "^14.0.0", - "eslint-config-jessie": "^0.0.4", - "eslint-config-prettier": "^6.9.0", - "eslint-plugin-import": "^2.20.0", + "eslint-config-jessie": "^0.0.6", + "eslint-config-prettier": "^6.12.0", + "eslint-plugin-import": "^2.22.1", "eslint-plugin-jsdoc": "^30.4.2", "eslint-plugin-jsx-a11y": "^6.2.3", "eslint-plugin-prettier": "^3.1.2", - "prettier": "^1.18.2", + "prettier": "^2.1.2", "typescript": "^4.2.3" }, "scripts": { "preinstall": "node -e \"process.env.AGORIC_INSTALL && process.exit(0); console.warn('please use: agoric install . For details, see https://agoric.com/documentation/'); process.exit(1)\"", - "lint": "yarn workspaces run lint-fix", + "lint": "yarn workspaces run lint-check", + "lint-fix": "yarn workspaces run lint-fix", "lint-check": "yarn workspaces run lint-check", "test": "yarn workspaces run test", "build": "yarn workspaces run build", + "runner": "runner/bin/loadgen-runner", "loadgen": "agoric deploy loadgen/loop.js" }, "dependencies": { diff --git a/results/.keep b/results/.keep new file mode 100644 index 0000000..e69de29 diff --git a/runner/bin/loadgen-runner b/runner/bin/loadgen-runner new file mode 120000 index 0000000..24ad568 --- /dev/null +++ b/runner/bin/loadgen-runner @@ -0,0 +1 @@ +../lib/entrypoint.js \ No newline at end of file diff --git a/runner/jsconfig.json b/runner/jsconfig.json new file mode 100644 index 0000000..50681bb --- /dev/null +++ b/runner/jsconfig.json @@ -0,0 +1,22 @@ +// This file can contain .js-specific Typescript compiler config. +{ + "compilerOptions": { + "target": "esnext", + "noEmit": true, + "checkJs": true, + /* + // The following flags are for creating .d.ts files: + "noEmit": false, + "declaration": true, + "emitDeclarationOnly": true, +*/ + "strict": true, + "strictNullChecks": true, + "noImplicitAny": true, + "moduleResolution": "node", + }, + "include": [ + "lib/**/*.js", + "lib/**/*.d.ts" + ], +} \ No newline at end of file diff --git a/runner/lib/entrypoint.js b/runner/lib/entrypoint.js new file mode 100755 index 0000000..c609a72 --- /dev/null +++ b/runner/lib/entrypoint.js @@ -0,0 +1,67 @@ +#!/usr/bin/env node +/* global process */ +// @ts-nocheck + +import './sdk/install-ses.js'; + +import path from 'path'; +import { spawn } from 'child_process'; +import rawFs from 'fs'; +import os from 'os'; + +import main from './main.js'; +import { + flattenAggregateErrors, + aggregateTryFinally, +} from './helpers/async.js'; + +const fs = rawFs.promises; +const fsStream = { + createReadStream: rawFs.createReadStream, + createWriteStream: rawFs.createWriteStream, +}; +const progname = path.basename(process.argv[1]); + +const { stdout, stderr } = process; + +const rawArgs = process.argv.slice(2); + +process.on('uncaughtException', (error) => { + console.error('uncaught exception', error); + process.exit(2); +}); + +(async () => { + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), `${progname.replace(/[^a-z0-9_]/gi, '-')}-`), + ); + + return aggregateTryFinally( + async () => + main(progname, rawArgs, { + stdout, + stderr, + fs, + fsStream, + os, + process, + spawn, + tmpDir, + }), + async () => fs.rmdir(tmpDir, { recursive: true }), + ); +})().then( + (res) => { + res === undefined || process.exit(res); + }, + (rej) => { + // console.log(process._getActiveRequests(), process._getActiveHandles()); + console.error(rej); + if (rej.errors) { + flattenAggregateErrors(rej.errors).forEach((error) => + console.error('nested error:', error), + ); 
+ } + process.exit(2); + }, +); diff --git a/runner/lib/helpers/async.d.ts b/runner/lib/helpers/async.d.ts new file mode 100644 index 0000000..6aeac0b --- /dev/null +++ b/runner/lib/helpers/async.d.ts @@ -0,0 +1,150 @@ +/* global Console */ +/* eslint-disable no-unused-vars,no-redeclare */ + +export declare function sleep(ms: number): Promise; + +export interface AggregateError extends Error { + readonly errors: Error[]; +} + +export declare function flattenAggregateErrors(errors: Error[]): Error[]; + +export declare function warnOnRejection( + operation: Promise, + console: Console, + ...messages: string[] +): void; + +export declare function aggregateTryFinally( + trier: () => Promise, + finalizer: () => Promise, +): Promise; + +export declare function tryTimeout( + timeoutMs: number, + trier: () => Promise, + onError?: () => Promise, +): Promise; + +export declare function PromiseAllOrErrors< + T1, + T2, + T3, + T4, + T5, + T6, + T7, + T8, + T9, + T10 +>( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + T7 | PromiseLike, + T8 | PromiseLike, + T9 | PromiseLike, + T10 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + T7 | PromiseLike, + T8 | PromiseLike, + T9 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + T7 | PromiseLike, + T8 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6, T7, T8]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + T7 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6, T7]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + ], +): Promise<[T1, T2, T3]>; + +export declare function PromiseAllOrErrors( + values: readonly [T1 | PromiseLike, T2 | PromiseLike], +): Promise<[T1, T2]>; + +export declare function PromiseAllOrErrors( + values: readonly (T | PromiseLike)[], +): Promise; + +export interface NextStep { + (stop: Promise): Promise; +} + +export interface Task { + (nextStep: NextStep): Promise; +} + +export declare function sequential(...tasks: readonly Task[]): Task; + +export declare function parallel(...tasks: readonly Task[]): Task; diff --git a/runner/lib/helpers/async.js b/runner/lib/helpers/async.js new file mode 100644 index 0000000..823e4b3 --- /dev/null +++ b/runner/lib/helpers/async.js @@ -0,0 +1,147 @@ 
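+// Generic async helpers used by the runner: aggregate-error utilities, try/finally and +// timeout composition, and sequential/parallel task orchestration.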
+/* global setTimeout */ + +import { makePromiseKit } from '../sdk/promise-kit.js'; + +/** @type {import("./async.js").sleep} */ +export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); + +/** + * @param {Error[]} errors + * @param {string} [message] + */ +const makeAggregateError = (errors, message) => { + const err = new Error(message); + Object.defineProperties(err, { + name: { + value: 'AggregateError', + }, + errors: { + value: errors, + }, + }); + return err; +}; + +/** + * @template T + * @param {readonly (T | PromiseLike)[]} values + * @returns {Promise} + */ +export const PromiseAllOrErrors = async (values) => { + return Promise.allSettled(values).then((results) => { + const errors = /** @type {PromiseRejectedResult[]} */ (results.filter( + ({ status }) => status === 'rejected', + )).map((result) => result.reason); + if (!errors.length) { + return /** @type {PromiseFulfilledResult[]} */ (results).map( + (result) => result.value, + ); + } else if (errors.length === 1) { + throw errors[0]; + } else { + throw makeAggregateError(errors); + } + }); +}; + +/** @type {import("./async.js").flattenAggregateErrors} */ +export const flattenAggregateErrors = (errors) => + errors.reduce((arr, error) => { + arr.push(error); + if ('errors' in error) { + arr.push( + ...flattenAggregateErrors(/** @type {AggregateError} */ (error).errors), + ); + } + return arr; + }, /** @type {Error[]} */ ([])); + +/** @type {import("./async.js").warnOnRejection} */ +export const warnOnRejection = (operation, console, ...messages) => { + operation.catch((error) => { + console.warn(...messages, error); + if ('errors' in error) { + // TODO: Plug into SES error handling + console.warn( + 'Reasons:', + ...flattenAggregateErrors(/** @type {AggregateError} */ (error).errors), + ); + } + }); +}; + +/** @type {import("./async.js").aggregateTryFinally} */ +export const aggregateTryFinally = async (trier, finalizer) => + trier().then( + async (result) => finalizer().then(() => result), + async (tryError) => + finalizer() + .then( + () => tryError, + (finalizeError) => makeAggregateError([tryError, finalizeError]), + ) + .then((error) => Promise.reject(error)), + ); + +/** @type {import("./async.js").tryTimeout} */ +export const tryTimeout = async (timeoutMs, trier, onError) => { + const result = Promise.race([ + sleep(timeoutMs).then(() => Promise.reject(new Error('Timeout'))), + trier(), + ]); + + return !onError + ? 
result + : result.catch(async (error) => + onError() + .then( + () => error, + (cleanupError) => makeAggregateError([error, cleanupError]), + ) + .then((finalError) => Promise.reject(finalError)), + ); +}; + +/** @typedef {import("./async.js").Task} Task */ +/** + * @template T + * @typedef {import('../sdk/promise-kit.js').PromiseRecord} PromiseRecord + */ + +/** + * @param {Task[]} tasks + * @returns {Task} + */ +export const sequential = (...tasks) => { + return tasks.reduceRight((accumulatedTask, prevTask) => async (nextStep) => { + await prevTask(async (stopPrev) => { + await accumulatedTask(async (stopAcc) => { + await nextStep(Promise.race([stopAcc, stopPrev])); + }); + }); + }); +}; + +/** + * @param {Task[]} tasks + * @returns {Task} + */ +export const parallel = (...tasks) => async (nextStep) => { + /** @type {PromiseRecord<{stop: Promise}>[]} */ + const kits = tasks.map(() => makePromiseKit()); + /** @type {PromiseRecord} */ + const nextStepDone = makePromiseKit(); + Promise.all(kits.map((kit) => kit.promise)).then((wrappedStops) => { + nextStepDone.resolve( + nextStep(Promise.race(wrappedStops.map(({ stop }) => stop))), + ); + }); + await Promise.all( + tasks.map((task, i) => + task((stop) => { + kits[i].resolve({ stop }); + return nextStepDone.promise; + }), + ), + ); +}; diff --git a/runner/lib/helpers/child-process.js b/runner/lib/helpers/child-process.js new file mode 100644 index 0000000..2e91379 --- /dev/null +++ b/runner/lib/helpers/child-process.js @@ -0,0 +1,143 @@ +/* global process */ + +/** + * @param {import("child_process").ChildProcess} childProcess + * @param {Object} [options] + * @param {boolean} [options.ignoreExitCode] do not error on non-zero exit codes + * @returns {Promise} The exit code of the process + */ +export const childProcessDone = ( + childProcess, + { ignoreExitCode = false } = {}, +) => + new Promise((resolve, reject) => + childProcess.on('error', reject).on('exit', (code) => { + if (!ignoreExitCode && (code == null || code !== 0)) { + reject(new Error(`Process exited with non-zero code: ${code}`)); + } else { + resolve(code != null ? code : -1); + } + }), + ); + +/** + * Makes a spawn that support non fd backed stdio streams + * Automatically creates a pipe stdio and pipes the stream + * + * @param {Object} options + * @param {import("child_process").spawn} options.spawn Node.js spawn + * @param {boolean} [options.end] Pipe option to automatically forward stream end + * @returns {import("child_process").spawn} + */ +export const makeSpawnWithPipedStream = ({ spawn, end }) => { + /** + * @param {string} command + * @param {ReadonlyArray} args + * @param {import("child_process").SpawnOptions} options + * @returns {import("child_process").ChildProcess} + */ + const pipedSpawn = (command, args, options) => { + const spawnOptions = + typeof args === 'object' && args != null && !Array.isArray(args) + ? 
/** @type {import("child_process").SpawnOptions} */ (args) + : options || {}; + let { stdio } = spawnOptions; + let stdin; + let stdout; + let stderr; + if (Array.isArray(stdio)) { + /** @type {(import("stream").Stream | undefined)[]} */ + const internalStdio = new Array(3); + + stdio = stdio.map((value, idx) => { + if ( + idx < 3 && + typeof value === 'object' && + value != null && + typeof (/** @type {any} */ (value).fd) !== 'number' + ) { + internalStdio[idx] = value; + return 'pipe'; + } + return value; + }); + + [stdin, stdout, stderr] = internalStdio; + } + + const childProcess = spawn(command, args, { + ...spawnOptions, + stdio, + }); + + if (stdin) { + stdin.pipe(/** @type {NodeJS.WritableStream} */ (childProcess.stdin), { + end, + }); + } + if (stdout) { + /** @type {NodeJS.ReadableStream} */ (childProcess.stdout).pipe( + /** @type {*} */ (stdout), + { + end, + }, + ); + } + if (stderr) { + /** @type {NodeJS.ReadableStream} */ (childProcess.stderr).pipe( + /** @type {*} */ (stderr), + { end }, + ); + } + + return /** @type {any} */ (childProcess); + }; + + // TODO: general covariance of return type allows our spawn to add stdio streams + // but NodeJS spawn overloads specifically disallow it + return /** @type {*} */ (pipedSpawn); +}; + +/** + * @callback PipedSpawn + * @param {string} command + * @param {ReadonlyArray} args + * @param {import("child_process").SpawnOptionsWithStdioTuple<'ignore' | undefined, import("stream").Writable, import("stream").Writable>} options + * @returns {import("child_process").ChildProcessByStdio} + */ + +/** + * Makes a verbose spawn that requires a writable stream for stdout and stderr, + * prints out the executed command and pipes child process streams + * + * @param {Object} options + * @param {import("child_process").spawn} options.spawn Node.js spawn + * @param {boolean} [options.end] Pipe option to automatically forward stream end + * @returns {PipedSpawn} + */ +export const makeSpawnWithPrintAndPipeOutput = ({ spawn, end }) => { + const spawnWithPipe = makeSpawnWithPipedStream({ spawn, end }); + + return (command, args, options) => { + const env = (options.env !== process.env ? 
options.env : null) || {}; + const envPairs = Object.entries( + // While prototype properties are used by spawn + // don't clutter the print output with the "inherited" env + Object.getOwnPropertyDescriptors(env), + ) + .filter(([_, desc]) => desc.enumerable) + .map(([name, desc]) => `${name}=${desc.value}`); + + const [_, out, err, ...others] = options.stdio; + + out.write(`${[...envPairs, command, ...args].join(' ')}\n`); + + const childProcess = spawnWithPipe(command, args, { + ...options, + stdio: ['ignore', out, err, ...others], + }); + + // The childProcess does include the out and err streams but spawnWithPipe doesn't have the correct return type + return /** @type {*} */ (childProcess); + }; +}; diff --git a/runner/lib/helpers/fs.js b/runner/lib/helpers/fs.js new file mode 100644 index 0000000..b9e3624 --- /dev/null +++ b/runner/lib/helpers/fs.js @@ -0,0 +1,153 @@ +// import { openSync, closeSync } from 'fs'; + +import { basename, dirname, join as joinPath } from 'path'; + +import { childProcessDone } from './child-process.js'; + +/** + * @callback FindByPrefix + * @param {string} prefix + * @returns {Promise} + */ + +/** + * @callback DirDiskUsage + * @param {string} rootDir + * @param {Object} [options] + * @param {number} [options.minFileSize] + * @returns {Promise>} + */ + +/** + * Make a FIFO file readable stream + * + * @callback MakeFIFO + * @param {string} name + * @returns {Promise} + */ + +/** + * @typedef FsHelper + * @property {FindByPrefix} findByPrefix + * @property {DirDiskUsage} dirDiskUsage + * @property {MakeFIFO} makeFIFO + */ + +/** @typedef {Pick} fsStream */ + +/** + * + * @param {Object} powers + * @param {import("fs/promises")} powers.fs Node.js promisified fs object + * @param {fsStream} powers.fsStream Node.js fs stream operations + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {string} powers.tmpDir Directory location to place temporary files in + * @returns {FsHelper} + * + */ +export const makeFsHelper = ({ fs, fsStream, spawn, tmpDir }) => { + /** @type {FindByPrefix} */ + const findByPrefix = async (prefix) => { + const parentDir = dirname(prefix); + const prefixBase = basename(prefix); + + const name = (await fs.readdir(parentDir)).find((dir) => + dir.startsWith(prefixBase), + ); + if (!name) { + throw new Error(`Couldn't find dir entry starting with prefix`); + } + return joinPath(parentDir, name); + }; + + // TODO: figure out why tsc complains when using /** @type {DirDiskUsage} */ + /** + * @param {string} rootDir + * @param {Object} [options] + * @param {number} [options.minFileSize] + */ + const dirDiskUsage = async (rootDir, { minFileSize = 5 } = {}) => { + /** @type {Record} */ + const book = {}; + + /** + * @param {string} subpath + */ + const processDir = async (subpath) => { + const dirEntNames = await fs.readdir(joinPath(rootDir, subpath)); + const dirEntStats = await Promise.all( + dirEntNames.map( + /** + * @param {string} name + * @returns {Promise<[string, import('fs').Stats]>} + * */ + async (name) => [ + joinPath(subpath, name), + await fs.lstat(joinPath(rootDir, subpath, name)), + ], + ), + ); + for (const [path, stat] of dirEntStats) { + if (stat.isDirectory()) { + // Await the recursion here to provide some level of order and parallelism limit + // eslint-disable-next-line no-await-in-loop + await processDir(path); + } else if (stat.isFile()) { + // A linux fs block is 512 bytes + // https://man7.org/linux/man-pages/man2/stat.2.html + const size = stat.blocks / 2; + if (size >= minFileSize) { + 
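+ // record the size in KiB (st_blocks counts 512-byte blocks, so blocks / 2 is KiB)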
book[path] = stat.blocks / 2; + } + } else { + console.error('Unexpected file type', joinPath(rootDir, path)); + } + } + }; + + await processDir(''); + + return book; + }; + + /** @type {MakeFIFO} */ + const makeFIFO = async (name) => { + const fifoPath = joinPath(tmpDir, basename(name)); + await childProcessDone(spawn('mkfifo', [fifoPath], { stdio: 'inherit' })); + + const stream = fsStream.createReadStream(fifoPath, { + emitClose: true, + // Large buffer + // TODO: Make configurable + highWaterMark: 1024 * 1024, + }); + + // eslint-disable-next-line no-underscore-dangle + const originalStreamDestroy = stream._destroy; + // eslint-disable-next-line no-underscore-dangle + stream._destroy = (error, callback) => { + const internalStream = /** @type {{closed: boolean, fd: number | null}} */ ( + /** @type {unknown} */ (stream) + ); + if (!internalStream.closed && typeof internalStream.fd !== 'number') { + console.warn( + 'FIFO was never opened for write, self opening to unblock process.', + ); + // Unblock node's internal read open + (async () => (await fs.open(fifoPath, 'a')).close())(); + // closeSync(openSync(fifoPath, 'a')); + } + + originalStreamDestroy.call(stream, error, callback); + }; + + stream.once('close', () => { + // TODO: log errors + fs.rm(fifoPath); + }); + + return stream; + }; + + return harden({ dirDiskUsage, findByPrefix, makeFIFO }); +}; diff --git a/runner/lib/helpers/line-stream-transform.d.ts b/runner/lib/helpers/line-stream-transform.d.ts new file mode 100644 index 0000000..187add6 --- /dev/null +++ b/runner/lib/helpers/line-stream-transform.d.ts @@ -0,0 +1,16 @@ +/* eslint-disable no-unused-vars */ + +import type ReadlineTransform, { + ReadlineTransformOptions, +} from 'readline-transform'; + +export interface LineStreamTransformOptions extends ReadlineTransformOptions { + /** optional prefix to prepend for each line */ + prefix?: string; + /** ending for each line. If true, a new line is added. */ + lineEndings?: boolean | string; +} + +export default class LineStreamTransform extends ReadlineTransform { + constructor(options?: LineStreamTransformOptions); +} diff --git a/runner/lib/helpers/line-stream-transform.js b/runner/lib/helpers/line-stream-transform.js new file mode 100644 index 0000000..b213277 --- /dev/null +++ b/runner/lib/helpers/line-stream-transform.js @@ -0,0 +1,33 @@ +import ReadlineTransform from 'readline-transform'; + +/* eslint-disable no-underscore-dangle,no-nested-ternary */ + +export default class LineStreamTransform extends ReadlineTransform { + /** + * + * @param {import("./line-stream-transform.js").LineStreamTransformOptions} options + */ + constructor(options = {}) { + const defaultTransformOptions = { readableObjectMode: true }; + const { + transform: _, + prefix = '', + lineEndings = false, + ...readlineTransformOptions + } = options; + super({ ...defaultTransformOptions, ...readlineTransformOptions }); + this._prefix = prefix; + this._suffix = lineEndings + ? typeof lineEndings === 'string' + ? 
lineEndings + : '\n' + : ''; + } + + /** @param {string} line */ + _writeItem(line) { + if (line.length > 0 || !(/** @type {any} */ (this)._skipEmpty)) { + this.push(`${this._prefix}${line}${this._suffix}`); + } + } +} diff --git a/runner/lib/helpers/outputter.js b/runner/lib/helpers/outputter.js new file mode 100644 index 0000000..755b836 --- /dev/null +++ b/runner/lib/helpers/outputter.js @@ -0,0 +1,43 @@ +import { Console } from 'console'; + +import LineStreamTransform from './line-stream-transform.js'; + +/** + * @param {Object} options + * @param {import("stream").Writable} options.out + * @param {import("stream").Writable} [options.err] + * @param {string} [options.outPrefix] + * @param {string} [options.errPrefix] + * @param {boolean} [options.colorMode] + */ +export const makeOutputter = ({ + out, + err = out, + outPrefix, + errPrefix = outPrefix, + colorMode = true, +}) => { + if (outPrefix) { + const dstOut = out; + out = new LineStreamTransform({ + prefix: outPrefix, + lineEndings: true, + }); + out.pipe(dstOut); + } + + if (errPrefix) { + const dstErr = err; + err = new LineStreamTransform({ + prefix: errPrefix, + lineEndings: true, + }); + err.pipe(dstErr); + } + + return { + console: new Console({ stdout: out, stderr: err, colorMode }), + out, + err, + }; +}; diff --git a/runner/lib/helpers/process-info.d.ts b/runner/lib/helpers/process-info.d.ts new file mode 100644 index 0000000..9b9bca3 --- /dev/null +++ b/runner/lib/helpers/process-info.d.ts @@ -0,0 +1,74 @@ +/** + * See https://github.com/torvalds/linux/blob/master/Documentation/filesystems/proc.rst + * for details on some of these values + */ + +/** Process times in seconds (converted from jiffies) */ +export type ProcessTimes = { + /** time spent waiting for block IO */ + blockIo: number; + /** guest time of the task children */ + childGuest: number; + /** kernel mode including children */ + childKernel: number; + /** user mode including children */ + childUser: number; + /** guest time of the task */ + guest: number; + /** kernel mode */ + kernel: number; + /** user mode */ + user: number; +}; + +/** Process memory sizes in kiB */ +export type ProcessMemory = { + /** peak virtual memory size */ + vmPeak: number; + /** total program size */ + vmSize: number; + /** locked memory size */ + vmLocked: number; + /** pinned memory size */ + vmPinned: number; + /** peak resident set size ("high water mark") */ + vmHwm: number; + /** size of memory portions. It contains the three following parts (vmRSS = rssAnon + rssFile + rssShmem) */ + vmRss: number; + /** size of resident anonymous memory */ + rssAnon: number; + /** size of resident file mappings */ + rssFile: number; + /** size of resident shmem memory (includes SysV shm, mapping of tmpfs and shared anonymous mappings) */ + rssShmem: number; + /** size of private data segments */ + vmData: number; + /** size of stack segments */ + vmStack: number; + /** size of text segment */ + vmExe: number; + /** size of shared library code */ + vmLib: number; + /** size of page table entries */ + vmPte: number; + /** amount of swap used by anonymous private data (shmem swap usage is not included) */ + vmSwap: number; +}; + +export interface ProcessInfo { + /** The PID of the process */ + readonly pid: number; + /** The process' static start time in seconds relative to the origin process start */ + readonly startTimestamp: number; + /** Retrieves the current command line. 
This may change if when process "exec" */ + getArgv(): Promise; + /** Retrieves the current timing and memory usage for the process */ + getUsageSnapshot(): Promise<{ times: ProcessTimes; memory: ProcessMemory }>; + /** Retrieves the current list of child processes */ + getChildren(): Promise; + /** + * Retrieves the parent of the process. + * This may change if the original parent terminates. + */ + getParent(): Promise; +} diff --git a/runner/lib/helpers/procsfs.js b/runner/lib/helpers/procsfs.js new file mode 100644 index 0000000..982a90e --- /dev/null +++ b/runner/lib/helpers/procsfs.js @@ -0,0 +1,267 @@ +/* global process Buffer */ + +/** + * Helper module to read, parse and interpret procfs + * + * Authoritative info on content: + * https://github.com/torvalds/linux/blob/master/Documentation/filesystems/proc.rst + */ + +import { performance } from 'perf_hooks'; + +import { childProcessDone } from './child-process.js'; + +const statusLineFormat = /^([^:]+):[\s]+(.+)$/; + +/** @typedef {import("./process-info.js").ProcessInfo} ProcessInfo */ + +/** + * @callback GetProcessInfo + * @param {number} pid PID of the process to get info + * @returns {Promise} + */ + +/** + * @typedef ProcessHelper + * @property {GetProcessInfo} getProcessInfo + * @property {() => Promise} getCPUTimeOffset + */ + +/** + * + * @param {Object} powers + * @param {import("fs/promises")} powers.fs Node.js promisified fs object + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {number} [powers.startPid] The PID of the process to use as a start time reference + * @returns {ProcessHelper} + * + */ +export const makeProcfsHelper = ({ fs, spawn, startPid = process.pid }) => { + // Kernel data has no encoding so just copy bytes + /** @type {{encoding: BufferEncoding}} */ + const bufferOptions = { encoding: 'latin1' }; + + // A lot of kernel times are in jiffies/ticks, which frequency can be changed + // through a kernel compilation time configuration + const userHertzP = (async () => { + const childProcess = spawn('getconf', ['CLK_TCK'], { stdio: 'pipe' }); + const spawnResult = childProcessDone(childProcess); + + // The result will probably come in a single chunk, but let's be correct + const chunks = []; + for await (const chunk of childProcess.stdout) { + chunks.push(chunk); + } + await spawnResult; + return parseInt(Buffer.concat(chunks).toString(bufferOptions.encoding), 10); + })(); + + /** @typedef {string[]} ProcStat */ + /** + * Returns the split but unparsed stat data from /proc/:pid/stat + * + * @param {number} pid + * @returns {Promise} + */ + const getStat = async (pid) => { + const data = await fs.readFile(`/proc/${pid}/stat`, bufferOptions); + const idx1 = data.indexOf('('); + const idx2 = data.lastIndexOf(')'); + return [ + data.substring(0, idx1 - 1), + data.substring(idx1 + 1, idx2), + ...data.substring(idx2 + 2).split(' '), + ]; + }; + + /** @typedef {Record} ProcStatus */ + /** + * Returns the split but unparsed status data from /proc/:pid/status + * + * @param {number} pid + * @returns {Promise} + */ + const getStatus = async (pid) => { + const data = await fs.readFile(`/proc/${pid}/status`, bufferOptions); + /** @type {ProcStatus} */ + const status = {}; + for (const line of data.split('\n')) { + const matches = statusLineFormat.exec(line); + if (matches) { + status[matches[1]] = matches[2]; + } + } + return status; + }; + + /** + * Returns the split command line from /proc/:pid/cmdline + * + * @param {number} pid + * @returns {Promise} + */ + const getCmdline = async 
(pid) => { + const data = await fs.readFile(`/proc/${pid}/cmdline`, bufferOptions); + if (!data) return null; + const argv = data.split('\x00'); + argv.pop(); // trailing empty line + return argv; + }; + + /** @param {ProcStat} stat */ + const getStartTicks = (stat) => parseInt(stat[21], 10); + + const startTicksOriginP = getStat(startPid).then(getStartTicks); + + // TODO: Use a WeakValueMap + /** @type {Map} */ + const knownProcessInfo = new Map(); + + /** @type {GetProcessInfo} */ + const getProcessInfo = async (pid) => { + const startTicks = getStartTicks(await getStat(pid)); + + // Technically PIDs can be recycled, but the startTicks will be different + const uniquePid = `${pid}-${startTicks}`; + + /** @param {ProcStat} stat */ + const assertSameProcess = (stat) => { + assert(String(pid) === stat[0]); + assert(startTicks === getStartTicks(stat)); + }; + + let processInfo = knownProcessInfo.get(uniquePid); + + if (!processInfo) { + const startTimestamp = + (startTicks - (await startTicksOriginP)) / (await userHertzP); + + processInfo = harden({ + pid, + startTimestamp, + getArgv: async () => { + return getCmdline(pid); + }, + getUsageSnapshot: async () => { + const [stat, status, userHertz] = await Promise.all([ + getStat(pid), + getStatus(pid), + userHertzP, + ]); + assertSameProcess(stat); + + const times = { + blockIo: parseInt(stat[41], 10) / userHertz, + childGuest: parseInt(stat[43], 10) / userHertz, + childKernel: parseInt(stat[16], 10) / userHertz, + childUser: parseInt(stat[15], 10) / userHertz, + guest: parseInt(stat[42], 10) / userHertz, + kernel: parseInt(stat[14], 10) / userHertz, + user: parseInt(stat[13], 10) / userHertz, + }; + + // TODO: Parse /proc/:pid/smaps values to get better RSS info + const memory = { + // rss: parseInt(stat[23], 10) * 4, + // rssSoftLimit: parseInt(stat[24], 10), + // vsize: parseInt(stat[22], 10) / 1024, + vmData: parseInt(status.VmData, 10), + vmExe: parseInt(status.VmExe, 10), + vmHwm: parseInt(status.VmHWM, 10), + vmLib: parseInt(status.VmLib, 10), + vmLocked: parseInt(status.VmLck, 10), + vmPeak: parseInt(status.VmPeak, 10), + vmPinned: parseInt(status.VmPin, 10), + vmPte: parseInt(status.VmPTE, 10), + vmRss: parseInt(status.VmRSS, 10), + vmSize: parseInt(status.VmSize, 10), + vmStack: parseInt(status.VmStk, 10), + vmSwap: parseInt(status.VmSwap, 10), + rssAnon: parseInt(status.RssAnon, 10), + rssFile: parseInt(status.RssFile, 10), + rssShmem: parseInt(status.RssShmem, 10), + }; + + return harden({ times, memory }); + }, + getChildren: async () => { + assertSameProcess(await getStat(pid)); + + const tids = await fs.readdir(`/proc/${pid}/task`, bufferOptions); + + const rawChildrens = await Promise.all( + tids.map((tid) => + fs.readFile(`/proc/${pid}/task/${tid}/children`, bufferOptions), + ), + ); + + /** @type {Set} */ + const cpids = new Set(); + + for (const rawChildren of rawChildrens) { + const rawCpids = rawChildren.split(' '); + if (!rawCpids[rawCpids.length - 1]) { + rawCpids.pop(); // remove empty trail + } + for (const rawCpid of rawCpids) { + cpids.add(parseInt(rawCpid, 10)); + } + } + + // Ignore any children that may have gone missing by the time we get their info + const childrenInfoResolutions = await Promise.allSettled( + [...cpids].map(getProcessInfo), + ); + return harden( + childrenInfoResolutions + .filter(({ status }) => status === 'fulfilled') + .map( + (r) => + /** @type {PromiseFulfilledResult} */ (r).value, + ), + ); + }, + getParent: async () => { + const stat = await getStat(pid); + assertSameProcess(stat); + 
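+ // field 4 of /proc/:pid/stat is the parent PID; it lands at index 3 of the split array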
const ppid = parseInt(stat[3], 10); + return getProcessInfo(ppid); + }, + }); + knownProcessInfo.set(uniquePid, processInfo); + } + + return processInfo; + }; + + /** + * Estimates the offset between ProcessInfo startTimestamp + * and performance.now()'s origin for the current process. + * + * The absolute value of this offset should be below 0.01s + * on a system with somewhat accurate time measurement if + * node was the first image executed. If there was a delay + * from process creation to node execution, the value returned + * will capture an approximation of that delay within 10ms. + * + * @returns {Promise} The offset in seconds + */ + const getCPUTimeOffset = async () => { + const perfNowBefore = performance.now(); + const uptime = await fs.readFile('/proc/uptime', bufferOptions); + const perfNow = (perfNowBefore + performance.now()) / 2; + + // Process start time is static and expressed in jiffies + // It's not adjusted like other kernel monotonic clock + const startMsOrigin = + ((await startTicksOriginP) * 1000) / (await userHertzP); + + // Uptime is a monotonic clock that represents elapsed time since system boot + // It does get adjusted by NTP, and thus might deviate over time from jiffies + const uptimeMs = Number(uptime.split(' ')[0]) * 1000; + + return Math.round(uptimeMs - startMsOrigin - perfNow) / 1000; + }; + + return harden({ getProcessInfo, getCPUTimeOffset }); +}; diff --git a/runner/lib/helpers/stream-steps.js b/runner/lib/helpers/stream-steps.js new file mode 100644 index 0000000..b443d7a --- /dev/null +++ b/runner/lib/helpers/stream-steps.js @@ -0,0 +1,60 @@ +import { promisify } from 'util'; +import { finished as finishedCallback } from 'stream'; + +import { makePromiseKit } from '../sdk/promise-kit.js'; + +import LineStreamTransform from './line-stream-transform.js'; + +const finished = promisify(finishedCallback); + +/** + * @typedef {Object} StepConfig + * @property {RegExp} matcher + * @property {number} [resultIndex=1] the index in the match result to use as resolution + */ + +/** + * @param {import("stream").Readable} stream + * @param {StepConfig[]} steps + * @param {Object} [options] + * @param {boolean} [options.waitEnd=true] + */ +export const whenStreamSteps = (stream, steps, { waitEnd = true } = {}) => { + const stepsAndKits = steps.map((step) => ({ step, kit: makePromiseKit() })); + + const lines = new LineStreamTransform(); + // const pipeResult = pipeline(stream, lines); + stream.pipe(lines); + + const parseResult = (async () => { + for await (const line of lines) { + if (stepsAndKits.length) { + const match = stepsAndKits[0].step.matcher.exec(line); + if (match) { + const stepAndKit = /** @type {{step: StepConfig, kit: import('../sdk/promise-kit.js').PromiseRecord}} */ (stepsAndKits.shift()); + const { + step: { resultIndex = 1 }, + kit: { resolve }, + } = stepAndKit; + resolve(match[resultIndex]); + } + } + + if (!stepsAndKits.length) { + stream.unpipe(lines); + lines.end(); + } + } + + if (stepsAndKits.length) { + const error = new Error('Stream ended before match found'); + stepsAndKits.forEach(({ kit: { reject } }) => reject(error)); + } + + if (waitEnd) { + await finished(stream); + } + })(); + + return [...stepsAndKits.map(({ kit: { promise } }) => promise), parseResult]; +}; diff --git a/runner/lib/main.js b/runner/lib/main.js new file mode 100644 index 0000000..40cc34a --- /dev/null +++ b/runner/lib/main.js @@ -0,0 +1,978 @@ +/* global process setInterval clearInterval */ +/* eslint-disable no-continue */ + +import { resolve as 
resolvePath, join as joinPath, basename } from 'path'; +import { performance } from 'perf_hooks'; +import zlib from 'zlib'; +import { promisify } from 'util'; +import { + pipeline as pipelineCallback, + finished as finishedCallback, +} from 'stream'; + +import yargsParser from 'yargs-parser'; +import chalk from 'chalk'; +import { makePromiseKit } from './sdk/promise-kit.js'; + +import { + sleep, + PromiseAllOrErrors, + warnOnRejection, + aggregateTryFinally, + sequential, +} from './helpers/async.js'; +import { childProcessDone } from './helpers/child-process.js'; +import { makeFsHelper } from './helpers/fs.js'; +import { makeProcfsHelper } from './helpers/procsfs.js'; +import { makeOutputter } from './helpers/outputter.js'; + +import { makeTasks as makeLocalChainTasks } from './tasks/local-chain.js'; +import { makeTasks as makeTestnetTasks } from './tasks/testnet.js'; + +/** @typedef {import('./helpers/async.js').Task} Task */ + +const pipeline = promisify(pipelineCallback); +const finished = promisify(finishedCallback); + +const defaultLoadgenConfig = { + vault: { interval: 120 }, + amm: { wait: 60, interval: 120 }, +}; +const defaultMonitorIntervalMinutes = 5; +const defaultStageDurationMinutes = 6 * 60; +const defaultNumberStages = 4 + 2; + +const vatIdentifierRE = /^(v\d+):(.*)$/; +const knownVatsNamesWithoutProcess = ['comms', 'vattp']; + +/** + * @typedef { | + * 'cosmic-swingset-bootstrap-block-start' | + * 'cosmic-swingset-bootstrap-block-finish' | + * 'cosmic-swingset-end-block-start' | + * 'cosmic-swingset-end-block-finish' | + * 'cosmic-swingset-begin-block' + * } SlogCosmicSwingsetEventTypes + */ + +/** + * @typedef { | + * 'create-vat' | + * 'vat-startup-finish' | + * 'replay-transcript-start' | + * SlogCosmicSwingsetEventTypes + * } SlogSupportedEventTypes + */ + +/** + * @typedef {{ + * time: number, + * type: SlogSupportedEventTypes + * }} SlogEventBase + */ + +/** + * @typedef {SlogEventBase & Record} SlogEvent + */ + +/** + * @typedef {{ + * time: number, + * type: 'create-vat', + * vatID: string, + * name?: string, + * dynamic: boolean, + * } & Record} SlogCreateVatEvent + */ + +/** + * @typedef {{ + * time: number, + * type: 'vat-startup-finish' | 'replay-transcript-start', + * vatID: string + * } & Record} SlogVatEvent + */ + +/** + * @typedef {{ + * time: number, + * type: SlogCosmicSwingsetEventTypes, + * blockHeight?: number, + * blockTime: number + * } & Record} SlogCosmicSwingsetEvent + */ + +/** @type {SlogSupportedEventTypes[]} */ +const supportedSlogEventTypes = [ + 'create-vat', + 'vat-startup-finish', + 'replay-transcript-start', + 'cosmic-swingset-bootstrap-block-start', + 'cosmic-swingset-bootstrap-block-finish', + 'cosmic-swingset-end-block-start', + 'cosmic-swingset-end-block-finish', + 'cosmic-swingset-begin-block', +]; + +const slogEventRE = new RegExp( + `^{"time":\\d+(?:\\.\\d+),"type":"(?:${supportedSlogEventTypes.join('|')})"`, +); + +/** + * @param {unknown} maybeObj + * @param {Record} [defaultValue] + */ +const coerceRecordOption = (maybeObj, defaultValue = {}) => { + if (maybeObj == null) { + return defaultValue; + } + + if (typeof maybeObj !== 'object') { + throw new Error('Unexpected object option value'); + } + + return /** @type {Record} */ (maybeObj); +}; + +/** + * @template {boolean | undefined} T + * @param {unknown} maybeBoolValue + * @param {T} defaultValue + * @param {boolean} [assertBool] + */ +const coerceBooleanOption = ( + maybeBoolValue, + defaultValue, + assertBool = true, +) => { + switch (maybeBoolValue) { + case 1: + case 
true: + case 'true': + return true; + case 0: + case false: + case 'false': + return false; + case null: + case undefined: + return defaultValue; + default: + if (assertBool) { + throw new Error(`Unexpected boolean option value ${maybeBoolValue}`); + } + return defaultValue; + } +}; + +const makeInterrupterKit = () => { + const signal = makePromiseKit(); + /** @type {Error | null} */ + let rejection = null; + const onInterrupt = () => { + if (rejection) { + console.warn('Interruption already in progress'); + } else { + rejection = new Error('Interrupted'); + signal.reject(rejection); + } + }; + const onExit = () => { + throw new Error('Interrupt was not cleaned up'); + }; + process.on('SIGINT', onInterrupt); + process.on('SIGTERM', onInterrupt); + process.on('exit', onExit); + + let orInterruptCalled = false; + + const orInterrupt = async (job = new Promise(() => {})) => { + orInterruptCalled = true; + return Promise.race([signal.promise, job]); + }; + + const releaseInterrupt = async () => { + process.off('SIGINT', onInterrupt); + process.off('SIGTERM', onInterrupt); + process.off('exit', onExit); + if (!orInterruptCalled && rejection) { + throw rejection; + } + }; + + // Prevent unhandled rejection when orInterrupt is called after interruption + signal.promise.catch(() => {}); + + return { orInterrupt, releaseInterrupt }; +}; + +/** + * + * @param {string} progName + * @param {string[]} rawArgs + * @param {Object} powers + * @param {import("stream").Writable} powers.stdout + * @param {import("stream").Writable} powers.stderr + * @param {import("fs/promises")} powers.fs Node.js promisified fs object + * @param {import("./helpers/fs.js").fsStream} powers.fsStream Node.js fs stream operations + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {string} powers.tmpDir Directory location to place temporary files in + */ +const main = async (progName, rawArgs, powers) => { + const { stdout, stderr, fs, fsStream, spawn, tmpDir } = powers; + + // TODO: switch to full yargs for documenting output + const argv = yargsParser(rawArgs); + + const { getProcessInfo, getCPUTimeOffset } = makeProcfsHelper({ fs, spawn }); + const { findByPrefix, dirDiskUsage, makeFIFO } = makeFsHelper({ + fs, + fsStream, + spawn, + tmpDir, + }); + + /** + * @param {string} [prefix] + * @param {import("stream").Writable} [out] + * @param {import("stream").Writable} [err] + */ + const makeConsole = (prefix, out = stdout, err = stderr) => + makeOutputter({ + out, + err, + outPrefix: prefix && `${chalk.green(prefix)}: `, + errPrefix: prefix && `${chalk.bold.red(prefix)}: `, + }); + + let { console } = makeConsole(); + + const outputDir = String(argv.outputDir || `results/run-${Date.now()}`); + console.log(`Outputting to ${resolvePath(outputDir)}`); + await fs.mkdir(outputDir, { recursive: true }); + + let makeTasks; + /** @type {string} */ + let testnetOrigin; + + switch (argv.profile) { + case null: + case undefined: + case 'local': + makeTasks = makeLocalChainTasks; + testnetOrigin = ''; + break; + case 'testnet': + case 'stage': + makeTasks = makeTestnetTasks; + testnetOrigin = + argv.testnetOrigin || `https://${argv.profile}.agoric.net`; + break; + default: + throw new Error(`Unexpected profile option: ${argv.profile}`); + } + + const { setupTasks, runChain, runClient, runLoadgen } = makeTasks({ + spawn, + fs, + findDirByPrefix: findByPrefix, + makeFIFO, + getProcessInfo, + }); + + const outputStream = fsStream.createWriteStream( + joinPath(outputDir, 'perf.jsonl'), + ); + + const 
monitorInterval = + Number(argv.monitorInterval || defaultMonitorIntervalMinutes) * 60 * 1000; + + let currentStage = -1; + let currentStageElapsedOffsetNs = 0; + const cpuTimeOffset = await getCPUTimeOffset(); + + /** + * + * @param {string} eventType + * @param {Record} [data] + */ + const logPerfEvent = (eventType, data = {}) => { + const perfNowNs = performance.now() * 1000; + outputStream.write( + JSON.stringify( + { + timestamp: Math.round(perfNowNs) / 1e6, + stage: currentStage, + elapsed: Math.round(perfNowNs - currentStageElapsedOffsetNs) / 1e6, + time: undefined, // Placeholder to put data.time before type if it exists + type: `perf-${eventType}`, + ...data, + }, + (_, arg) => (typeof arg === 'bigint' ? Number(arg) : arg), + ), + ); + outputStream.write('\n'); + }; + + /** + * @param {import("./tasks/types.js").RunChainInfo} chainInfo + * @param {Object} param1 + * @param {() => void} param1.resolveFirstEmptyBlock + * @param {import("stream").Writable} param1.out + * @param {import("stream").Writable} param1.err + */ + const monitorChain = async ( + { slogLines, storageLocation, processInfo: kernelProcessInfo }, + { resolveFirstEmptyBlock, out, err }, + ) => { + const { console: monitorConsole } = makeConsole('monitor-chain', out, err); + + /** + * @typedef {{ + * processInfo: import("./helpers/procsfs.js").ProcessInfo | null | undefined, + * vatName: string | undefined, + * started: boolean, + * }} VatInfo + */ + /** @type {Map} */ + const vatInfos = new Map(); + let vatUpdated = Promise.resolve(); + + const updateVatInfos = async () => { + monitorConsole.log('Updating vat infos'); + const childrenInfos = new Set( + await kernelProcessInfo.getChildren().catch(() => []), + ); + for (const info of childrenInfos) { + const vatArgv = await info.getArgv(); // eslint-disable-line no-await-in-loop + if (!vatArgv || basename(vatArgv[0]) !== 'xsnap') continue; + const vatIdentifierMatches = vatIdentifierRE.exec(vatArgv[1]); + if (!vatIdentifierMatches) continue; + const vatID = vatIdentifierMatches[1]; + const vatInfo = vatInfos.get(vatID); + + if (!vatInfo) { + /** @type {string | undefined} */ + let vatName = vatIdentifierMatches[2]; + if (!vatName || vatName === 'undefined') vatName = undefined; + // TODO: warn found vat process without create event + monitorConsole.warn( + `found vat ${vatID}${ + vatName ? ` ${vatName}` : '' + } process before create event`, + 'pid=', + info.pid, + ); + // vatInfo = { vatName, processInfo: info }; + // vatInfos.set(vatID, vatInfo); + continue; + } + + if (vatInfo.processInfo !== info) { + // TODO: warn if replacing with new processInfo ? + } + + vatInfo.processInfo = info; + + // if (!vatInfo.started) { + // monitorConsole.warn( + // `found vat ${vatID}${ + // vatInfo.vatName ? ` ${vatInfo.vatName}` : '' + // } process before vat start event`, + // 'pid=', + // info.pid, + // ); + // } + } + for (const [vatID, vatInfo] of vatInfos) { + if (vatInfo.processInfo && !childrenInfos.has(vatInfo.processInfo)) { + vatInfo.processInfo = null; + } + + if ( + vatInfo.started && + !vatInfo.processInfo && + vatInfo.vatName && + !knownVatsNamesWithoutProcess.includes(vatInfo.vatName) + ) { + // Either the vat started but the process doesn't exist yet (undefined) + // or the vat process exited but the vat didn't stop yet (null) + monitorConsole.warn( + `Vat ${vatID} started but process ${ + vatInfo.processInfo === null + ? 
'exited early' + : "doesn't exist yet" + }`, + ); + } + } + }; + + const ensureVatInfoUpdated = async () => { + const vatUpdatedBefore = vatUpdated; + await vatUpdated; + if (vatUpdated === vatUpdatedBefore) { + vatUpdated = updateVatInfos(); + warnOnRejection( + vatUpdated, + monitorConsole, + 'Failed to update vat process infos', + ); + } + }; + + const logProcessUsage = async () => + PromiseAllOrErrors( + [ + { + eventData: { + processType: 'kernel', + }, + processInfo: kernelProcessInfo, + }, + ...[...vatInfos].map(([vatID, { processInfo, vatName }]) => ({ + eventData: { + processType: 'vat', + vatID, + name: vatName, + }, + processInfo, + })), + ].map(async ({ eventData, processInfo }) => { + if (!processInfo) return; + const { times, memory } = await processInfo.getUsageSnapshot(); + logPerfEvent('chain-process-usage', { + ...eventData, + real: + Math.round( + performance.now() * 1000 - + (processInfo.startTimestamp - cpuTimeOffset) * 1e6, + ) / 1e6, + ...times, + ...memory, + }); + }), + ).then(() => {}); + + const logStorageUsage = async () => { + logPerfEvent('chain-storage-usage', { + chain: await dirDiskUsage(storageLocation), + }); + }; + + const monitorIntervalId = setInterval( + () => + warnOnRejection( + PromiseAllOrErrors([logStorageUsage(), logProcessUsage()]), + monitorConsole, + 'Failure during usage monitoring', + ), + monitorInterval, + ); + + const slogOutput = zlib.createGzip({ + level: zlib.constants.Z_BEST_COMPRESSION, + }); + const slogOutputWriteStream = fsStream.createWriteStream( + joinPath(outputDir, `chain-stage-${currentStage}.slog.gz`), + ); + // const slogOutput = slogOutputWriteStream; + // const slogOutputPipeResult = finished(slogOutput); + const slogOutputPipeResult = pipeline(slogOutput, slogOutputWriteStream); + + /** @type {number | null} */ + let slogStart = null; + + let slogBlocksSeen = 0; + let slogEmptyBlocksSeen = 0; + let slogLinesInBlock = 0; + + for await (const line of slogLines) { + slogOutput.write(line); + slogOutput.write('\n'); + + if (slogStart == null) { + // TODO: figure out a better way + // There is a risk we could be late to the party here, with the chain + // having started some time before us but in reality we usually find + // the process before it starts the kernel + slogStart = performance.now() / 1000; + warnOnRejection( + logStorageUsage(), + monitorConsole, + 'Failed to get first storage usage', + ); + } + + slogLinesInBlock += 1; + + // Avoid JSON parsing lines we don't care about + if (!slogEventRE.test(line)) continue; + + const localEventTime = performance.timeOrigin + performance.now(); + + /** @type {SlogEvent} */ + let event; + try { + event = JSON.parse(line); + } catch (error) { + monitorConsole.warn('Failed to parse slog line', line, error); + continue; + } + + monitorConsole.log( + 'slog event', + event.type, + 'delay', + Math.round(localEventTime - event.time * 1000), + 'ms', + ); + + switch (event.type) { + case 'create-vat': { + const { + vatID, + name: vatName, + } = /** @type {SlogCreateVatEvent} */ (event); + if (!vatInfos.has(vatID)) { + vatInfos.set(vatID, { + vatName, + processInfo: undefined, + started: false, + }); + } else { + // TODO: warn already created vat before + } + break; + } + case 'vat-startup-finish': { + const { vatID } = /** @type {SlogVatEvent} */ (event); + const vatInfo = vatInfos.get(vatID); + if (!vatInfo) { + // TODO: warn unknown vat + } else { + vatInfo.started = true; + ensureVatInfoUpdated(); + } + break; + } + case 'replay-transcript-start': { + const { vatID } = /** @type 
{SlogVatEvent} */ (event); + const vatInfo = vatInfos.get(vatID); + if (!vatInfo) { + // TODO: warn unknown vat + } else if (!vatInfo.processInfo) { + ensureVatInfoUpdated(); + } + break; + } + case 'cosmic-swingset-bootstrap-block-start': { + logPerfEvent('chain-first-init-start'); + break; + } + case 'cosmic-swingset-bootstrap-block-finish': { + logPerfEvent('chain-first-init-finish'); + break; + } + case 'cosmic-swingset-end-block-start': { + if (event.blockHeight === 0) { + // Before https://github.com/Agoric/agoric-sdk/pull/3491 + // bootstrap didn't have it's own slog entry + logPerfEvent('chain-first-init-start'); + } + slogLinesInBlock = 0; + break; + } + case 'cosmic-swingset-end-block-finish': { + if (event.blockHeight === 0) { + // TODO: measure duration from start to finish + logPerfEvent('chain-first-init-finish'); + } + // Finish line doesn't count + slogLinesInBlock -= 1; + if (slogLinesInBlock === 0) { + if (!slogEmptyBlocksSeen) { + logPerfEvent('stage-first-empty-block', { + block: event.blockHeight, + }); + resolveFirstEmptyBlock(); + } + slogEmptyBlocksSeen += 1; + } + monitorConsole.log( + 'end-block', + event.blockHeight, + 'linesInBlock=', + slogLinesInBlock, + ); + break; + } + case 'cosmic-swingset-begin-block': { + if (!slogBlocksSeen) { + logPerfEvent('stage-first-block', { block: event.blockHeight }); + warnOnRejection( + logProcessUsage(), + monitorConsole, + 'Failed to get initial process usage', + ); + } + slogBlocksSeen += 1; + monitorConsole.log('begin-block', event.blockHeight); + break; + } + default: + } + } + + clearInterval(monitorIntervalId); + + slogOutput.end(); + await slogOutputPipeResult; + }; + + /** + * @param {Object} param0 + * @param {boolean} param0.chainOnly + * @param {number} param0.duration + * @param {unknown} param0.loadgenConfig + * @param {boolean} param0.withMonitor + * @param {boolean} param0.saveStorage + */ + const runStage = async ({ + chainOnly, + duration, + loadgenConfig, + withMonitor, + saveStorage, + }) => { + /** @type {import("stream").Writable} */ + let out; + /** @type {import("stream").Writable} */ + let err; + + /** @type {string | void} */ + let chainStorageLocation; + currentStageElapsedOffsetNs = performance.now() * 1000; + ({ console, out, err } = makeConsole(`stage-${currentStage}`)); + + const { console: stageConsole } = makeConsole('runner', out, err); + + const { orInterrupt, releaseInterrupt } = makeInterrupterKit(); + + logPerfEvent('stage-start'); + const stageStart = performance.now(); + + /** @type {Task} */ + const spawnChain = async (nextStep) => { + stageConsole.log('Running chain', { chainOnly, duration, loadgenConfig }); + logPerfEvent('run-chain-start'); + const runChainResult = await runChain({ stdout: out, stderr: err }); + logPerfEvent('run-chain-finish'); + + let chainExited = false; + const done = runChainResult.done.finally(() => { + chainExited = true; + logPerfEvent('chain-stopped'); + }); + + currentStageElapsedOffsetNs = + (runChainResult.processInfo.startTimestamp - cpuTimeOffset) * 1e6; + chainStorageLocation = runChainResult.storageLocation; + /** @type {import("./sdk/promise-kit.js").PromiseRecord} */ + const { + promise: chainFirstEmptyBlock, + resolve: resolveFirstEmptyBlock, + } = makePromiseKit(); + const monitorChainDone = monitorChain(runChainResult, { + resolveFirstEmptyBlock, + out, + err, + }); + + await aggregateTryFinally( + async () => { + await orInterrupt(runChainResult.ready); + logPerfEvent('chain-ready'); + stageConsole.log('Chain ready'); + + await 
orInterrupt(chainFirstEmptyBlock); + + await nextStep(done); + }, + async () => { + if (!chainExited) { + stageConsole.log('Stopping chain'); + + runChainResult.stop(); + await done; + } + + await monitorChainDone; + }, + ); + }; + + /** @type {Task} */ + const spawnClient = async (nextStep) => { + stageConsole.log('Running client'); + logPerfEvent('run-client-start'); + const runClientStart = performance.now(); + const runClientResult = await runClient({ stdout: out, stderr: err }); + logPerfEvent('run-client-finish'); + + let clientExited = false; + const done = runClientResult.done.finally(() => { + clientExited = true; + logPerfEvent('client-stopped'); + }); + + await aggregateTryFinally( + async () => { + await orInterrupt(runClientResult.ready); + logPerfEvent('client-ready', { + duration: + Math.round((performance.now() - runClientStart) * 1000) / 1e6, + }); + + await nextStep(done); + }, + async () => { + if (!clientExited) { + stageConsole.log('Stopping client'); + + runClientResult.stop(); + await done; + } + }, + ); + }; + + /** @type {Task} */ + const spawnLoadgen = async (nextStep) => { + stageConsole.log('Running load gen'); + logPerfEvent('run-loadgen-start'); + const runLoadgenResult = await runLoadgen({ + stdout: out, + stderr: err, + config: loadgenConfig, + }); + logPerfEvent('run-loadgen-finish'); + + let loadgenExited = false; + const done = runLoadgenResult.done.finally(() => { + loadgenExited = true; + logPerfEvent('loadgen-stopped'); + }); + + await aggregateTryFinally( + async () => { + await orInterrupt(runLoadgenResult.ready); + logPerfEvent('loadgen-ready'); + + await nextStep(done); + }, + async () => { + if (!loadgenExited) { + stageConsole.log('Stopping loadgen'); + + runLoadgenResult.stop(); + await done; + } + }, + ); + }; + + /** @type {Task} */ + const stageReady = async (nextStep) => { + /** @type {Promise} */ + let sleeping; + if (duration < 0) { + // sleeping forever + sleeping = new Promise(() => {}); + stageConsole.log('Stage ready, waiting for end of chain'); + } else { + const sleepTime = Math.max( + 0, + duration - (performance.now() - stageStart), + ); + if (sleepTime) { + sleeping = sleep(sleepTime); + stageConsole.log( + 'Stage ready, going to sleep for', + Math.round(sleepTime / (1000 * 60)), + 'minutes', + ); + } else { + sleeping = Promise.resolve(); + stageConsole.log('Stage ready, no time to sleep, moving on'); + } + } + logPerfEvent('stage-ready'); + await nextStep(sleeping); + logPerfEvent('stage-shutdown'); + }; + + await aggregateTryFinally( + async () => { + /** @type {Task} */ + const rootTask = async (nextStep) => { + const done = orInterrupt(); + done.catch(() => {}); + await nextStep(done); + }; + + /** @type {Task[]} */ + const tasks = [rootTask]; + + if (withMonitor) { + tasks.push(spawnChain); + } + + if (!chainOnly) { + tasks.push(spawnClient, spawnLoadgen); + } + + if (tasks.length === 1) { + throw new Error('Nothing to do'); + } else { + tasks.push(stageReady); + } + + await sequential(...tasks)((stop) => stop); + }, + async () => + aggregateTryFinally( + async () => { + if (saveStorage && chainStorageLocation != null) { + stageConsole.log('Saving chain storage'); + await childProcessDone( + spawn('tar', [ + '-cSJf', + joinPath( + outputDir, + `chain-storage-stage-${currentStage}.tar.xz`, + ), + chainStorageLocation, + ]), + ); + } + }, + async () => { + releaseInterrupt(); + + logPerfEvent('stage-finish'); + currentStageElapsedOffsetNs = 0; + }, + ), + ); + }; + + // Main + + await aggregateTryFinally( + async () => { + /** 
@type {import("stream").Writable} */ + let out; + /** @type {import("stream").Writable} */ + let err; + ({ console, out, err } = makeConsole('init')); + logPerfEvent('start', { + cpuTimeOffset: await getCPUTimeOffset(), + timeOrigin: performance.timeOrigin / 1000, + // TODO: add other interesting info here + }); + + const withMonitor = coerceBooleanOption(argv.monitor, true); + { + const { releaseInterrupt } = makeInterrupterKit(); + + const reset = coerceBooleanOption(argv.reset, true); + const setupConfig = { reset, withMonitor, testnetOrigin }; + logPerfEvent('setup-tasks-start', setupConfig); + await aggregateTryFinally( + // Do not short-circuit on interrupt, let the spawned setup process terminate + async () => + setupTasks({ stdout: out, stderr: err, config: setupConfig }), + + // This will throw if there was any interrupt, and prevent further execution + async () => releaseInterrupt(), + ); + logPerfEvent('setup-tasks-finish'); + } + + const stages = + argv.stages != null + ? parseInt(String(argv.stages), 10) + : defaultNumberStages; + + const stageConfigs = coerceRecordOption(argv.stage); + + const sharedLoadgenConfig = coerceRecordOption( + stageConfigs.loadgen, + defaultLoadgenConfig, + ); + + const sharedStageDurationMinutes = + stageConfigs.duration != null + ? Number(stageConfigs.duration) + : defaultStageDurationMinutes; + + while (currentStage < stages - 1) { + currentStage += 1; + + const stageConfig = coerceRecordOption(stageConfigs[currentStage]); + + const withLoadgen = coerceBooleanOption( + stageConfig.loadgen, + undefined, + false, + ); + + const loadgenConfig = + withLoadgen == null + ? coerceRecordOption(stageConfig.loadgen, sharedLoadgenConfig) + : sharedLoadgenConfig; + + // By default the first stage will only initialize the chain from genesis + // and the last stage will only capture the chain restart time + // loadgen and chainOnly options overide default + const chainOnly = coerceBooleanOption( + stageConfig.chainOnly, + withLoadgen != null + ? !withLoadgen // use boolean loadgen option value as default chainOnly + : loadgenConfig === sharedLoadgenConfig && // user provided stage loadgen config implies chain + withMonitor && // If monitor is disabled, chainOnly has no meaning + (currentStage === 0 || currentStage === stages - 1), + ); + + const saveStorage = coerceBooleanOption( + stageConfig.saveStorage, + !chainOnly || currentStage === 0, + ); + + const duration = + (stageConfig.duration != null + ? Number(stageConfig.duration) + : (!chainOnly && sharedStageDurationMinutes) || 0) * + 60 * + 1000; + + // eslint-disable-next-line no-await-in-loop + await runStage({ + chainOnly, + duration, + loadgenConfig, + withMonitor, + saveStorage, + }); + } + }, + async () => { + outputStream.end(); + + await finished(outputStream); + }, + ); +}; + +export default main; diff --git a/runner/lib/sdk/install-ses.js b/runner/lib/sdk/install-ses.js new file mode 100644 index 0000000..32c159e --- /dev/null +++ b/runner/lib/sdk/install-ses.js @@ -0,0 +1,149 @@ +/* global LOCKDOWN_OPTIONS process */ +// 'lockdown' appears on the global as a side-effect of importing 'ses' +import 'ses'; + +// Install our HandledPromise global. +// import '@agoric/eventual-send/shim.js'; + +// For testing under Ava, and also sometimes for testing and debugging in +// general, when safety is not needed, you perhaps want to use +// packages/SwingSet/tools/install-ses-debug.js instead of this one. +// If you're using a prepare-test-env-ava.js, it is probably already doing that +// for you. 
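+//
+// As a concrete illustration of the environment-variable mechanism described
+// below (example values and invocation only, not a recommendation):
+//
+//   LOCKDOWN_OPTIONS='{"stackFiltering":"verbose"}' node runner/bin/loadgen-runner
+//
+// would make this module call `lockdown({ stackFiltering: 'verbose' })`.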
+ +// The`@agoric/import-ses` package exists so the "main" of production code can +// start with the following import or its equivalent. +// ```js +// import '@agoric/install-ses'; +// ``` +// But production code must also be tested. Normal ocap discipline of passing +// explicit arguments into the `lockdown` +// call would require an awkward structuring of start modules, since +// the `install-ses` module calls `lockdown` during its initialization, +// before any explicit code in the start module gets to run. Even if other code +// does get to run first, the `lockdown` call in this module happens during +// module initialization, before it can legitimately receive parameters by +// explicit parameter passing. +// +// Instead, for now, `install-ses` violates normal ocap discipline by feature +// testing global state for a passed "parameter". This is something that a +// module can but normally should not do, during initialization or otherwise. +// Initialization is often awkward. +// +// The `install-ses` module tests, first, +// for a JavaScript global named `LOCKDOWN_OPTIONS`, and second, for an +// environment +// variable named `LOCKDOWN_OPTIONS`. If either is present, its value should be +// a JSON encoding of the options bag to pass to the `lockdown` call. If so, +// then `install-ses` calls `lockdown` with those options. If there is no such +// feature, `install-ses` calls `lockdown` with appropriate settings for +// production use. + +let optionsString; +if (typeof LOCKDOWN_OPTIONS === 'string') { + optionsString = LOCKDOWN_OPTIONS; + console.log( + `'@agoric/install-ses' sniffed and found a 'LOCKDOWN_OPTIONS' global variable\n`, + ); +} else if ( + typeof process === 'object' && + typeof process.env.LOCKDOWN_OPTIONS === 'string' +) { + optionsString = process.env.LOCKDOWN_OPTIONS; + console.log( + `'@agoric/install-ses' sniffed and found a 'LOCKDOWN_OPTIONS' environment variable\n`, + ); +} + +if (typeof optionsString === 'string') { + let options; + try { + options = JSON.parse(optionsString); + } catch (err) { + console.error('Environment variable LOCKDOWN_OPTIONS must be JSON', err); + throw err; + } + if (typeof options !== 'object' || Array.isArray(options)) { + const err = new TypeError( + 'Environment variable LOCKDOWN_OPTIONS must be a JSON object', + ); + console.error('', err, options); + throw err; + } + lockdown(options); +} else { + lockdown({ + // The default `{errorTaming: 'safe'}` setting, if possible, redacts the + // stack trace info from the error instances, so that it is not available + // merely by saying `errorInstance.stack`. However, some tools + // will look for the stack there and become much less useful if it is + // missing. In production, the settings in this file need to preserve + // security, so the 'unsafe' setting below MUST always be commented out + // except during private development. + // + // NOTE TO REVIEWERS: If you see the following line *not* commented out, + // this may be a development accident that MUST be fixed before merging. + // + // errorTaming: 'unsafe', + // + // + // The default `{stackFiltering: 'concise'}` setting usually makes for a + // better debugging experience, by severely reducing the noisy distractions + // of the normal verbose stack traces. Which is why we comment + // out the `'verbose'` setting is commented out below. 
However, some + // tools look for the full filename that it expects in order + // to fetch the source text for diagnostics, + // + // Another reason for not commenting it out: The cause + // of the bug may be anywhere, so the `'noise'` thrown out by the default + // `'concise'` setting may also contain the signal you need. To see it, + // uncomment out the following line. But please do not commit it in that + // state. + // + // NOTE TO REVIEWERS: If you see the following line *not* commented out, + // this may be a development accident that MUST be fixed before merging. + // + // stackFiltering: 'verbose', + // + // + // The default `{overrideTaming: 'moderate'}` setting does not hurt the + // debugging experience much. But it will introduce noise into, for example, + // the vscode debugger's object inspector. During debug and test, if you can + // avoid legacy code that needs the `'moderate'` setting, then the `'min'` + // setting reduces debugging noise yet further, by turning fewer inherited + // properties into accessors. + // + // NOTE TO REVIEWERS: If you see the following line *not* commented out, + // this may be a development accident that MUST be fixed before merging. + // + // overrideTaming: 'min', + // + // + // The default `{consoleTaming: 'safe'}` setting usually makes for a + // better debugging experience, by wrapping the original `console` with + // the SES replacement `console` that provides more information about + // errors, expecially those thrown by the `assert` system. However, + // in case the SES `console` is getting in the way, we provide the + // `'unsafe'` option for leaving the original `console` in place. + // + // NOTE TO REVIEWERS: If you see the following line *not* commented out, + // this may be a development accident that MUST be fixed before merging. + // + // consoleTaming: 'unsafe', + }); +} + +// We are now in the "Start Compartment". Our global has all the same +// powerful things it had before, but the primordials have changed to make +// them safe to use in the arguments of API calls we make into more limited +// compartments + +// 'Compartment' and 'harden' (and `StaticModuleRecord`) are now present in +// our global scope. + +// Even on non-v8, we tame the start compartment's Error constructor so +// this assignment is not rejected, even if it does nothing. +Error.stackTraceLimit = Infinity; + +harden(TextEncoder); +harden(TextDecoder); diff --git a/runner/lib/sdk/promise-kit.js b/runner/lib/sdk/promise-kit.js new file mode 100644 index 0000000..e09129c --- /dev/null +++ b/runner/lib/sdk/promise-kit.js @@ -0,0 +1,82 @@ +/* global globalThis */ +// @ts-check + +// eslint-disable-next-line spaced-comment +/// + +/** @type {import('@agoric/eventual-send').HandledPromiseConstructor | PromiseConstructor} */ +const BestPipelinablePromise = globalThis.HandledPromise || Promise; + +/** + * @template T + * @typedef {Object} PromiseRecord A reified Promise + * @property {(value: ERef) => void} resolve + * @property {(reason: any) => void} reject + * @property {Promise} promise + */ + +/** + * @template T + * @typedef {T | PromiseLike} ERef + * A reference of some kind for to an object of type T. It may be a direct + * reference to a local T. It may be a local presence for a remote T. It may + * be a promise for a local or remote T. Or it may even be a thenable + * (a promise-like non-promise with a "then" method) for a T. + */ + +/** + * Needed to prevent type errors where functions are detected to be undefined. 
+ */ +const NOOP_INITIALIZER = harden(() => {}); + +/** + * makePromiseKit() builds a Promise object, and returns a record + * containing the promise itself, as well as separate facets for resolving + * and rejecting it. + * + * @template T + * @returns {PromiseRecord} + */ +export function makePromiseKit() { + /** @type {(value: ERef) => void} */ + let res = NOOP_INITIALIZER; + /** @type {(reason: any) => void} */ + let rej = NOOP_INITIALIZER; + + /** @type {Promise & {domain?: unknown}} */ + const p = new BestPipelinablePromise((resolve, reject) => { + res = resolve; + rej = reject; + }); + // Node.js adds the `domain` property which is not a standard + // property on Promise. Because we do not know it to be ocap-safe, + // we remove it. + if ('domain' in p) { + // deleting p.domain may break functionality. To retain current + // functionality at the expense of safety, set unsafe to true. + const unsafe = false; + if (unsafe) { + const originalDomain = p.domain; + Object.defineProperty(p, 'domain', { + get() { + return originalDomain; + }, + }); + } else { + delete p.domain; + } + } + return harden({ promise: p, resolve: res, reject: rej }); +} +harden(makePromiseKit); + +/** + * Determine if the argument is a Promise. + * + * @param {any} maybePromise The value to examine + * @returns {maybePromise is Promise} Whether it is a promise + */ +export function isPromise(maybePromise) { + return Promise.resolve(maybePromise) === maybePromise; +} +harden(isPromise); diff --git a/runner/lib/sdk/ses-types.d.ts b/runner/lib/sdk/ses-types.d.ts new file mode 100644 index 0000000..da34322 --- /dev/null +++ b/runner/lib/sdk/ses-types.d.ts @@ -0,0 +1,11 @@ +/* eslint-disable */ + +// This file is not referenced anywhere but it makes +// tsc happy for missing types in the source of dependencies + +declare global { + var LOCKDOWN_OPTIONS: string | void; + var HandledPromise: HandledPromiseConstructor; +} + +export {}; diff --git a/runner/lib/tasks/helpers.js b/runner/lib/tasks/helpers.js new file mode 100644 index 0000000..0f1a288 --- /dev/null +++ b/runner/lib/tasks/helpers.js @@ -0,0 +1,128 @@ +import chalk from 'chalk'; + +// TODO: pass an "httpRequest" as power instead of importing +import http from 'http'; +import https from 'https'; +import fs from 'fs'; + +import { sleep } from '../helpers/async.js'; +import { makeOutputter } from '../helpers/outputter.js'; + +const protocolModules = { + 'http:': http, + 'https:': https, +}; + +/** + * @param {string | URL} urlOrString + * @param {http.RequestOptions & {body?: Buffer}} [options] + * @returns {Promise} + */ +export const httpRequest = (urlOrString, options = {}) => { + return new Promise((resolve, reject) => { + const url = + typeof urlOrString === 'string' ? 
new URL(urlOrString) : urlOrString; + + if (url.protocol === 'file:') { + const stream = fs.createReadStream(url.pathname); + // Ugly cast hack to make res look like what the consumer cares about + const res = /** @type {http.IncomingMessage} */ (harden( + /** @type {unknown} */ ({ + [Symbol.asyncIterator]: () => stream[Symbol.asyncIterator](), + statusCode: 200, + }), + )); + resolve(res); + return; + } + + if (!(url.protocol in protocolModules)) { + throw new Error(`Invalid protocol ${url.protocol}`); + } + + const protocolModule = + protocolModules[/** @type {keyof protocolModules} */ (url.protocol)]; + + const { body, ...httpOptions } = options; + + const req = protocolModule.request(url, httpOptions); + req.on('response', resolve).on('error', reject); + if (body) { + req.write(body); + } + req.end(); + }); +}; + +/** @typedef {(argv: string[]) => boolean} ArgvMatcher */ + +/** + * @param {(RegExp | null | undefined)[]} argMatchers + * @returns {ArgvMatcher} + */ +export const getArgvMatcher = (argMatchers) => (argv) => + argv.every((arg, idx) => { + const matcher = argMatchers[idx]; + return !matcher || matcher.test(arg); + }); + +/** + * @param {ArgvMatcher} argvMatcher + * @returns {ArgvMatcher} + */ +export const wrapArgvMatcherIgnoreEnvShebang = (argvMatcher) => (argv) => + argvMatcher(argv) || (/env$/.test(argv[0]) && argvMatcher(argv.slice(1))); + +/** + * @param {import('../helpers/process-info.js').ProcessInfo} launcherInfo + * @param {ArgvMatcher} argvMatcher + * @param {number} [retries] + * @returns {Promise} + */ +export const getChildMatchingArgv = async ( + launcherInfo, + argvMatcher, + retries = 50, +) => { + const childrenWithArgv = await Promise.all( + (await launcherInfo.getChildren()).map(async (info) => ({ + info, + argv: await info.getArgv(), + })), + ); + + const result = childrenWithArgv.find(({ argv }) => argv && argvMatcher(argv)); + + if (result) { + return result.info; + } else if (retries > 0) { + await sleep(100); + return getChildMatchingArgv(launcherInfo, argvMatcher, retries - 1); + } else { + console.error( + `getChildMatchingArgv: ${ + childrenWithArgv.length + } child process, none of ["${childrenWithArgv + .map(({ argv }) => (argv || ['no argv']).join(' ')) + .join('", "')}"] match expected arguments`, + ); + + throw new Error("Couldn't find child process"); + } +}; + +/** + * @param {string} prefix + * @param {import("stream").Writable} stdout + * @param {import("stream").Writable} stderr + * @returns {{stdio: [undefined, import("stream").Writable, import("stream").Writable], console: Console}} + */ +export const getConsoleAndStdio = (prefix, stdout, stderr) => { + const { console, out, err } = makeOutputter({ + out: stdout, + err: stderr, + outPrefix: prefix && `${chalk.bold.blue(prefix)}: `, + errPrefix: prefix && `${chalk.bold.red(prefix)}: `, + }); + return { console, stdio: [undefined, out, err] }; +}; diff --git a/runner/lib/tasks/local-chain.js b/runner/lib/tasks/local-chain.js new file mode 100644 index 0000000..c643e23 --- /dev/null +++ b/runner/lib/tasks/local-chain.js @@ -0,0 +1,253 @@ +/* global process */ + +import { dirname } from 'path'; +import { promisify } from 'util'; +import { pipeline as pipelineCallback } from 'stream'; + +import { + childProcessDone, + makeSpawnWithPrintAndPipeOutput, +} from '../helpers/child-process.js'; +import LineStreamTransform from '../helpers/line-stream-transform.js'; +import { PromiseAllOrErrors, tryTimeout } from '../helpers/async.js'; +import { whenStreamSteps } from '../helpers/stream-steps.js'; 
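+// Note on the matcher helpers imported below: getArgvMatcher builds a predicate
+// over a process argv, e.g. (illustrative values only)
+//   getArgvMatcher([/node$/, /chain-entrypoint/])(['/usr/bin/node', '/path/to/chain-entrypoint.js']) // true
+// and wrapArgvMatcherIgnoreEnvShebang additionally accepts the same argv when it
+// is prefixed with an `env` binary such as '/usr/bin/env'.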
+import { + getArgvMatcher, + getChildMatchingArgv, + wrapArgvMatcherIgnoreEnvShebang, + getConsoleAndStdio, +} from './helpers.js'; +import { makeLoadgenTask } from './shared-loadgen.js'; + +const pipeline = promisify(pipelineCallback); + +const chainDirPrefix = '_agstate/agoric-servers/local-chain-'; + +const chainStartRE = /ag-chain-cosmos start --home=(.*)$/; +const chainBlockBeginRE = /block-manager: block (\d+) begin$/; +const clientStartRE = /\bsolo\b\S+entrypoint\.[cm]?js start/; +const clientWalletReadyRE = /(?:Deployed Wallet!|Don't need our provides: wallet)/; + +const chainNodeArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( + getArgvMatcher([/node$/, /chain-entrypoint/]), +); +const chainGoArgvMatcher = getArgvMatcher([/(?:sh|node)$/, /ag-chain-cosmos$/]); +/** @param {string[]} argv */ +const chainArgvMatcher = (argv) => + chainNodeArgvMatcher(argv) || chainGoArgvMatcher(argv); +const clientArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( + getArgvMatcher([/node$/, /\bsolo\b\S+entrypoint\.[cm]?js/]), +); + +/** + * + * @param {Object} powers + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {import("../helpers/fs.js").MakeFIFO} powers.makeFIFO Make a FIFO file readable stream + * @param {import("../helpers/fs.js").FindByPrefix} powers.findDirByPrefix + * @param {import("../helpers/procsfs.js").GetProcessInfo} powers.getProcessInfo + * @returns {import("./types.js").OrchestratorTasks} + * + */ +export const makeTasks = ({ + spawn, + findDirByPrefix, + makeFIFO, + getProcessInfo, +}) => { + const pipedSpawn = makeSpawnWithPrintAndPipeOutput({ + spawn, + end: false, + }); + + /** @param {import("./types.js").TaskBaseOptions & {config?: {reset?: boolean}}} options */ + const setupTasks = async ({ stdout, stderr, config: { reset } = {} }) => { + const { console, stdio } = getConsoleAndStdio( + 'setup-tasks', + stdout, + stderr, + ); + + console.log('Starting'); + + if (reset) { + console.log('Resetting chain node and client state'); + const stateDir = dirname(chainDirPrefix); + await childProcessDone(pipedSpawn('rm', ['-rf', stateDir], { stdio })); + await childProcessDone( + pipedSpawn('git', ['checkout', '--', stateDir], { + stdio, + }), + ); + } + await childProcessDone(pipedSpawn('agoric', ['install'], { stdio })); + + console.log('Done'); + }; + + /** @param {import("./types.js").TaskBaseOptions} options */ + const runChain = async ({ stdout, stderr, timeout = 120 }) => { + const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr); + + console.log('Starting chain'); + + const slogFifo = await makeFIFO('chain.slog'); + const slogLines = new LineStreamTransform(); + const slogPipeResult = pipeline(slogFifo, slogLines); + + const chainEnv = Object.create(process.env); + chainEnv.SLOGFILE = slogFifo.path; + + const launcherCp = pipedSpawn( + 'agoric', + ['start', 'local-chain', '--verbose'], + { stdio, env: chainEnv, detached: true }, + ); + + let stopped = false; + + // Chain exit with code 98 when killed + const chainDone = childProcessDone(launcherCp, { + ignoreExitCode: true, + }).then((code) => { + if (code !== 0 && (!stopped || code !== 98)) { + return Promise.reject( + new Error(`Chain exited with non-zero code: ${code}`), + ); + } + return 0; + }); + + chainDone.then( + () => console.log('Chain exited successfully'), + (error) => console.error('Chain exited with error', error), + ); + + const [chainStarted, firstBlock, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: chainStartRE }, + { matcher: 
chainBlockBeginRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([ + slogPipeResult, + outputParsed, + chainDone, + ]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await chainStarted; + + console.log('Chain running'); + + const [storageLocation, processInfo] = await PromiseAllOrErrors([ + chainStarted.then(findDirByPrefix), + getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + getChildMatchingArgv(launcherInfo, chainArgvMatcher), + ), + ]); + + const stop = () => { + stopped = true; + process.kill(processInfo.pid); + if (slogFifo.pending) { + slogFifo.close(); + } + }; + + return harden({ + stop, + done, + ready: firstBlock, + slogLines: { + [Symbol.asyncIterator]: () => slogLines[Symbol.asyncIterator](), + }, + storageLocation, + processInfo, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, firstBlock]); + launcherCp.kill(); + slogFifo.close(); + }, + ); + }; + + /** @param {import("./types.js").TaskBaseOptions} options */ + const runClient = async ({ stdout, stderr, timeout = 60 }) => { + const { console, stdio } = getConsoleAndStdio('client', stdout, stderr); + + console.log('Starting client'); + + const launcherCp = pipedSpawn('agoric', ['start', 'local-solo'], { + stdio, + detached: true, + }); + + const clientDone = childProcessDone(launcherCp); + + clientDone.then( + () => console.log('Client exited successfully'), + (error) => console.error('Client exited with error', error), + ); + + const [clientStarted, walletReady, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: clientStartRE, resultIndex: -1 }, + { matcher: clientWalletReadyRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([outputParsed, clientDone]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await clientStarted; + + console.log('Client running'); + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + getChildMatchingArgv(launcherInfo, clientArgvMatcher), + ); + + const stop = () => process.kill(processInfo.pid); + + return harden({ + stop, + done, + ready: walletReady, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, walletReady]); + launcherCp.kill(); + }, + ); + }; + + return harden({ + setupTasks, + runChain, + runClient, + runLoadgen: makeLoadgenTask({ pipedSpawn }), + }); +}; diff --git a/runner/lib/tasks/shared-loadgen.js b/runner/lib/tasks/shared-loadgen.js new file mode 100644 index 0000000..bced442 --- /dev/null +++ b/runner/lib/tasks/shared-loadgen.js @@ -0,0 +1,117 @@ +/* global process Buffer */ + +import { PassThrough } from 'stream'; + +import { childProcessDone } from '../helpers/child-process.js'; +import LineStreamTransform from '../helpers/line-stream-transform.js'; +import { PromiseAllOrErrors, tryTimeout } from '../helpers/async.js'; +import { whenStreamSteps } from '../helpers/stream-steps.js'; +import { httpRequest, getConsoleAndStdio } from './helpers.js'; + +const loadgenStartRE = /deploy.*loadgen\/loop\.js/; +const loadgenReadyRE = /server running/; + +/** + * + * @param {Object} powers + * @param {import("../helpers/child-process.js").PipedSpawn} powers.pipedSpawn Spawn with piped output + * @returns {import("./types.js").OrchestratorTasks['runLoadgen']} + * + */ +export 
const makeLoadgenTask = ({ pipedSpawn }) => { + return harden(async ({ stdout, stderr, timeout = 30, config = {} }) => { + const { console, stdio } = getConsoleAndStdio('loadgen', stdout, stderr); + + console.log('Starting loadgen'); + + const loadgenEnv = Object.create(process.env); + // loadgenEnv.DEBUG = 'agoric'; + + const launcherCp = pipedSpawn('agoric', ['deploy', 'loadgen/loop.js'], { + stdio, + env: loadgenEnv, + detached: true, + }); + + let stopped = false; + const stop = () => { + stopped = true; + launcherCp.kill(); + }; + + // Load gen exit with non-zero code when killed + const loadgenDone = childProcessDone(launcherCp).catch((err) => + stopped ? 0 : Promise.reject(err), + ); + + loadgenDone.then( + () => console.log('Load gen app stopped successfully'), + (error) => console.error('Load gen app stopped with error', error), + ); + + // The agoric deploy output is currently sent to stderr + // Combine both stderr and stdout in to detect both steps + // accommodating future changes + const combinedOutput = new PassThrough(); + const outLines = new LineStreamTransform({ lineEndings: true }); + const errLines = new LineStreamTransform({ lineEndings: true }); + launcherCp.stdout.pipe(outLines).pipe(combinedOutput); + launcherCp.stderr.pipe(errLines).pipe(combinedOutput); + + const [deploying, tasksReady, outputParsed] = whenStreamSteps( + combinedOutput, + [{ matcher: loadgenStartRE }, { matcher: loadgenReadyRE }], + { + waitEnd: false, + }, + ); + + const cleanCombined = () => { + launcherCp.stdout.unpipe(outLines); + launcherCp.stderr.unpipe(errLines); + }; + outputParsed.then(cleanCombined, cleanCombined); + + const done = PromiseAllOrErrors([outputParsed, loadgenDone]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await deploying; + + console.log('Load gen app running'); + + const ready = tasksReady.then(async () => { + console.log('Making request to loadgen'); + const body = Buffer.from(JSON.stringify(config), 'utf8'); + + const res = await httpRequest('http://127.0.0.1:3352/config', { + body, + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + 'Content-Length': body.byteLength, + }, + }); + // Consume and discard the response + for await (const _ of res); + + if (!res.statusCode || res.statusCode >= 400) { + throw new Error('Could not start faucet'); + } + }); + + return harden({ + stop, + done, + ready, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([loadgenDone, tasksReady]); + launcherCp.kill(); + }, + ); + }); +}; diff --git a/runner/lib/tasks/testnet.js b/runner/lib/tasks/testnet.js new file mode 100644 index 0000000..d81b63f --- /dev/null +++ b/runner/lib/tasks/testnet.js @@ -0,0 +1,432 @@ +/* global process Buffer */ +/* eslint-disable no-await-in-loop */ + +import { join as joinPath } from 'path'; +import { promisify } from 'util'; +import { pipeline as pipelineCallback } from 'stream'; + +import TOML from '@iarna/toml'; + +import { + childProcessDone, + makeSpawnWithPrintAndPipeOutput, +} from '../helpers/child-process.js'; +import LineStreamTransform from '../helpers/line-stream-transform.js'; +import { + PromiseAllOrErrors, + tryTimeout, + sleep, + aggregateTryFinally, +} from '../helpers/async.js'; +import { whenStreamSteps } from '../helpers/stream-steps.js'; +import { + getArgvMatcher, + getChildMatchingArgv, + wrapArgvMatcherIgnoreEnvShebang, + getConsoleAndStdio, + httpRequest, +} from './helpers.js'; +import { makeLoadgenTask } from 
'./shared-loadgen.js'; + +const pipeline = promisify(pipelineCallback); + +/** + * @param {string} url + * @returns {Promise} + */ +const fetchAsJSON = async (url) => { + const res = await httpRequest(url); + const chunks = []; + for await (const chunk of res) { + chunks.push(chunk); + } + + if (!res.statusCode || res.statusCode >= 400) { + throw new Error(`HTTP request error: ${res.statusCode}`); + } + + // TODO: Check `res.headers['content-type']` for type and charset + return JSON.parse(Buffer.concat(chunks).toString('utf-8')); +}; + +const clientStateDir = '_agstate/agoric-servers/testnet-8000'; + +const chainSwingSetLaunchRE = /launch-chain: Launching SwingSet kernel$/; +const chainBlockBeginRE = /block-manager: block (\d+) begin$/; +const clientStartRE = /\bsolo\b\S+entrypoint\.[cm]?js.* setup(?: .*)$/; +const clientWalletReadyRE = /(?:Deployed Wallet!|Don't need our provides: wallet)/; +const clientSwingSetReadyRE = /start: swingset running$/; + +const clientArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( + getArgvMatcher([/node$/, /\bsolo\b\S+entrypoint\.[cm]?js/]), +); + +/** + * + * @param {Object} powers + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {import("fs/promises")} powers.fs Node.js promisified fs object + * @param {import("../helpers/fs.js").MakeFIFO} powers.makeFIFO Make a FIFO file readable stream + * @param {import("../helpers/procsfs.js").GetProcessInfo} powers.getProcessInfo + * @returns {import("./types.js").OrchestratorTasks} + * + */ +export const makeTasks = ({ spawn, fs, makeFIFO, getProcessInfo }) => { + const pipedSpawn = makeSpawnWithPrintAndPipeOutput({ + spawn, + end: false, + }); + + const chainStateDir = String( + process.env.AG_CHAIN_COSMOS_HOME || + joinPath(process.env.HOME || '~', '.ag-chain-cosmos'), + ); + + let testnetOrigin = 'https://testnet.agoric.net'; + + /** @param {import("./types.js").TaskBaseOptions & {config?: {reset?: boolean, chainOnly?: boolean, withMonitor?: boolean, testnetOrigin?: string}}} options */ + const setupTasks = async ({ + stdout, + stderr, + timeout = 120, + config: { + reset = true, + chainOnly, + withMonitor = true, + testnetOrigin: testnetOriginOption, + } = {}, + }) => { + const { console, stdio } = getConsoleAndStdio( + 'setup-tasks', + stdout, + stderr, + ); + + console.log('Starting'); + + if (testnetOriginOption) { + testnetOrigin = testnetOriginOption; + } + + if (withMonitor !== false) { + if (reset) { + console.log('Resetting chain node'); + await childProcessDone( + pipedSpawn('rm', ['-rf', chainStateDir], { stdio }), + ); + } + + const chainDirStat = await fs + .stat(chainStateDir) + .catch((err) => (err.code === 'ENOENT' ? 
null : Promise.reject(err))); + + if (!chainDirStat) { + console.log('Fetching network config and genesis'); + const { + chainName, + peers, + seeds, + } = /** @type {{chainName: string, peers: string[], seeds: string[]}} */ (await fetchAsJSON( + `${testnetOrigin}/network-config`, + )); + const genesis = await fetchAsJSON(`${testnetOrigin}/genesis.json`); + + await childProcessDone( + pipedSpawn( + 'ag-chain-cosmos', + ['init', '--chain-id', chainName, `loadgen-monitor-${Date.now()}`], + { stdio }, + ), + ); + + fs.writeFile( + joinPath(chainStateDir, 'config', 'genesis.json'), + JSON.stringify(genesis), + ); + + await childProcessDone( + pipedSpawn('ag-chain-cosmos', ['unsafe-reset-all'], { stdio }), + ); + + const configPath = joinPath(chainStateDir, 'config', 'config.toml'); + + console.log('Patching config'); + const config = await TOML.parse.async( + await fs.readFile(configPath, 'utf-8'), + ); + const configP2p = /** @type {TOML.JsonMap} */ (config.p2p); + configP2p.persistent_peers = peers.join(','); + configP2p.seeds = seeds.join(','); + delete config.log_level; + await fs.writeFile(configPath, TOML.stringify(config)); + } + } + + if (reset) { + console.log('Resetting client'); + await childProcessDone( + pipedSpawn('rm', ['-rf', clientStateDir], { stdio }), + ); + + // TODO: start client to provision the first time then kill it + } + + // Make sure client is provisioned + if (chainOnly !== true) { + console.log('Provisioning client'); + + const launcherCp = pipedSpawn( + 'agoric', + ['start', 'testnet', '8000', `${testnetOrigin}/network-config`], + { + stdio, + }, + ); + + const clientDone = childProcessDone(launcherCp); + + const [clientStarted, clientProvisioned, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: clientStartRE, resultIndex: -1 }, + { matcher: clientSwingSetReadyRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + Promise.allSettled([clientProvisioned, outputParsed, clientDone]); + + await aggregateTryFinally( + async () => { + await clientStarted; + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + getChildMatchingArgv(launcherInfo, clientArgvMatcher), + ); + + await aggregateTryFinally( + async () => + tryTimeout(timeout * 1000, async () => clientProvisioned), + async () => { + try { + process.kill(processInfo.pid); + } catch (_) { + // Ignore kill errors + } + }, + ); + + await PromiseAllOrErrors([outputParsed, clientDone]).then(() => {}); + }, + async () => { + launcherCp.kill(); + }, + ); + } + + await childProcessDone(pipedSpawn('agoric', ['install'], { stdio })); + + console.log('Done'); + }; + + /** @param {import("./types.js").TaskBaseOptions} options */ + const runChain = async ({ stdout, stderr, timeout = 30 }) => { + const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr); + + console.log('Starting chain monitor'); + + const slogFifo = await makeFIFO('chain.slog'); + const slogLines = new LineStreamTransform(); + const slogPipeResult = pipeline(slogFifo, slogLines); + + const chainEnv = Object.create(process.env); + chainEnv.SLOGFILE = slogFifo.path; + // chainEnv.DEBUG = 'agoric'; + + const launcherCp = pipedSpawn('ag-chain-cosmos', ['start'], { + stdio, + env: chainEnv, + detached: true, + }); + + let stopped = false; + + // Chain exit with code 98 when killed + const chainDone = childProcessDone(launcherCp, { + ignoreExitCode: true, + }).then((code) => { + if (code !== 0 && (!stopped || code !== 98)) { + return Promise.reject( + new 
Error(`Chain exited with non-zero code: ${code}`), + ); + } + return 0; + }); + + chainDone.then( + () => console.log('Chain exited successfully'), + (error) => console.error('Chain exited with error', error), + ); + + const [swingSetLaunched, firstBlock, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: chainSwingSetLaunchRE }, + { matcher: chainBlockBeginRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([ + slogPipeResult, + outputParsed, + chainDone, + ]).then(() => {}); + + const ready = firstBlock.then(async () => { + let retries = 0; + while (!stopped) { + // Don't pipe output to console, it's too noisy + const statusCp = spawn('ag-chain-cosmos', ['status'], { + stdio: ['ignore', 'pipe', 'pipe'], + }); + + const chunks = []; + for await (const chunk of statusCp.stdout) { + chunks.push(chunk); + } + if ( + (await childProcessDone(statusCp, { + ignoreExitCode: retries < 3, + })) !== 0 + ) { + retries += 1; + await sleep(1 * 1000); + continue; // eslint-disable-line no-continue + } else { + retries = 0; + } + + const status = JSON.parse(Buffer.concat(chunks).toString('utf-8')); + + if (status.SyncInfo.catching_up === false) { + return; + } + + await sleep(5 * 1000); + } + }); + + return tryTimeout( + timeout * 1000, + async () => { + await swingSetLaunched; + + console.log('Chain running'); + + const stop = () => { + stopped = true; + launcherCp.kill(); + }; + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ); + + return harden({ + stop, + done, + ready, + slogLines: { + [Symbol.asyncIterator]: () => slogLines[Symbol.asyncIterator](), + }, + storageLocation: chainStateDir, + processInfo, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, firstBlock]); + launcherCp.kill(); + slogFifo.close(); + }, + ); + }; + + /** @param {import("./types.js").TaskBaseOptions} options */ + const runClient = async ({ stdout, stderr, timeout = 20 }) => { + const { console, stdio } = getConsoleAndStdio('client', stdout, stderr); + + console.log('Starting client'); + + const launcherCp = pipedSpawn( + 'agoric', + ['start', 'testnet', '8000', `${testnetOrigin}/network-config`], + { + stdio, + detached: true, + }, + ); + + const clientDone = childProcessDone(launcherCp); + + clientDone.then( + () => console.log('Client exited successfully'), + (error) => console.error('Client exited with error', error), + ); + + const [clientStarted, walletReady, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: clientStartRE, resultIndex: -1 }, + { matcher: clientWalletReadyRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([outputParsed, clientDone]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await clientStarted; + + console.log('Client running'); + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + getChildMatchingArgv(launcherInfo, clientArgvMatcher), + ); + + const stop = () => process.kill(processInfo.pid); + + return harden({ + stop, + done, + ready: walletReady, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, walletReady]); + launcherCp.kill(); + }, + ); + }; + + return harden({ + setupTasks, + runChain, + runClient, + runLoadgen: makeLoadgenTask({ pipedSpawn }), + }); +}; diff --git 
diff --git a/runner/lib/tasks/types.d.ts b/runner/lib/tasks/types.d.ts
new file mode 100644
index 0000000..74e166e
--- /dev/null
+++ b/runner/lib/tasks/types.d.ts
@@ -0,0 +1,29 @@
+/* eslint-disable no-unused-vars,no-redeclare */
+
+export type TaskResult = {
+  readonly stop: () => void;
+  readonly done: Promise<void>;
+  readonly ready: Promise<void>;
+};
+
+export type RunChainInfo = {
+  readonly slogLines: AsyncIterable<string>;
+  readonly processInfo: import('../helpers/process-info.js').ProcessInfo;
+  readonly storageLocation: string;
+};
+
+export type RunChainResult = TaskResult & RunChainInfo;
+
+export interface TaskBaseOptions {
+  readonly stdout: import('stream').Writable;
+  readonly stderr: import('stream').Writable;
+  readonly timeout?: number;
+  readonly config?: unknown;
+}
+
+export interface OrchestratorTasks {
+  setupTasks(options: TaskBaseOptions): Promise<void>;
+  runChain(options: TaskBaseOptions): Promise<RunChainResult>;
+  runClient(options: TaskBaseOptions): Promise<TaskResult>;
+  runLoadgen(options: TaskBaseOptions): Promise<TaskResult>;
+}
diff --git a/runner/package.json b/runner/package.json
new file mode 100644
index 0000000..76a5e1d
--- /dev/null
+++ b/runner/package.json
@@ -0,0 +1,57 @@
+{
+  "name": "loadgen-runner",
+  "version": "0.1.0",
+  "private": true,
+  "description": "Runner for the load generator",
+  "parsers": {
+    "js": "mjs"
+  },
+  "type": "module",
+  "bin": "bin/loadgen-runner",
+  "scripts": {
+    "build": "exit 0",
+    "test": "exit 0",
+    "lint": "yarn lint:js && yarn lint:prettier && yarn lint:types",
+    "lint:js": "eslint '**/*.{js,ts}'",
+    "lint:prettier": "prettier --check '**/*.{js,ts}'",
+    "lint:types": "tsc -p jsconfig.json",
+    "lint-check": "yarn lint",
+    "lint-fix": "yarn lint-fix:js && yarn lint-fix:prettier",
+    "lint-fix:js": "eslint --fix '**/*.{js,ts}'",
+    "lint-fix:prettier": "prettier --write '**/*.{js,ts}'"
+  },
+  "devDependencies": {
+    "@endo/eslint-config": "^0.3.9",
+    "@types/readline-transform": "^1.0.0",
+    "@types/yargs-parser": "^20.2.1",
+    "ava": "^3.13.0",
+    "eslint": "^7.11.0",
+    "prettier": "^2.1.2"
+  },
+  "dependencies": {
+    "@iarna/toml": "^2.2.3",
+    "anylogger": "^0.21.0",
+    "chalk": "^2.4.2",
+    "deterministic-json": "^1.0.5",
+    "inquirer": "^6.3.1",
+    "readline-transform": "^1.0.0",
+    "ses": "^0.14.0",
+    "yargs-parser": "^20.2.2"
+  },
+  "keywords": [],
+  "author": "Agoric",
+  "license": "Apache-2.0",
+  "eslintConfig": {
+    "extends": [
+      "@endo"
+    ],
+    "rules": {
+      "prettier/prettier": "off"
+    }
+  },
+  "prettier": {
+    "trailingComma": "all",
+    "singleQuote": true
+  },
+  "eslintIgnore": []
+}
diff --git a/scripts/loadgen-daily-perf.service b/scripts/loadgen-daily-perf.service
new file mode 100644
index 0000000..fa713e5
--- /dev/null
+++ b/scripts/loadgen-daily-perf.service
@@ -0,0 +1,17 @@
+[Unit]
+Description=Agoric SDK loadgen daily perf runner
+DefaultDependencies=no
+After=docker.service
+
+[Service]
+Type=exec
+User=benchmark
+Group=benchmark
+WorkingDirectory=/home/benchmark/workspace/daily-perf/
+ExecStart=/home/benchmark/workspace/daily-perf/run.sh
+TimeoutStartSec=0
+TimeoutStopSec=3600
+KillMode=mixed
+
+[Install]
+WantedBy=default.target
diff --git a/scripts/run-daily-perf.sh b/scripts/run-daily-perf.sh
new file mode 100755
index 0000000..bd74033
--- /dev/null
+++ b/scripts/run-daily-perf.sh
@@ -0,0 +1,41 @@
+#!/bin/sh
+set -x
+
+# Runs a full 24h loadgen on the latest HEAD and saves the output in the current directory
+# Requires a docker image named `loadgen-runner`
+
+SDK_REPO="${SDK_REPO:-https://github.com/Agoric/agoric-sdk.git}"
+running=1
+DOCKER_ID=
+SLEEP_PID=
+
+stop_sleep() { [
-z "$SLEEP_PID" ] && return; kill -TERM $SLEEP_PID; exit 0; } +stop_container() { [ -z "${DOCKER_ID}" ] && return; docker kill --signal=SIGTERM ${DOCKER_ID}; } + +trap '' HUP +trap 'running=0; stop_sleep; stop_container' INT TERM + +while [ $running -eq 1 ] +do + while true + do + SDK_REVISION=$(git ls-remote ${SDK_REPO} HEAD | awk '{ print substr($1,1,12) }') + OUTPUT_DIR="daily-perf-${SDK_REVISION}" + [ ! -d "${OUTPUT_DIR}" ] && break + sleep 60 & + SLEEP_PID=$! + wait $SLEEP_PID + SLEEP_PID= + done + echo "processing ${SDK_REVISION}" + mkdir "${OUTPUT_DIR}" + DOCKER_ID=$(docker create -v "$(pwd)/${OUTPUT_DIR}:/out" -e SDK_REVISION=${SDK_REVISION} --name "${OUTPUT_DIR}" loadgen-runner --no-reset) || exit $? + docker start ${DOCKER_ID} + docker wait ${DOCKER_ID} >"${OUTPUT_DIR}/exit_code" & + DOCKER_WAIT_PID=$! + while kill -0 $DOCKER_WAIT_PID 2>/dev/null; do wait $DOCKER_WAIT_PID; done + docker logs ${DOCKER_ID} >"${OUTPUT_DIR}/docker.log" 2>&1 + [ -d "/var/lib/docker" ] && sudo -n cat /var/lib/docker/containers/${DOCKER_ID}/${DOCKER_ID}-json.log >"${OUTPUT_DIR}/docker.json.log" + docker rm ${DOCKER_ID} + DOCKER_ID= +done diff --git a/start.sh b/start.sh index 4621cd5..bc6c939 100755 --- a/start.sh +++ b/start.sh @@ -1,2 +1,55 @@ -#! /bin/sh -exec agoric start testnet ${1+"$@"} +#!/bin/sh +set -e -x + + +LOADGEN_DIR="$(pwd)" + +SDK_REPO="${SDK_REPO:-https://github.com/Agoric/agoric-sdk.git}" + +# Create temporary directory for SDK source if none provided +if [ -z "${SDK_SRC}" ] +then + SDK_REVISION=${SDK_REVISION:-$(git ls-remote ${SDK_REPO} HEAD | awk '{ print substr($1,1,12) }')} + SDK_SRC=/tmp/agoric-sdk-src-${SDK_REVISION} +fi +mkdir -p "${SDK_SRC}" + +# Clone the repo if needed +if [ ! -d "${SDK_SRC}/.git" ] +then + git clone "${SDK_REPO}" "${SDK_SRC}" + if [ ! -z "${SDK_REVISION}" ] + then + git -C "${SDK_SRC}" reset --hard ${SDK_REVISION} + fi +fi + +SDK_FULL_REVISION=$(git -C "${SDK_SRC}" rev-parse HEAD) + +if [ ! 
-z "${SDK_REVISION}" -a "${SDK_FULL_REVISION#${SDK_REVISION}}" = "${SDK_FULL_REVISION}" ] +then + echo "Error: SDK is currently checked out at revision ${SDK_FULL_REVISION} but revision ${SDK_REVISION} was specified" + exit 2 +fi + +SDK_REVISION=$(git -C "${SDK_SRC}" rev-parse --short HEAD) + +AGORIC_BIN_DIR=/tmp/agoric-sdk-bin-${SDK_REVISION} +mkdir -p ${AGORIC_BIN_DIR} + +OUTPUT_DIR="${OUTPUT_DIR:-/tmp/agoric-sdk-out-${SDK_REVISION}}" +mkdir -p "${OUTPUT_DIR}" + +export PATH=$AGORIC_BIN_DIR:$PATH + +cd "$SDK_SRC" +yarn install +yarn build +make -C packages/cosmic-swingset + +rm -f "${AGORIC_BIN_DIR}/agoric" +yarn link-cli "${AGORIC_BIN_DIR}/agoric" + +cd "$LOADGEN_DIR" +agoric install +exec ./runner/bin/loadgen-runner --output-dir="${OUTPUT_DIR}" "$@" 2>&1 diff --git a/yarn.lock b/yarn.lock index fdac896..946bdbd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -14,10 +14,10 @@ dependencies: ses "^0.12.7" -"@agoric/assert@^0.3.5": - version "0.3.5" - resolved "https://registry.yarnpkg.com/@agoric/assert/-/assert-0.3.5.tgz#c7fcf6874e81f2d008acb83eeaafbb5edf8020bd" - integrity sha512-4nrozRJsE2Kn5YSWEalfbs4as5CtSSOWez9iBwqhDaJcRS+ndJHqoYkWNsNHGDstqPl/5X5GcjfhbjdLZQz5ow== +"@agoric/assert@^0.3.7": + version "0.3.7" + resolved "https://registry.yarnpkg.com/@agoric/assert/-/assert-0.3.7.tgz#ccee9e4f1eb6412fe977fa8b887b6224ef901dfa" + integrity sha512-vzNdcFeO/1SMazEGXrdKjutUVP1RVBoDv2L8DvMoVay7NRiV93A0OKvn3f1Xe6HPGRyTQJz3XBmgLKlR1FJYBg== dependencies: ses "^0.13.4" @@ -56,10 +56,10 @@ ses "^0.12.6" source-map "^0.7.3" -"@agoric/bundle-source@^1.4.3": - version "1.4.3" - resolved "https://registry.yarnpkg.com/@agoric/bundle-source/-/bundle-source-1.4.3.tgz#e0a937c219228a5a9ac67bd5148cdcc9f89bcbe2" - integrity sha512-9jEumlkAogE44IEkVTKWjqlCQiHEjRzXmE4gCg5PjRitzcsaxuIgKXDGoFqLulebWmwRIwl/VmQD+yO5Tja9rQ== +"@agoric/bundle-source@^1.4.5": + version "1.4.5" + resolved "https://registry.yarnpkg.com/@agoric/bundle-source/-/bundle-source-1.4.5.tgz#6f18fd1ceb3a649850ed6897fba9e55a39352b1e" + integrity sha512-NkPV4ap8QpU0KPaVpGlzF+vI5jmF2aeY706mcZ4P4zpXO9tvGJQPu5jY5cGDr3EjD+TAv1nwfpztJUQRiUYVzg== dependencies: "@agoric/babel-standalone" "^7.14.3" "@babel/generator" "^7.14.2" @@ -70,22 +70,11 @@ "@rollup/plugin-commonjs" "^19.0.0" "@rollup/plugin-node-resolve" "^13.0.0" acorn "^8.2.4" - esm agoric-labs/esm#Agoric-built + c8 "^7.7.2" rollup "^2.47.0" ses "^0.13.4" source-map "^0.7.3" -"@agoric/captp@^1.7.19": - version "1.7.19" - resolved "https://registry.yarnpkg.com/@agoric/captp/-/captp-1.7.19.tgz#6d9c610046a066365e3b7c65ea83524c8613f67a" - integrity sha512-zqNzrZGTefWngt2ZFzHpgBxBNOOi/qlyREGZ7SKHPSzfFzEm9jql7013S3onY9BZjaZ+qBeX2eXbT2rDAm+ddw== - dependencies: - "@agoric/eventual-send" "^0.13.21" - "@agoric/marshal" "^0.4.18" - "@agoric/nat" "^4.1.0" - "@agoric/promise-kit" "^0.2.19" - esm agoric-labs/esm#Agoric-built - "@agoric/captp@^1.7.6": version "1.7.6" resolved "https://registry.yarnpkg.com/@agoric/captp/-/captp-1.7.6.tgz#06b4f105781ced85951ba8d4957b15a7ec4bd94b" @@ -97,6 +86,17 @@ "@agoric/promise-kit" "^0.2.6" esm "^3.2.5" +"@agoric/captp@^1.8.0": + version "1.8.0" + resolved "https://registry.yarnpkg.com/@agoric/captp/-/captp-1.8.0.tgz#75f0bbdfba807f86925ca380c73e31b0ce9466e3" + integrity sha512-ZwxfKg1C3ENnGWBUskZhHdpw7NEwmVR7yQLrbybkWFDLRarOOZc2PLNUMxbiPyNt+u0ngoWGKa39kVU1YqvIow== + dependencies: + "@agoric/assert" "^0.3.7" + "@agoric/eventual-send" "^0.13.23" + "@agoric/marshal" "^0.4.20" + "@agoric/nat" "^4.1.0" + "@agoric/promise-kit" "^0.2.21" + "@agoric/compartment-mapper@^0.2.3", 
"@agoric/compartment-mapper@^0.2.4": version "0.2.4" resolved "https://registry.yarnpkg.com/@agoric/compartment-mapper/-/compartment-mapper-0.2.4.tgz#db78b3d3c34db2204aba6eaa29155a3e0a371e73" @@ -209,36 +209,36 @@ "@agoric/same-structure" "^0.1.6" "@agoric/store" "^0.4.7" -"@agoric/ertp@^0.11.9": - version "0.11.9" - resolved "https://registry.yarnpkg.com/@agoric/ertp/-/ertp-0.11.9.tgz#794d0a6e8f901d6d2739d0797b84f40fc6657e3d" - integrity sha512-LEjuRFZGmUmrWlvZckWU+rGtY35py0zVIZiCDlLNoXt7/bI13IkaN5Zeed6Altw9Re2hrS/NwQVKqRSZw7dBEw== +"@agoric/ertp@^0.11.11": + version "0.11.11" + resolved "https://registry.yarnpkg.com/@agoric/ertp/-/ertp-0.11.11.tgz#39b3812615b275d6ffd79f5305c27e831dcb4b37" + integrity sha512-kUi2Slr0orBgR2X6DCk8c3wlVlD1ynV2vXIRJJQ4PDi9FU514+j2lp/Lg8Xt8eiYfV26xmXPYiPENbe+OaphVg== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/eventual-send" "^0.13.21" - "@agoric/marshal" "^0.4.18" + "@agoric/assert" "^0.3.7" + "@agoric/eventual-send" "^0.13.23" + "@agoric/marshal" "^0.4.20" "@agoric/nat" "^4.1.0" - "@agoric/notifier" "^0.3.21" - "@agoric/promise-kit" "^0.2.19" - "@agoric/same-structure" "^0.1.19" - "@agoric/store" "^0.4.21" + "@agoric/notifier" "^0.3.23" + "@agoric/promise-kit" "^0.2.21" + "@agoric/same-structure" "^0.1.21" + "@agoric/store" "^0.4.23" "@agoric/eventual-send@*", "@agoric/eventual-send@^0.13.6", "@agoric/eventual-send@^0.13.7": version "0.13.7" resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.7.tgz#6414ffa8b025a34c7074b03a63eb739698ad6c12" integrity sha512-2R94fuM0PXMvZPOotKoYc7dM2PaR2AJ9ZQqnsj7glec32HC/rrm1tZDqBqdpx81oRmdQbsmceMTTlUtcrHoPEg== -"@agoric/eventual-send@^0.13.21": - version "0.13.21" - resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.21.tgz#d715e4353c2c3680714610682d01181398833466" - integrity sha512-z8ARMFuEj8/EO8wZG7LZ91D0GZfh5Y78mLSHnSKp3zs5ncA11Yakj4cLYSSZLl6KnRRKMRMhP4XLJmC+CnZmHQ== +"@agoric/eventual-send@^0.13.23": + version "0.13.23" + resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.23.tgz#93d5a535e59af8ed43dfeb2398ddb470398bb5d2" + integrity sha512-+Ct1+rPrthfTULIZVsxSFJ6sbu0iWVytpmKhUQUELCnKyVANrDrH1OJkEREYuYjPCE1s5wMHW+GCsa3oqZh2WQ== -"@agoric/import-bundle@^0.2.21": - version "0.2.21" - resolved "https://registry.yarnpkg.com/@agoric/import-bundle/-/import-bundle-0.2.21.tgz#50ad82f94fe138a22b119bf67f4ac4a7a6d0f3b8" - integrity sha512-MKWiODn4ghIiD63ra+DQaVjr391Ivgui4KOlnbnaoG2MMwTFOs5y3YW/sNAqfDRGgIeCe3GYwe/mVgO7x/LLnA== +"@agoric/import-bundle@^0.2.23": + version "0.2.23" + resolved "https://registry.yarnpkg.com/@agoric/import-bundle/-/import-bundle-0.2.23.tgz#d95f8206a443736cf7337cc74993b918f13ba1fa" + integrity sha512-MVutpscCNe3pLobBWQQ8NwKF7DTZK46/XCELkxQn6EX5/JInGNIIphf5K9mDm4PcyXQa5U+gZXmHhRIKoIV+og== dependencies: - "@agoric/assert" "^0.3.5" + "@agoric/assert" "^0.3.7" "@endo/base64" "^0.2.4" "@endo/compartment-mapper" "^0.4.1" @@ -273,12 +273,12 @@ "@agoric/eventual-send" "^0.13.6" ses "^0.12.7" -"@agoric/install-ses@^0.5.19": - version "0.5.19" - resolved "https://registry.yarnpkg.com/@agoric/install-ses/-/install-ses-0.5.19.tgz#721b01a4f7d5eac3dac561bf61d0190419a2d464" - integrity sha512-5cCRJWG/pwfvTYRxh5EODbpq8yCdC731GlTkhokXBewCSxfiKenFKU+zqsnyqDFOrDRirM/yA85vER+cwhu1LA== +"@agoric/install-ses@^0.5.21": + version "0.5.21" + resolved "https://registry.yarnpkg.com/@agoric/install-ses/-/install-ses-0.5.21.tgz#19cc528a0e3cece25c23c1756dbc030e255eca55" + integrity 
sha512-gAJMvAWenJGVM7LBkOe2F4YruVaGOAQTLN9proYjiVmlWAaZbMr8hNxw7WPUZJpBvLOKkmcGqf56z6A7L9OagQ== dependencies: - "@agoric/eventual-send" "^0.13.21" + "@agoric/eventual-send" "^0.13.23" ses "^0.13.4" "@agoric/make-hardener@^0.1.2": @@ -286,15 +286,15 @@ resolved "https://registry.yarnpkg.com/@agoric/make-hardener/-/make-hardener-0.1.3.tgz#807b0072bef95d935c3370d406d9dfeb719f69ee" integrity sha512-rc9M2ErE/Zu822OLCnAltr957ZVTsBvVZ7KA2unqDpjo3q7PqZF2hWFB1xXD2Qkfwt5exQ3BjFbkj+NUaTg4gA== -"@agoric/marshal@^0.4.17", "@agoric/marshal@^0.4.18": - version "0.4.18" - resolved "https://registry.yarnpkg.com/@agoric/marshal/-/marshal-0.4.18.tgz#23033b7b74bc24e2b4fb83311823b768ba5ced3a" - integrity sha512-vtcWBoguo2sYJrjfH6XmnqH9+7rNBCTzkGjccenoMlBW80RLyi7u2xJs6Y30XNW3E9wysgaLYuUeSIQ9B+YYzg== +"@agoric/marshal@*", "@agoric/marshal@^0.4.20": + version "0.4.20" + resolved "https://registry.yarnpkg.com/@agoric/marshal/-/marshal-0.4.20.tgz#51832e69819c1a9876d5f03f2b98213d8197ba09" + integrity sha512-YLLhkIqP7FMNoazwhouSSkZXxkMGlmaW+xIFx0FcR87djsHkI7uhDXCno0Bi//ChAxDqqXiLa2aL7GSKUgsqQw== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/eventual-send" "^0.13.21" + "@agoric/assert" "^0.3.7" + "@agoric/eventual-send" "^0.13.23" "@agoric/nat" "^4.1.0" - "@agoric/promise-kit" "^0.2.19" + "@agoric/promise-kit" "^0.2.21" "@agoric/marshal@^0.4.3", "@agoric/marshal@^0.4.4": version "0.4.4" @@ -326,15 +326,15 @@ "@agoric/marshal" "^0.4.4" "@agoric/promise-kit" "^0.2.6" -"@agoric/notifier@^0.3.21": - version "0.3.21" - resolved "https://registry.yarnpkg.com/@agoric/notifier/-/notifier-0.3.21.tgz#b16466a6d4702a329a926ac7e0236045be9de2e3" - integrity sha512-Q6/Nglt/7cdoAPG5LlgB9R2/35ugv+8fqYTVV4dz5OsxPWTLnAGjgcgtN7bWGSTWWaEuXS4k6EuBPU9moAWkVQ== +"@agoric/notifier@^0.3.23": + version "0.3.23" + resolved "https://registry.yarnpkg.com/@agoric/notifier/-/notifier-0.3.23.tgz#a6c67bf404cb58cd2be089ae950f9486d782acd4" + integrity sha512-aNGOXTt3hTbKUk8UqPicijX1O5huUst7IefMJuETgbJHsfU5alCuhayISEXScougbTyIQwpfUW3Cri3SsPdVSw== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/eventual-send" "^0.13.21" - "@agoric/marshal" "^0.4.18" - "@agoric/promise-kit" "^0.2.19" + "@agoric/assert" "^0.3.7" + "@agoric/eventual-send" "^0.13.23" + "@agoric/marshal" "^0.4.20" + "@agoric/promise-kit" "^0.2.21" "@agoric/pegasus@^0.2.0": version "0.2.0" @@ -356,12 +356,12 @@ "@agoric/swingset-vat" "^0.16.0" "@agoric/zoe" "^0.15.0" -"@agoric/promise-kit@^0.2.19": - version "0.2.19" - resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.19.tgz#64207de0a020680ad95575f6a10ebcb322b9f006" - integrity sha512-86E4pw0uVCiJ5bhDR8V7xGO8SSeqcdWGJPS5ilk8UJMMJxrKvAIKx32ZgXcx1Lw2kCHbWbc3/FVCDcGMvB67rA== +"@agoric/promise-kit@^0.2.21": + version "0.2.21" + resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.21.tgz#66aa16a020479c610608f169ae08eea9d018d62a" + integrity sha512-3v7FxmkH5qBkMjQ5Js99VJfKbi06ez5HaRTo/iX47x7i+rzyVB6f9wwVXnlPXWnbFyRxaOuiz1R4hqd6CHEDTA== dependencies: - "@agoric/eventual-send" "^0.13.21" + "@agoric/eventual-send" "^0.13.23" "@agoric/promise-kit@^0.2.6": version "0.2.6" @@ -378,13 +378,13 @@ "@agoric/assert" "^0.2.6" "@agoric/sparse-ints" "^0.1.6" -"@agoric/same-structure@^0.1.19": - version "0.1.19" - resolved "https://registry.yarnpkg.com/@agoric/same-structure/-/same-structure-0.1.19.tgz#e5f469f00399c8eebf563fb80bbd7c5bd0d42224" - integrity sha512-D6YVDBOcW16ZQvpX/kS8e82lJicRybQj0wpSzKI7S0pc8npwoZbIgG9v1/yi9Lm/NnQB5PKjlRQxHUOznpHsEA== +"@agoric/same-structure@^0.1.21": + version "0.1.21" 
+ resolved "https://registry.yarnpkg.com/@agoric/same-structure/-/same-structure-0.1.21.tgz#647afa5de3ac3c728afe27659e773d4d529079fd" + integrity sha512-vv038JuNpr918WOlvnKO6rOkWdA9rJvF+tqyWBi0BIim9TwGYCbMoWHtGTy3z14Rw9pRV/CtHKXvrdTZKuGx8g== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/marshal" "^0.4.18" + "@agoric/assert" "^0.3.7" + "@agoric/marshal" "^0.4.20" "@agoric/same-structure@^0.1.6": version "0.1.6" @@ -431,13 +431,13 @@ "@agoric/assert" "^0.2.6" "@agoric/marshal" "^0.4.4" -"@agoric/store@^0.4.21": - version "0.4.21" - resolved "https://registry.yarnpkg.com/@agoric/store/-/store-0.4.21.tgz#6f7086eb19173d8967eef06782fc79ab92345bcb" - integrity sha512-U3tzAvyhASauDJ4vC88yZF0u/QuekIBJklbNejW6OlOf8KiEOuhdkJQcxAxPtSFcoOQ9OXadGdspjkY3XRRCCg== +"@agoric/store@^0.4.23": + version "0.4.23" + resolved "https://registry.yarnpkg.com/@agoric/store/-/store-0.4.23.tgz#2db7c85b1ba636a1d0277bd0207b0b522042e37c" + integrity sha512-CY9yr1yLnXFEJ8ukrYaPYxuN3asDuuV2vYbPaYCdbzXs7vv9mFkOj/c5c8xz/v3RfWYNRSOulH78bfh5uVN+yA== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/marshal" "^0.4.18" + "@agoric/assert" "^0.3.7" + "@agoric/marshal" "^0.4.20" "@agoric/swing-store-lmdb@^0.4.6": version "0.4.6" @@ -446,12 +446,12 @@ dependencies: node-lmdb "^0.9.4" -"@agoric/swing-store-lmdb@^0.5.5": - version "0.5.5" - resolved "https://registry.yarnpkg.com/@agoric/swing-store-lmdb/-/swing-store-lmdb-0.5.5.tgz#8f5bd3286d70d87c5caf4a605e24774c4a6d0e29" - integrity sha512-PBobEkwQ/xCAo7kU18n5qngy7hztukeickQAdpvqRdYOglwjxEdgmiF3TiIG2RBOD3QB+YHLLcP3zA+fLayllg== +"@agoric/swing-store-lmdb@^0.5.7": + version "0.5.7" + resolved "https://registry.yarnpkg.com/@agoric/swing-store-lmdb/-/swing-store-lmdb-0.5.7.tgz#82f7d8294ddaf89d6cdd4186dd62b97deafc56d8" + integrity sha512-YHFCKRigfAN/hYNQwIZ2Ln33b5pywkLuc6BsvErQY39mAoNxxLupwJTurYzd+CFuIsCYRFQIy1rhgfrFP/8rZw== dependencies: - "@agoric/assert" "^0.3.5" + "@agoric/assert" "^0.3.7" better-sqlite3 "^7.4.1" node-lmdb "^0.9.4" @@ -462,12 +462,12 @@ dependencies: n-readlines "^1.0.0" -"@agoric/swing-store-simple@^0.4.5": - version "0.4.5" - resolved "https://registry.yarnpkg.com/@agoric/swing-store-simple/-/swing-store-simple-0.4.5.tgz#5404a70f6a5e739e8adec3a97c553eeadf6e6524" - integrity sha512-WJI+QRGFBGbqHPuCtHd1BjvoQtnbuFOEla05fIaFMg/NlZT/Wc7nwyh2VTpTc5SATxuc2CFto5wue3pHbFAsAA== +"@agoric/swing-store-simple@^0.4.7": + version "0.4.7" + resolved "https://registry.yarnpkg.com/@agoric/swing-store-simple/-/swing-store-simple-0.4.7.tgz#c4cf6067181ef640883a201f54acadd92769a12e" + integrity sha512-pG+6zungnAWLuWpbjBWJmddQ6UTVeNfnL5MpS7OGOxBzAqX1z5oiPR9ZIDCHiToDlUVZyAKxgjpZQHC560AuYA== dependencies: - "@agoric/assert" "^0.3.5" + "@agoric/assert" "^0.3.7" n-readlines "^1.0.0" "@agoric/swingset-vat@^0.16.0": @@ -506,29 +506,26 @@ tmp "^0.2.1" yargs "^14.2.0" -"@agoric/swingset-vat@^0.18.5": - version "0.18.5" - resolved "https://registry.yarnpkg.com/@agoric/swingset-vat/-/swingset-vat-0.18.5.tgz#e5c3ad4e6fce9f0c23f6967abc26842eadd5ccde" - integrity sha512-3yIbschuJFlOKwOb+sGZ1OAd364CIoH7/EsxmOxNGCDEqIl/PW1HBxAN9vXa0uoI0g3oQDPvQmvzzRqBaX86Hg== +"@agoric/swingset-vat@^0.19.0": + version "0.19.0" + resolved "https://registry.yarnpkg.com/@agoric/swingset-vat/-/swingset-vat-0.19.0.tgz#7b4873d2c93316db3bb07d3756def463607ecf90" + integrity sha512-dgfP0Q/rmPVbsF+5/H8GuX02qn3Pv8ypqssf93hNcoDv2YE3jHEO5TDrPPWX6C9o8vAHlyj/XFaIAdpM+9VGnw== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/babel-parser" "^7.6.4" + "@agoric/assert" "^0.3.7" "@agoric/babel-standalone" "^7.14.3" - 
"@agoric/bundle-source" "^1.4.3" - "@agoric/captp" "^1.7.19" - "@agoric/eventual-send" "^0.13.21" - "@agoric/import-bundle" "^0.2.21" - "@agoric/install-ses" "^0.5.19" - "@agoric/marshal" "^0.4.18" + "@agoric/bundle-source" "^1.4.5" + "@agoric/captp" "^1.8.0" + "@agoric/eventual-send" "^0.13.23" + "@agoric/import-bundle" "^0.2.23" + "@agoric/install-ses" "^0.5.21" + "@agoric/marshal" "^0.4.20" "@agoric/nat" "^4.1.0" - "@agoric/notifier" "^0.3.21" - "@agoric/promise-kit" "^0.2.19" - "@agoric/store" "^0.4.21" - "@agoric/swing-store-lmdb" "^0.5.5" - "@agoric/swing-store-simple" "^0.4.5" - "@agoric/tame-metering" "^2.0.5" - "@agoric/transform-metering" "^1.4.18" - "@agoric/xsnap" "^0.6.8" + "@agoric/notifier" "^0.3.23" + "@agoric/promise-kit" "^0.2.21" + "@agoric/store" "^0.4.23" + "@agoric/swing-store-lmdb" "^0.5.7" + "@agoric/swing-store-simple" "^0.4.7" + "@agoric/xsnap" "^0.6.10" "@endo/base64" "^0.2.4" "@types/tmp" "^0.2.0" anylogger "^0.21.0" @@ -546,23 +543,23 @@ dependencies: esm "^3.2.5" -"@agoric/tame-metering@^2.0.5": - version "2.0.5" - resolved "https://registry.yarnpkg.com/@agoric/tame-metering/-/tame-metering-2.0.5.tgz#e608388a86a33fa5a6214ad7f6381fc20e5bc682" - integrity sha512-EOzB9oAEIcfg4bo2CzhysOLrYGbLdmywcEaFIpkYUjm2ogB4TAXNZL/FydV2n3ZYBopj7TpTjMGCXhlYJIENkQ== +"@agoric/tame-metering@^2.0.7": + version "2.0.7" + resolved "https://registry.yarnpkg.com/@agoric/tame-metering/-/tame-metering-2.0.7.tgz#140b548a33c100bc917ec88812fcc91aed5b3936" + integrity sha512-ryx2YVHAOqfhlVydSyZYptp2g5fWO03qnkaJtLHiRBnYb32Vu2kJy0NPVMsF+rkvjlKfcWugV6TjHY/BQJ3LfQ== "@agoric/transform-eventual-send@^1.4.6": version "1.4.6" resolved "https://registry.yarnpkg.com/@agoric/transform-eventual-send/-/transform-eventual-send-1.4.6.tgz#04f2dca657997d58f5222e67c1f2dce79cccd808" integrity sha512-XICAgqUeitTJusQ2ULUlNh+EVWMDMULTAn+JZV7j0NgUQysqlHB1TaB6pNPJnIkJSLkOFWNomVNtNXZ1WAhzUQ== -"@agoric/transform-metering@^1.4.18": - version "1.4.18" - resolved "https://registry.yarnpkg.com/@agoric/transform-metering/-/transform-metering-1.4.18.tgz#ca380c7d808a40c1215e0570670cea1feb59cbbf" - integrity sha512-NBjY4nib5mJEcxh54cBLjoQxn/nI0iSwriokFm6vNid3zEMa6NiKWey+fYZ+c+DgInngHD+tzCzISJerCID6MA== +"@agoric/transform-metering@^1.4.20": + version "1.4.20" + resolved "https://registry.yarnpkg.com/@agoric/transform-metering/-/transform-metering-1.4.20.tgz#0a9eb24053c560e804daf4deef3344632bd3bbdd" + integrity sha512-RfQhp8n9o3tvLUIozMCrI8HsViklInpIIh3sjcgLEWVVhuv0+sSNnBkN8F9u7dDE3ViWOpvWXQFq45sCWK2/UA== dependencies: "@agoric/nat" "^4.1.0" - "@agoric/tame-metering" "^2.0.5" + "@agoric/tame-metering" "^2.0.7" "@babel/generator" "^7.14.2" "@babel/parser" "^7.14.2" "@babel/traverse" "^7.14.2" @@ -602,17 +599,17 @@ "@agoric/swingset-vat" "^0.16.0" "@agoric/zoe" "^0.15.0" -"@agoric/ui-components@^0.2.15": - version "0.2.16" - resolved "https://registry.yarnpkg.com/@agoric/ui-components/-/ui-components-0.2.16.tgz#4aba4d3d9f5c9787ccbd3a9e7fab0c123c0ded58" - integrity sha512-R+5b0GOeGcrZYYRqX34thDSfnm45xh4k4QceaWrq5jzhPeoUtp95rIX4SA4Mi6P8VyYSOEbKzzoZ2gIjx+Q2lQ== +"@agoric/ui-components@*": + version "0.2.18" + resolved "https://registry.yarnpkg.com/@agoric/ui-components/-/ui-components-0.2.18.tgz#5bca681cedf2ccc034dad088603a8dde6e758037" + integrity sha512-CNPWZ9RYybp/8JIaCYhRwAZc7heMxul2UeniM3pnHpvZ+oYHfsy+5fDtO18JLDyzS8rJ9//pCnlM8b9r+GXs9g== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/ertp" "^0.11.9" - "@agoric/eventual-send" "^0.13.21" - "@agoric/install-ses" "^0.5.19" + "@agoric/assert" "^0.3.7" + "@agoric/ertp" 
"^0.11.11" + "@agoric/eventual-send" "^0.13.23" + "@agoric/install-ses" "^0.5.21" "@agoric/nat" "^4.1.0" - "@agoric/zoe" "^0.17.4" + "@agoric/zoe" "^0.17.6" clsx "^1.1.1" "@agoric/xsnap@^0.5.1": @@ -628,16 +625,16 @@ glob "^7.1.6" ses "^0.12.7" -"@agoric/xsnap@^0.6.8": - version "0.6.8" - resolved "https://registry.yarnpkg.com/@agoric/xsnap/-/xsnap-0.6.8.tgz#b3e2a04287ded8e47101be86c850338570ffeace" - integrity sha512-Mvdr85Zsgvek88lQQgJBlma42I0I5et2aODGuiYMQHAwQQM+vAlLothEy5nqihY/exIXhiq17qabKrUkovOFfQ== +"@agoric/xsnap@^0.6.10": + version "0.6.10" + resolved "https://registry.yarnpkg.com/@agoric/xsnap/-/xsnap-0.6.10.tgz#acff6b343dd2d0db28f94662449fa458e89eb76a" + integrity sha512-0dJqOzqaQkCcnK3B6nZ4s71Fuz7PtNShAgwsqAt4078sUtbyi0Pq6m0pw5XGJrGpopMvZmPIrXG4fhsn4kz34w== dependencies: - "@agoric/assert" "^0.3.5" + "@agoric/assert" "^0.3.7" "@agoric/babel-standalone" "^7.14.3" - "@agoric/bundle-source" "^1.4.3" - "@agoric/eventual-send" "^0.13.21" - "@agoric/install-ses" "^0.5.19" + "@agoric/bundle-source" "^1.4.5" + "@agoric/eventual-send" "^0.13.23" + "@agoric/install-ses" "^0.5.21" esm agoric-labs/esm#Agoric-built glob "^7.1.6" ses "^0.13.4" @@ -661,24 +658,24 @@ "@agoric/swingset-vat" "^0.16.0" "@agoric/transform-metering" "^1.4.6" -"@agoric/zoe@^0.17.4": - version "0.17.4" - resolved "https://registry.yarnpkg.com/@agoric/zoe/-/zoe-0.17.4.tgz#3f577bce6005a05849ef4ce596692625067d5938" - integrity sha512-GLIT5+qTSniHKUOX93x9PcbSVALiEJoXpiuBeUv1sB8Zvb6ziq03JfYsyIKy1lVNnYjm1w7LnSd8dAESUE9ZQg== - dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/bundle-source" "^1.4.3" - "@agoric/ertp" "^0.11.9" - "@agoric/eventual-send" "^0.13.21" - "@agoric/import-bundle" "^0.2.21" - "@agoric/marshal" "^0.4.18" +"@agoric/zoe@^0.17.6": + version "0.17.6" + resolved "https://registry.yarnpkg.com/@agoric/zoe/-/zoe-0.17.6.tgz#45ce7002854ff48dee1a1566efedb75eace63379" + integrity sha512-pPVjObYh+A2l8Ih2MofLFyDY3eOZPFa6eSWfXE0EM7m/QufL58AcWL1wVXJcKXyMCaPD2UydXnQk/kmMvtLdvA== + dependencies: + "@agoric/assert" "^0.3.7" + "@agoric/bundle-source" "^1.4.5" + "@agoric/ertp" "^0.11.11" + "@agoric/eventual-send" "^0.13.23" + "@agoric/import-bundle" "^0.2.23" + "@agoric/marshal" "^0.4.20" "@agoric/nat" "^4.1.0" - "@agoric/notifier" "^0.3.21" - "@agoric/promise-kit" "^0.2.19" - "@agoric/same-structure" "^0.1.19" - "@agoric/store" "^0.4.21" - "@agoric/swingset-vat" "^0.18.5" - "@agoric/transform-metering" "^1.4.18" + "@agoric/notifier" "^0.3.23" + "@agoric/promise-kit" "^0.2.21" + "@agoric/same-structure" "^0.1.21" + "@agoric/store" "^0.4.23" + "@agoric/swingset-vat" "^0.19.0" + "@agoric/transform-metering" "^1.4.20" "@babel/code-frame@7.12.11": version "7.12.11" @@ -991,6 +988,11 @@ "@babel/helper-validator-identifier" "^7.14.5" to-fast-properties "^2.0.0" +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + "@concordance/react@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@concordance/react/-/react-2.0.0.tgz#aef913f27474c53731f4fd79cc2f54897de90fde" @@ -1064,15 +1066,15 @@ minimatch "^3.0.4" strip-json-comments "^3.1.1" -"@eslint/eslintrc@^0.4.2": - version "0.4.2" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.2.tgz#f63d0ef06f5c0c57d76c4ab5f63d3835c51b0179" - integrity 
sha512-8nmGq/4ycLpIwzvhI4tNDmQztZ8sp+hI7cyG8i1nQDhkAbRzHpXPidRAHlNvCZQpJTKw5ItIpMw9RSToGF00mg== +"@eslint/eslintrc@^0.4.1": + version "0.4.1" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.1.tgz#442763b88cecbe3ee0ec7ca6d6dd6168550cbf14" + integrity sha512-5v7TDE9plVhvxQeWLXDTvFvJBdH6pEsdnl2g/dAptmuFEPedQ4Erq5rsDsX+mvAM610IhNaO2W5V1dOOnDKxkQ== dependencies: ajv "^6.12.4" debug "^4.1.1" espree "^7.3.0" - globals "^13.9.0" + globals "^12.1.0" ignore "^4.0.6" import-fresh "^3.2.1" js-yaml "^3.13.1" @@ -1084,6 +1086,11 @@ resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + "@nodelib/fs.scandir@2.1.4": version "2.1.4" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz#d4b3549a5db5de2683e0c1071ab4f140904bbf69" @@ -1240,6 +1247,11 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== +"@types/istanbul-lib-coverage@^2.0.1": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz#4ba8ddb720221f432e443bd5f9117fd22cfd4762" + integrity sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw== + "@types/json5@^0.0.29": version "0.0.29" resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" @@ -1255,6 +1267,13 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== +"@types/readline-transform@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@types/readline-transform/-/readline-transform-1.0.0.tgz#c47f1a31606f8c96cdbb3728f3e775617846cbbb" + integrity sha512-Jgc7Sr78d3EJ4AV+GXfs3BHUxPTC2pn4DNeRu78O04UVSOFux8dnMdfKZkdjw739mhjxdot4+HVWTLXjvtpiFQ== + dependencies: + "@types/node" "*" + "@types/resolve@0.0.8": version "0.0.8" resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194" @@ -1274,48 +1293,53 @@ resolved "https://registry.yarnpkg.com/@types/tmp/-/tmp-0.2.0.tgz#e3f52b4d7397eaa9193592ef3fdd44dc0af4298c" integrity sha512-flgpHJjntpBAdJD43ShRosQvNC0ME97DCfGvZEDlAThQmnerRXrLbX6YgzRBQCZTthET9eAWFAMaYP0m0Y4HzQ== +"@types/yargs-parser@^20.2.1": + version "20.2.1" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-20.2.1.tgz#3b9ce2489919d9e4fea439b76916abc34b2df129" + integrity sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw== + "@typescript-eslint/parser@^4.18.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.28.0.tgz#2404c16751a28616ef3abab77c8e51d680a12caa" - integrity sha512-7x4D22oPY8fDaOCvkuXtYYTQ6mTMmkivwEzS+7iml9F9VkHGbbZ3x4fHRwxAb5KeuSkLqfnYjs46tGx2Nour4A== + version "4.25.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.25.0.tgz#6b2cb6285aa3d55bfb263c650739091b0f19aceb" + integrity sha512-OZFa1SKyEJpAhDx8FcbWyX+vLwh7OEtzoo2iQaeWwxucyfbi0mT4DijbOSsTgPKzGHr6GrF2V5p/CEpUH/VBxg== dependencies: - "@typescript-eslint/scope-manager" "4.28.0" - "@typescript-eslint/types" "4.28.0" - "@typescript-eslint/typescript-estree" "4.28.0" - debug "^4.3.1" + "@typescript-eslint/scope-manager" "4.25.0" + "@typescript-eslint/types" "4.25.0" + "@typescript-eslint/typescript-estree" "4.25.0" + debug "^4.1.1" -"@typescript-eslint/scope-manager@4.28.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.28.0.tgz#6a3009d2ab64a30fc8a1e257a1a320067f36a0ce" - integrity sha512-eCALCeScs5P/EYjwo6se9bdjtrh8ByWjtHzOkC4Tia6QQWtQr3PHovxh3TdYTuFcurkYI4rmFsRFpucADIkseg== +"@typescript-eslint/scope-manager@4.25.0": + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.25.0.tgz#9d86a5bcc46ef40acd03d85ad4e908e5aab8d4ca" + integrity sha512-2NElKxMb/0rya+NJG1U71BuNnp1TBd1JgzYsldsdA83h/20Tvnf/HrwhiSlNmuq6Vqa0EzidsvkTArwoq+tH6w== dependencies: - "@typescript-eslint/types" "4.28.0" - "@typescript-eslint/visitor-keys" "4.28.0" + "@typescript-eslint/types" "4.25.0" + "@typescript-eslint/visitor-keys" "4.25.0" -"@typescript-eslint/types@4.28.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.28.0.tgz#a33504e1ce7ac51fc39035f5fe6f15079d4dafb0" - integrity sha512-p16xMNKKoiJCVZY5PW/AfILw2xe1LfruTcfAKBj3a+wgNYP5I9ZEKNDOItoRt53p4EiPV6iRSICy8EPanG9ZVA== +"@typescript-eslint/types@4.25.0": + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.25.0.tgz#0e444a5c5e3c22d7ffa5e16e0e60510b3de5af87" + integrity sha512-+CNINNvl00OkW6wEsi32wU5MhHti2J25TJsJJqgQmJu3B3dYDBcmOxcE5w9cgoM13TrdE/5ND2HoEnBohasxRQ== -"@typescript-eslint/typescript-estree@4.28.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.28.0.tgz#e66d4e5aa2ede66fec8af434898fe61af10c71cf" - integrity sha512-m19UQTRtxMzKAm8QxfKpvh6OwQSXaW1CdZPoCaQuLwAq7VZMNuhJmZR4g5281s2ECt658sldnJfdpSZZaxUGMQ== +"@typescript-eslint/typescript-estree@4.25.0": + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.25.0.tgz#942e4e25888736bff5b360d9b0b61e013d0cfa25" + integrity sha512-1B8U07TGNAFMxZbSpF6jqiDs1cVGO0izVkf18Q/SPcUAc9LhHxzvSowXDTvkHMWUVuPpagupaW63gB6ahTXVlg== dependencies: - "@typescript-eslint/types" "4.28.0" - "@typescript-eslint/visitor-keys" "4.28.0" - debug "^4.3.1" - globby "^11.0.3" + "@typescript-eslint/types" "4.25.0" + "@typescript-eslint/visitor-keys" "4.25.0" + debug "^4.1.1" + globby "^11.0.1" is-glob "^4.0.1" - semver "^7.3.5" - tsutils "^3.21.0" + semver "^7.3.2" + tsutils "^3.17.1" -"@typescript-eslint/visitor-keys@4.28.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.28.0.tgz#255c67c966ec294104169a6939d96f91c8a89434" - integrity sha512-PjJyTWwrlrvM5jazxYF5ZPs/nl0kHDZMVbuIcbpawVXaDPelp3+S9zpOz5RmVUfS/fD5l5+ZXNKnWhNYjPzCvw== +"@typescript-eslint/visitor-keys@4.25.0": + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.25.0.tgz#863e7ed23da4287c5b469b13223255d0fde6aaa7" + integrity sha512-AmkqV9dDJVKP/TcZrbf6s6i1zYXt5Hl8qOLrRDTFfRNae4+LB8A4N3i+FLZPW85zIxRy39BgeWOfMS3HoH5ngg== 
dependencies: - "@typescript-eslint/types" "4.28.0" + "@typescript-eslint/types" "4.25.0" eslint-visitor-keys "^2.0.0" abbrev@1: @@ -1512,7 +1536,7 @@ array-flatten@1.1.1: resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= -array-includes@^3.1.1, array-includes@^3.1.2, array-includes@^3.1.3: +array-includes@^3.1.1, array-includes@^3.1.2: version "3.1.3" resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.3.tgz#c7f619b382ad2afaf5326cddfdc0afc61af7690a" integrity sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A== @@ -1528,7 +1552,7 @@ array-union@^2.1.0: resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -array.prototype.flat@^1.2.3, array.prototype.flat@^1.2.4: +array.prototype.flat@^1.2.3: version "1.2.4" resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz#6ef638b43312bd401b4c6199fdec7e2dc9e9a123" integrity sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg== @@ -1811,6 +1835,24 @@ bytes@3.1.0: resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== +c8@^7.7.2: + version "7.8.0" + resolved "https://registry.yarnpkg.com/c8/-/c8-7.8.0.tgz#8fcfe848587d9d5796f22e9b0546a387a66d1b3b" + integrity sha512-x2Bx+IIEd608B1LmjiNQ/kizRPkCWo5XzuV57J9afPjAHSnYXALwbCSOkQ7cSaNXBNblfqcvdycj+klmL+j6yA== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@istanbuljs/schema" "^0.1.2" + find-up "^5.0.0" + foreground-child "^2.0.0" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-report "^3.0.0" + istanbul-reports "^3.0.2" + rimraf "^3.0.0" + test-exclude "^6.0.0" + v8-to-istanbul "^8.0.0" + yargs "^16.2.0" + yargs-parser "^20.2.7" + cacheable-request@^6.0.0: version "6.1.0" resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" @@ -2127,6 +2169,13 @@ content-type@~1.0.4: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== +convert-source-map@^1.6.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" @@ -2159,7 +2208,7 @@ core-util-is@1.0.2, core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= -cross-spawn@^7.0.2: +cross-spawn@^7.0.0, cross-spawn@^7.0.2: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity 
sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -2206,13 +2255,6 @@ debug@2.6.9, debug@^2.6.9: dependencies: ms "2.0.0" -debug@^3.2.7: - version "3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== - dependencies: - ms "^2.1.1" - debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: version "4.3.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" @@ -2339,13 +2381,6 @@ doctrine@1.5.0: esutils "^2.0.2" isarray "^1.0.0" -doctrine@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== - dependencies: - esutils "^2.0.2" - doctrine@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" @@ -2509,17 +2544,12 @@ eslint-config-airbnb-base@^14.0.0: object.assign "^4.1.2" object.entries "^1.1.2" -eslint-config-jessie@^0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/eslint-config-jessie/-/eslint-config-jessie-0.0.4.tgz#00845c81f8bc1c1d1c2386d5fda3f165532ed8ae" - integrity sha512-jmWq+A7iAKev6nfqpQj/HqC+SMqje7r1HzXqqbInzKOO1OEd9b9MtLJaIe1VpqftS5kDztSubSc9ygzUZCgGaA== - eslint-config-jessie@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/eslint-config-jessie/-/eslint-config-jessie-0.0.6.tgz#429de65983cdfcb161c62a5169605ded6130487b" integrity sha512-L19U3+qWJdhdUjxW7NkkB9X+343MwUB81dplbxwcbBCvrWA8WwmiWYww0g23j4Oz/Vy8KmdW1cyW5Ii6s5IJzQ== -eslint-config-prettier@^6.12.0, eslint-config-prettier@^6.9.0: +eslint-config-prettier@^6.12.0: version "6.15.0" resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-6.15.0.tgz#7f93f6cb7d45a92f1537a70ecc06366e1ac6fed9" integrity sha512-a1+kOYLR8wMGustcgAjdydMsQ2A/2ipRPwRKUmfYaSxc9ZPcrku080Ctl6zrZzZNs/U82MjSv+qKREkoq3bJaw== @@ -2542,14 +2572,6 @@ eslint-module-utils@^2.6.0: debug "^2.6.9" pkg-dir "^2.0.0" -eslint-module-utils@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz#b51be1e473dd0de1c5ea638e22429c2490ea8233" - integrity sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A== - dependencies: - debug "^3.2.7" - pkg-dir "^2.0.0" - eslint-plugin-eslint-comments@^3.1.2: version "3.2.0" resolved "https://registry.yarnpkg.com/eslint-plugin-eslint-comments/-/eslint-plugin-eslint-comments-3.2.0.tgz#9e1cd7b4413526abb313933071d7aba05ca12ffa" @@ -2558,27 +2580,6 @@ eslint-plugin-eslint-comments@^3.1.2: escape-string-regexp "^1.0.5" ignore "^5.0.5" -eslint-plugin-import@^2.20.0: - version "2.23.4" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz#8dceb1ed6b73e46e50ec9a5bb2411b645e7d3d97" - integrity sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ== - dependencies: - array-includes "^3.1.3" - array.prototype.flat "^1.2.4" - debug "^2.6.9" - doctrine "^2.1.0" - eslint-import-resolver-node "^0.3.4" - eslint-module-utils "^2.6.1" - find-up "^2.0.0" - has "^1.0.3" - is-core-module "^2.4.0" - minimatch "^3.0.4" - object.values "^1.1.3" - pkg-up "^2.0.0" - read-pkg-up "^3.0.0" - 
resolve "^1.20.0" - tsconfig-paths "^3.9.0" - eslint-plugin-import@^2.22.1: version "2.22.1" resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.1.tgz#0896c7e6a0cf44109a2d97b95903c2bb689d7702" @@ -2704,12 +2705,12 @@ eslint@^7.11.0: v8-compile-cache "^2.0.3" eslint@^7.23.0: - version "7.29.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.29.0.tgz#ee2a7648f2e729485e4d0bd6383ec1deabc8b3c0" - integrity sha512-82G/JToB9qIy/ArBzIWG9xvvwL3R86AlCjtGw+A29OMZDqhTybz/MByORSukGxeI+YPCR4coYyITKk8BFH9nDA== + version "7.27.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.27.0.tgz#665a1506d8f95655c9274d84bd78f7166b07e9c7" + integrity sha512-JZuR6La2ZF0UD384lcbnd0Cgg6QJjiCwhMD6eU4h/VGPcVGwawNNzKU41tgokGXnfjOOyI6QIffthhJTPzzuRA== dependencies: "@babel/code-frame" "7.12.11" - "@eslint/eslintrc" "^0.4.2" + "@eslint/eslintrc" "^0.4.1" ajv "^6.10.0" chalk "^4.0.0" cross-spawn "^7.0.2" @@ -2726,7 +2727,7 @@ eslint@^7.23.0: fast-deep-equal "^3.1.3" file-entry-cache "^6.0.1" functional-red-black-tree "^1.0.1" - glob-parent "^5.1.2" + glob-parent "^5.0.0" globals "^13.6.0" ignore "^4.0.6" import-fresh "^3.0.0" @@ -2753,7 +2754,7 @@ esm@^3.2.25, esm@^3.2.5: resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10" integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA== -"esm@github:agoric-labs/esm#Agoric-built": +esm@agoric-labs/esm#Agoric-built: version "3.2.25" resolved "https://codeload.github.com/agoric-labs/esm/tar.gz/3603726ad4636b2f865f463188fcaade6375638e" @@ -2992,6 +2993,14 @@ find-up@^4.0.0: locate-path "^5.0.0" path-exists "^4.0.0" +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + flat-cache@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" @@ -3005,6 +3014,14 @@ flatted@^3.1.0: resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469" integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== +foreground-child@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-2.0.0.tgz#71b32800c9f15aa8f2f83f4a6bd9bff35d861a53" + integrity sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^3.0.2" + forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" @@ -3125,7 +3142,7 @@ github-from-package@0.0.0: resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" integrity sha1-l/tdlr/eiXMxPyDoKI75oWf6ZM4= -glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.2, glob-parent@~5.1.0: +glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@~5.1.0: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== @@ -3170,13 +3187,6 
@@ globals@^13.6.0: dependencies: type-fest "^0.20.2" -globals@^13.9.0: - version "13.9.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.9.0.tgz#4bf2bf635b334a173fb1daf7c5e6b218ecdc06cb" - integrity sha512-74/FduwI/JaIrr1H8e71UbDE+5x7pIPs1C2rrwC52SszOo043CsWOZEMW7o2Y58xwm9b+0RBKDxY5n2sUpEFxA== - dependencies: - type-fest "^0.20.2" - globby@^11.0.1: version "11.0.3" resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" @@ -3189,18 +3199,6 @@ globby@^11.0.1: merge2 "^1.3.0" slash "^3.0.0" -globby@^11.0.3: - version "11.0.4" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5" - integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - got@^9.6.0: version "9.6.0" resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" @@ -3278,6 +3276,11 @@ hosted-git-info@^2.1.4: resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + http-cache-semantics@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390" @@ -3477,13 +3480,6 @@ is-core-module@^2.2.0: dependencies: has "^1.0.3" -is-core-module@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.4.0.tgz#8e9fc8e15027b011418026e98f0e6f4d86305cc1" - integrity sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A== - dependencies: - has "^1.0.3" - is-date-object@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" @@ -3643,6 +3639,28 @@ isstream@~0.1.2: resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= +istanbul-lib-coverage@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec" + integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg== + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-reports@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b" + integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== + dependencies: + html-escaper "^2.0.0" + 
istanbul-lib-report "^3.0.0" + js-string-escape@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/js-string-escape/-/js-string-escape-1.0.1.tgz#e2625badbc0d67c7533e9edc1068c587ae4137ef" @@ -3809,16 +3827,6 @@ load-json-file@^2.0.0: pify "^2.0.0" strip-bom "^3.0.0" -load-json-file@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" - integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs= - dependencies: - graceful-fs "^4.1.2" - parse-json "^4.0.0" - pify "^3.0.0" - strip-bom "^3.0.0" - load-json-file@^5.2.0: version "5.3.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-5.3.0.tgz#4d3c1e01fa1c03ea78a60ac7af932c9ce53403f3" @@ -3853,6 +3861,13 @@ locate-path@^5.0.0: dependencies: p-locate "^4.1.0" +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + lodash.clonedeep@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" @@ -4086,7 +4101,7 @@ ms@2.1.2: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@^2.1.1, ms@^2.1.3: +ms@^2.1.3: version "2.1.3" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -4249,7 +4264,7 @@ object.entries@^1.1.2: es-abstract "^1.18.0-next.1" has "^1.0.3" -object.values@^1.1.1, object.values@^1.1.3: +object.values@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.3.tgz#eaa8b1e17589f02f698db093f7c62ee1699742ee" integrity sha512-nkF6PfDB9alkOUxpf1HNm/QlkeW3SReqL5WXeBLpEJJnlPSvRaDQpW3gQTksTN3fgJX4hL42RzKyOin6ff3tyw== @@ -4365,6 +4380,13 @@ p-limit@^2.0.0, p-limit@^2.2.0: dependencies: p-try "^2.0.0" +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + p-locate@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" @@ -4386,6 +4408,13 @@ p-locate@^4.1.0: dependencies: p-limit "^2.2.0" +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + p-map@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" @@ -4499,13 +4528,6 @@ path-type@^2.0.0: dependencies: pify "^2.0.0" -path-type@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" - integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== - 
dependencies: - pify "^3.0.0" - path-type@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" @@ -4526,11 +4548,6 @@ pify@^2.0.0: resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= - pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" @@ -4558,13 +4575,6 @@ pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" -pkg-up@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-2.0.0.tgz#c819ac728059a461cab1c3889a2be3c49a004d7f" - integrity sha1-yBmscoBZpGHKscOImivjxJoATX8= - dependencies: - find-up "^2.1.0" - plur@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/plur/-/plur-4.0.0.tgz#729aedb08f452645fe8c58ef115bf16b0a73ef84" @@ -4612,11 +4622,6 @@ prettier-linter-helpers@^1.0.0: dependencies: fast-diff "^1.1.2" -prettier@^1.18.2: - version "1.19.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.19.1.tgz#f7d7f5ff8a9cd872a7be4ca142095956a60797cb" - integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew== - prettier@^2.1.2: version "2.2.1" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.2.1.tgz#795a1a78dd52f073da0cd42b21f9c91381923ff5" @@ -4729,14 +4734,6 @@ read-pkg-up@^2.0.0: find-up "^2.0.0" read-pkg "^2.0.0" -read-pkg-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07" - integrity sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc= - dependencies: - find-up "^2.0.0" - read-pkg "^3.0.0" - read-pkg@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" @@ -4746,15 +4743,6 @@ read-pkg@^2.0.0: normalize-package-data "^2.3.2" path-type "^2.0.0" -read-pkg@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389" - integrity sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k= - dependencies: - load-json-file "^4.0.0" - normalize-package-data "^2.3.2" - path-type "^3.0.0" - read-pkg@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" @@ -4794,6 +4782,11 @@ readdirp@~3.5.0: dependencies: picomatch "^2.2.1" +readline-transform@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/readline-transform/-/readline-transform-1.0.0.tgz#3157f97428acaec0f05a5c1ff2c3120f4e6d904b" + integrity sha512-7KA6+N9IGat52d83dvxnApAWN+MtVb1MiVuMR/cf1O4kYsJG+g/Aav0AHcHKsb6StinayfPLne0+fMX2sOzAKg== + regenerator-runtime@^0.13.4: version "0.13.7" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55" @@ -4886,7 +4879,7 @@ resolve-from@^5.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== -resolve@^1.10.0, resolve@^1.11.0, resolve@^1.11.1, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, 
resolve@^1.19.0, resolve@^1.20.0: +resolve@^1.10.0, resolve@^1.11.0, resolve@^1.11.1, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.19.0: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== @@ -5021,7 +5014,7 @@ semver@^6.0.0, semver@^6.2.0, semver@^6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.1.3, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5: +semver@^7.1.3, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4: version "7.3.5" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== @@ -5078,6 +5071,11 @@ ses@^0.13.4: resolved "https://registry.yarnpkg.com/ses/-/ses-0.13.4.tgz#d547202ccb180c4a161e52007a9edff51d5fa9fd" integrity sha512-7pgHZF4i6tuMAhD6GeNgA8fxfgzeQ5queyRFnKfKPM6nvCEKd8JP3ityQ04rVspTzkzu9R0b2I4nb05ukblQDA== +ses@^0.14.0: + version "0.14.0" + resolved "https://registry.yarnpkg.com/ses/-/ses-0.14.0.tgz#d71a5ccac8e9054155c7e63a52325e924edd3b41" + integrity sha512-LQPb6IS19K12Osvndmu7lHoJVuYyxPPBAUSZlcVeOmHF4LMzjT8vIVoMYLyk4LHvmt6wBbrJph9JkIiD1+femQ== + set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" @@ -5434,6 +5432,15 @@ temp@^0.9.1: mkdirp "^0.5.1" rimraf "~2.6.2" +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" @@ -5513,7 +5520,7 @@ tslib@^1.8.1, tslib@^1.9.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tsutils@^3.21.0: +tsutils@^3.17.1: version "3.21.0" resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== @@ -5580,9 +5587,9 @@ typedarray-to-buffer@^3.1.5: is-typedarray "^1.0.0" typescript@^4.2.3: - version "4.3.4" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.3.4.tgz#3f85b986945bcf31071decdd96cf8bfa65f9dcbc" - integrity sha512-uauPG7XZn9F/mo+7MrsRjyvbxFpzemRjKEZXS4AK83oP2KKOJPvb+9cO/gmnv8arWZvhnjVOXz7B49m1l0e9Ew== + version "4.3.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.3.2.tgz#399ab18aac45802d6f2498de5054fcbbe716a805" + integrity sha512-zZ4hShnmnoVnAHpVHWpTcxdv7dWP60S2FsydQLV8V5PbS3FifjWFFRiHSWpDJahly88PRyV5teTSLoq4eG7mKw== unbox-primitive@^1.0.0: version "1.0.1" @@ -5660,6 +5667,15 @@ v8-compile-cache@^2.0.3: resolved 
"https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== +v8-to-istanbul@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.0.0.tgz#4229f2a99e367f3f018fa1d5c2b8ec684667c69c" + integrity sha512-LkmXi8UUNxnCC+JlH7/fsfsKr5AU110l+SYGJimWNkWhxbN5EyeOtm1MJ0hhvqMMOhGwBj1Fp70Yv9i+hX0QAg== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" @@ -5807,6 +5823,11 @@ yargs-parser@^20.2.2: resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== +yargs-parser@^20.2.7: + version "20.2.9" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + yargs@^14.2.0: version "14.2.3" resolved "https://registry.yarnpkg.com/yargs/-/yargs-14.2.3.tgz#1a1c3edced1afb2a2fea33604bc6d1d8d688a414" @@ -5836,3 +5857,8 @@ yargs@^16.2.0: string-width "^4.2.0" y18n "^5.0.5" yargs-parser "^20.2.2" + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==