From 7472dc24d012fac5aacd2319f0dc39f8e11e0eb0 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 18 Jun 2021 17:18:36 +0000 Subject: [PATCH 01/34] Initial runner implementation --- loadgen/loop.js | 10 +- package.json | 11 +- runner/bin/loadgen-runner | 2 + runner/jsconfig.json | 22 + runner/lib/entrypoint.js | 66 ++ runner/lib/helpers/async.d.ts | 138 ++++ runner/lib/helpers/async.js | 98 +++ runner/lib/helpers/child-process.js | 97 +++ runner/lib/helpers/fs.js | 153 ++++ runner/lib/helpers/line-stream-transform.d.ts | 16 + runner/lib/helpers/line-stream-transform.js | 33 + runner/lib/helpers/outputter.js | 43 + runner/lib/helpers/process-info.d.ts | 74 ++ runner/lib/helpers/procsfs.js | 266 +++++++ runner/lib/helpers/stream-steps.js | 60 ++ runner/lib/main.js | 735 ++++++++++++++++++ runner/lib/test-local-chain.js | 457 +++++++++++ runner/lib/test-operations.d.ts | 28 + runner/package.json | 57 ++ start.sh | 30 +- yarn.lock | 257 ++---- 21 files changed, 2465 insertions(+), 188 deletions(-) create mode 100755 runner/bin/loadgen-runner create mode 100644 runner/jsconfig.json create mode 100644 runner/lib/entrypoint.js create mode 100644 runner/lib/helpers/async.d.ts create mode 100644 runner/lib/helpers/async.js create mode 100644 runner/lib/helpers/child-process.js create mode 100644 runner/lib/helpers/fs.js create mode 100644 runner/lib/helpers/line-stream-transform.d.ts create mode 100644 runner/lib/helpers/line-stream-transform.js create mode 100644 runner/lib/helpers/outputter.js create mode 100644 runner/lib/helpers/process-info.d.ts create mode 100644 runner/lib/helpers/procsfs.js create mode 100644 runner/lib/helpers/stream-steps.js create mode 100644 runner/lib/main.js create mode 100644 runner/lib/test-local-chain.js create mode 100644 runner/lib/test-operations.d.ts create mode 100644 runner/package.json diff --git a/loadgen/loop.js b/loadgen/loop.js index 4777093..11ddb1d 100644 --- a/loadgen/loop.js +++ b/loadgen/loop.js @@ -1,4 +1,6 @@ /* global setInterval clearInterval setTimeout clearTimeout */ +/* eslint-disable no-continue */ + import { performance } from 'perf_hooks'; import http from 'http'; import { prepareFaucet } from './task-tap-faucet'; @@ -117,7 +119,7 @@ function updateConfig(config) { } } -function startServer() { +async function startServer() { const server = http.createServer((req, res) => { const url = new URL(req.url, `http://${req.headers.host}`); // console.log(`pathname ${url.pathname}, ${req.method}`); @@ -153,6 +155,9 @@ function startServer() { } }); server.listen(3352, '127.0.0.1'); + return new Promise((resolve, reject) => { + server.on('listening', resolve).on('error', reject); + }); } export default async function runCycles(homePromise, deployPowers) { @@ -170,7 +175,8 @@ export default async function runCycles(homePromise, deployPowers) { status[name] = { active: 0, succeeded: 0, failed: 0, next: 0 }; } console.log('all tasks ready'); - startServer(); + await startServer(); + console.log('server running on 127.0.0.1:3352'); if (!checkConfig(currentConfig)) { throw Error('bad config'); diff --git a/package.json b/package.json index 8104420..c2e74c2 100644 --- a/package.json +++ b/package.json @@ -6,19 +6,20 @@ "main": "index.js", "workspaces": [ "loadgen", - "_agstate/agoric-servers" + "_agstate/agoric-servers", + "runner" ], "devDependencies": { "@typescript-eslint/parser": "^4.18.0", "eslint": "^7.23.0", "eslint-config-airbnb-base": "^14.0.0", - "eslint-config-jessie": "^0.0.4", - "eslint-config-prettier": "^6.9.0", - 
"eslint-plugin-import": "^2.20.0", + "eslint-config-jessie": "^0.0.6", + "eslint-config-prettier": "^6.12.0", + "eslint-plugin-import": "^2.22.1", "eslint-plugin-jsdoc": "^30.4.2", "eslint-plugin-jsx-a11y": "^6.2.3", "eslint-plugin-prettier": "^3.1.2", - "prettier": "^1.18.2", + "prettier": "^2.1.2", "typescript": "^4.2.3" }, "scripts": { diff --git a/runner/bin/loadgen-runner b/runner/bin/loadgen-runner new file mode 100755 index 0000000..8ff0709 --- /dev/null +++ b/runner/bin/loadgen-runner @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('esm')(module)('../lib/entrypoint'); diff --git a/runner/jsconfig.json b/runner/jsconfig.json new file mode 100644 index 0000000..50681bb --- /dev/null +++ b/runner/jsconfig.json @@ -0,0 +1,22 @@ +// This file can contain .js-specific Typescript compiler config. +{ + "compilerOptions": { + "target": "esnext", + "noEmit": true, + "checkJs": true, + /* + // The following flags are for creating .d.ts files: + "noEmit": false, + "declaration": true, + "emitDeclarationOnly": true, +*/ + "strict": true, + "strictNullChecks": true, + "noImplicitAny": true, + "moduleResolution": "node", + }, + "include": [ + "lib/**/*.js", + "lib/**/*.d.ts" + ], +} \ No newline at end of file diff --git a/runner/lib/entrypoint.js b/runner/lib/entrypoint.js new file mode 100644 index 0000000..f4fa876 --- /dev/null +++ b/runner/lib/entrypoint.js @@ -0,0 +1,66 @@ +/* global process */ +// @ts-nocheck + +import '@agoric/install-ses'; + +import path from 'path'; +import { spawn } from 'child_process'; +import rawFs from 'fs'; +import os from 'os'; + +import main from './main.js'; +import { + flattenAggregateErrors, + aggregateTryFinally, +} from './helpers/async.js'; + +const fs = rawFs.promises; +const fsStream = { + createReadStream: rawFs.createReadStream, + createWriteStream: rawFs.createWriteStream, +}; +const progname = path.basename(process.argv[1]); + +const { stdout, stderr } = process; + +const rawArgs = process.argv.slice(2); + +process.on('uncaughtException', (error) => { + console.error('uncaught exception', error); + process.exit(2); +}); + +(async () => { + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), `${progname.replace(/[^a-z0-9_]/gi, '-')}-`), + ); + + return aggregateTryFinally( + async () => + main(progname, rawArgs, { + stdout, + stderr, + fs, + fsStream, + os, + process, + spawn, + tmpDir, + }), + async () => fs.rmdir(tmpDir, { recursive: true }), + ); +})().then( + (res) => { + res === undefined || process.exit(res); + }, + (rej) => { + // console.log(process._getActiveRequests(), process._getActiveHandles()); + console.error(rej); + if (rej.errors) { + flattenAggregateErrors(rej.errors).forEach((error) => + console.error('nested error:', error), + ); + } + process.exit(2); + }, +); diff --git a/runner/lib/helpers/async.d.ts b/runner/lib/helpers/async.d.ts new file mode 100644 index 0000000..b1492e8 --- /dev/null +++ b/runner/lib/helpers/async.d.ts @@ -0,0 +1,138 @@ +/* global Console */ +/* eslint-disable no-unused-vars,no-redeclare */ + +export declare function sleep(ms: number): Promise; + +export interface AggregateError extends Error { + readonly errors: Error[]; +} + +export declare function flattenAggregateErrors(errors: Error[]): Error[]; + +export declare function warnOnRejection( + operation: Promise, + console: Console, + ...messages: string[] +): void; + +export declare function aggregateTryFinally( + trier: () => Promise, + finalizer: () => Promise, +): Promise; + +export declare function tryTimeout( + timeoutMs: number, + trier: () => 
Promise, + canceler?: () => Promise, +): Promise; + +export declare function PromiseAllOrErrors< + T1, + T2, + T3, + T4, + T5, + T6, + T7, + T8, + T9, + T10 +>( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + T7 | PromiseLike, + T8 | PromiseLike, + T9 | PromiseLike, + T10 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + T7 | PromiseLike, + T8 | PromiseLike, + T9 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + T7 | PromiseLike, + T8 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6, T7, T8]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + T7 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6, T7]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + T6 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5, T6]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + T5 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4, T5]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + T4 | PromiseLike, + ], +): Promise<[T1, T2, T3, T4]>; + +export declare function PromiseAllOrErrors( + values: readonly [ + T1 | PromiseLike, + T2 | PromiseLike, + T3 | PromiseLike, + ], +): Promise<[T1, T2, T3]>; + +export declare function PromiseAllOrErrors( + values: readonly [T1 | PromiseLike, T2 | PromiseLike], +): Promise<[T1, T2]>; + +export declare function PromiseAllOrErrors( + values: readonly (T | PromiseLike)[], +): Promise; diff --git a/runner/lib/helpers/async.js b/runner/lib/helpers/async.js new file mode 100644 index 0000000..148d091 --- /dev/null +++ b/runner/lib/helpers/async.js @@ -0,0 +1,98 @@ +/* global setTimeout */ + +/** @type {import("./async.js").sleep} */ +export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); + +/** @param {Error[]} errors */ +const makeAggregateError = (errors) => { + const err = new Error(); + Object.defineProperties(err, { + name: { + value: 'AggregateError', + }, + errors: { + value: errors, + }, + }); + return err; +}; + +/** + * @template T + * @param {readonly (T | PromiseLike)[]} values + * @returns {Promise} + */ +export const PromiseAllOrErrors = async (values) => { + return Promise.allSettled(values).then((results) => { + const errors = /** @type {PromiseRejectedResult[]} */ (results.filter( + ({ status }) => status === 'rejected', + )).map((result) => result.reason); + if (!errors.length) { + return /** @type {PromiseFulfilledResult[]} */ (results).map( + (result) => result.value, + ); + } else if (errors.length === 1) { + throw errors[0]; + } else { + throw makeAggregateError(errors); + } + }); +}; + +/** @type {import("./async.js").flattenAggregateErrors} */ +export const 
flattenAggregateErrors = (errors) => + errors.reduce((arr, error) => { + arr.push(error); + if ('errors' in error) { + arr.push( + ...flattenAggregateErrors(/** @type {AggregateError} */ (error).errors), + ); + } + return arr; + }, /** @type {Error[]} */ ([])); + +/** @type {import("./async.js").warnOnRejection} */ +export const warnOnRejection = (operation, console, ...messages) => { + operation.catch((error) => { + console.warn(...messages, error); + if ('errors' in error) { + // TODO: Plug into SES error handling + console.warn( + 'Reasons:', + ...flattenAggregateErrors(/** @type {AggregateError} */ (error).errors), + ); + } + }); +}; + +/** @type {import("./async.js").aggregateTryFinally} */ +export const aggregateTryFinally = async (trier, finalizer) => + trier().then( + (result) => finalizer().then(() => result), + (tryError) => + finalizer() + .then( + () => tryError, + (finalizeError) => makeAggregateError([tryError, finalizeError]), + ) + .then((error) => Promise.reject(error)), + ); + +/** @type {import("./async.js").tryTimeout} */ +export const tryTimeout = async (timeoutMs, trier, canceler) => { + const result = Promise.race([ + sleep(timeoutMs).then(() => Promise.reject(new Error('Timeout'))), + trier(), + ]); + + return !canceler + ? result + : result.catch((error) => + canceler() + .then( + () => error, + (cancelerError) => makeAggregateError([error, cancelerError]), + ) + .then((finalError) => Promise.reject(finalError)), + ); +}; diff --git a/runner/lib/helpers/child-process.js b/runner/lib/helpers/child-process.js new file mode 100644 index 0000000..45497be --- /dev/null +++ b/runner/lib/helpers/child-process.js @@ -0,0 +1,97 @@ +/** + * @param {import("child_process").ChildProcess} childProcess + * @param {Object} [options] + * @param {boolean} [options.ignoreExitCode] do not error on non-zero exit codes + * @returns {Promise} The exit code of the process + */ +export const childProcessDone = ( + childProcess, + { ignoreExitCode = false } = {}, +) => + new Promise((resolve, reject) => + childProcess.on('error', reject).on('exit', (code) => { + if (!ignoreExitCode && (code == null || code !== 0)) { + reject(new Error(`Process exited with non-zero code: ${code}`)); + } else { + resolve(code != null ? code : -1); + } + }), + ); + +/** + * Makes a spawn that support non fd backed stdio streams + * Automatically creates a pipe stdio and pipes the stream + * + * @param {Object} options + * @param {import("child_process").spawn} options.spawn Node.js spawn + * @param {boolean} [options.end] Pipe option to automatically forward stream end + * @returns {import("child_process").spawn} + */ +export const makeSpawnWithPipedStream = ({ spawn, end }) => { + /** + * @param {string} command + * @param {ReadonlyArray} args + * @param {import("child_process").SpawnOptions} options + * @returns {import("child_process").ChildProcess} + */ + const pipedSpawn = (command, args, options) => { + const spawnOptions = + typeof args === 'object' && args != null && !Array.isArray(args) + ? 
/** @type {import("child_process").SpawnOptions} */ (args) + : options || {}; + let { stdio } = spawnOptions; + let stdin; + let stdout; + let stderr; + if (Array.isArray(stdio)) { + /** @type {(import("stream").Stream | undefined)[]} */ + const internalStdio = new Array(3); + + stdio = stdio.map((value, idx) => { + if ( + idx < 3 && + typeof value === 'object' && + value != null && + typeof (/** @type {any} */ (value).fd) !== 'number' + ) { + internalStdio[idx] = value; + return 'pipe'; + } + return value; + }); + + [stdin, stdout, stderr] = internalStdio; + } + + const childProcess = spawn(command, args, { + ...spawnOptions, + stdio, + }); + + if (stdin) { + stdin.pipe(/** @type {NodeJS.WritableStream} */ (childProcess.stdin), { + end, + }); + } + if (stdout) { + /** @type {NodeJS.ReadableStream} */ (childProcess.stdout).pipe( + /** @type {*} */ (stdout), + { + end, + }, + ); + } + if (stderr) { + /** @type {NodeJS.ReadableStream} */ (childProcess.stderr).pipe( + /** @type {*} */ (stderr), + { end }, + ); + } + + return /** @type {any} */ (childProcess); + }; + + // TODO: general covariance of return type allows our spawn to add stdio streams + // but NodeJS spawn overloads specifically disallow it + return /** @type {*} */ (pipedSpawn); +}; diff --git a/runner/lib/helpers/fs.js b/runner/lib/helpers/fs.js new file mode 100644 index 0000000..b9e3624 --- /dev/null +++ b/runner/lib/helpers/fs.js @@ -0,0 +1,153 @@ +// import { openSync, closeSync } from 'fs'; + +import { basename, dirname, join as joinPath } from 'path'; + +import { childProcessDone } from './child-process.js'; + +/** + * @callback FindByPrefix + * @param {string} prefix + * @returns {Promise} + */ + +/** + * @callback DirDiskUsage + * @param {string} rootDir + * @param {Object} [options] + * @param {number} [options.minFileSize] + * @returns {Promise>} + */ + +/** + * Make a FIFO file readable stream + * + * @callback MakeFIFO + * @param {string} name + * @returns {Promise} + */ + +/** + * @typedef FsHelper + * @property {FindByPrefix} findByPrefix + * @property {DirDiskUsage} dirDiskUsage + * @property {MakeFIFO} makeFIFO + */ + +/** @typedef {Pick} fsStream */ + +/** + * + * @param {Object} powers + * @param {import("fs/promises")} powers.fs Node.js promisified fs object + * @param {fsStream} powers.fsStream Node.js fs stream operations + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {string} powers.tmpDir Directory location to place temporary files in + * @returns {FsHelper} + * + */ +export const makeFsHelper = ({ fs, fsStream, spawn, tmpDir }) => { + /** @type {FindByPrefix} */ + const findByPrefix = async (prefix) => { + const parentDir = dirname(prefix); + const prefixBase = basename(prefix); + + const name = (await fs.readdir(parentDir)).find((dir) => + dir.startsWith(prefixBase), + ); + if (!name) { + throw new Error(`Couldn't find dir entry starting with prefix`); + } + return joinPath(parentDir, name); + }; + + // TODO: figure out why tsc complains when using /** @type {DirDiskUsage} */ + /** + * @param {string} rootDir + * @param {Object} [options] + * @param {number} [options.minFileSize] + */ + const dirDiskUsage = async (rootDir, { minFileSize = 5 } = {}) => { + /** @type {Record} */ + const book = {}; + + /** + * @param {string} subpath + */ + const processDir = async (subpath) => { + const dirEntNames = await fs.readdir(joinPath(rootDir, subpath)); + const dirEntStats = await Promise.all( + dirEntNames.map( + /** + * @param {string} name + * @returns {Promise<[string, 
import('fs').Stats]>} + * */ + async (name) => [ + joinPath(subpath, name), + await fs.lstat(joinPath(rootDir, subpath, name)), + ], + ), + ); + for (const [path, stat] of dirEntStats) { + if (stat.isDirectory()) { + // Await the recursion here to provide some level of order and parallelism limit + // eslint-disable-next-line no-await-in-loop + await processDir(path); + } else if (stat.isFile()) { + // A linux fs block is 512 bytes + // https://man7.org/linux/man-pages/man2/stat.2.html + const size = stat.blocks / 2; + if (size >= minFileSize) { + book[path] = stat.blocks / 2; + } + } else { + console.error('Unexpected file type', joinPath(rootDir, path)); + } + } + }; + + await processDir(''); + + return book; + }; + + /** @type {MakeFIFO} */ + const makeFIFO = async (name) => { + const fifoPath = joinPath(tmpDir, basename(name)); + await childProcessDone(spawn('mkfifo', [fifoPath], { stdio: 'inherit' })); + + const stream = fsStream.createReadStream(fifoPath, { + emitClose: true, + // Large buffer + // TODO: Make configurable + highWaterMark: 1024 * 1024, + }); + + // eslint-disable-next-line no-underscore-dangle + const originalStreamDestroy = stream._destroy; + // eslint-disable-next-line no-underscore-dangle + stream._destroy = (error, callback) => { + const internalStream = /** @type {{closed: boolean, fd: number | null}} */ ( + /** @type {unknown} */ (stream) + ); + if (!internalStream.closed && typeof internalStream.fd !== 'number') { + console.warn( + 'FIFO was never opened for write, self opening to unblock process.', + ); + // Unblock node's internal read open + (async () => (await fs.open(fifoPath, 'a')).close())(); + // closeSync(openSync(fifoPath, 'a')); + } + + originalStreamDestroy.call(stream, error, callback); + }; + + stream.once('close', () => { + // TODO: log errors + fs.rm(fifoPath); + }); + + return stream; + }; + + return harden({ dirDiskUsage, findByPrefix, makeFIFO }); +}; diff --git a/runner/lib/helpers/line-stream-transform.d.ts b/runner/lib/helpers/line-stream-transform.d.ts new file mode 100644 index 0000000..187add6 --- /dev/null +++ b/runner/lib/helpers/line-stream-transform.d.ts @@ -0,0 +1,16 @@ +/* eslint-disable no-unused-vars */ + +import type ReadlineTransform, { + ReadlineTransformOptions, +} from 'readline-transform'; + +export interface LineStreamTransformOptions extends ReadlineTransformOptions { + /** optional prefix to prepend for each line */ + prefix?: string; + /** ending for each line. If true, a new line is added. */ + lineEndings?: boolean | string; +} + +export default class LineStreamTransform extends ReadlineTransform { + constructor(options?: LineStreamTransformOptions); +} diff --git a/runner/lib/helpers/line-stream-transform.js b/runner/lib/helpers/line-stream-transform.js new file mode 100644 index 0000000..b213277 --- /dev/null +++ b/runner/lib/helpers/line-stream-transform.js @@ -0,0 +1,33 @@ +import ReadlineTransform from 'readline-transform'; + +/* eslint-disable no-underscore-dangle,no-nested-ternary */ + +export default class LineStreamTransform extends ReadlineTransform { + /** + * + * @param {import("./line-stream-transform.js").LineStreamTransformOptions} options + */ + constructor(options = {}) { + const defaultTransformOptions = { readableObjectMode: true }; + const { + transform: _, + prefix = '', + lineEndings = false, + ...readlineTransformOptions + } = options; + super({ ...defaultTransformOptions, ...readlineTransformOptions }); + this._prefix = prefix; + this._suffix = lineEndings + ? 
typeof lineEndings === 'string' + ? lineEndings + : '\n' + : ''; + } + + /** @param {string} line */ + _writeItem(line) { + if (line.length > 0 || !(/** @type {any} */ (this)._skipEmpty)) { + this.push(`${this._prefix}${line}${this._suffix}`); + } + } +} diff --git a/runner/lib/helpers/outputter.js b/runner/lib/helpers/outputter.js new file mode 100644 index 0000000..755b836 --- /dev/null +++ b/runner/lib/helpers/outputter.js @@ -0,0 +1,43 @@ +import { Console } from 'console'; + +import LineStreamTransform from './line-stream-transform.js'; + +/** + * @param {Object} options + * @param {import("stream").Writable} options.out + * @param {import("stream").Writable} [options.err] + * @param {string} [options.outPrefix] + * @param {string} [options.errPrefix] + * @param {boolean} [options.colorMode] + */ +export const makeOutputter = ({ + out, + err = out, + outPrefix, + errPrefix = outPrefix, + colorMode = true, +}) => { + if (outPrefix) { + const dstOut = out; + out = new LineStreamTransform({ + prefix: outPrefix, + lineEndings: true, + }); + out.pipe(dstOut); + } + + if (errPrefix) { + const dstErr = err; + err = new LineStreamTransform({ + prefix: errPrefix, + lineEndings: true, + }); + err.pipe(dstErr); + } + + return { + console: new Console({ stdout: out, stderr: err, colorMode }), + out, + err, + }; +}; diff --git a/runner/lib/helpers/process-info.d.ts b/runner/lib/helpers/process-info.d.ts new file mode 100644 index 0000000..9b9bca3 --- /dev/null +++ b/runner/lib/helpers/process-info.d.ts @@ -0,0 +1,74 @@ +/** + * See https://github.com/torvalds/linux/blob/master/Documentation/filesystems/proc.rst + * for details on some of these values + */ + +/** Process times in seconds (converted from jiffies) */ +export type ProcessTimes = { + /** time spent waiting for block IO */ + blockIo: number; + /** guest time of the task children */ + childGuest: number; + /** kernel mode including children */ + childKernel: number; + /** user mode including children */ + childUser: number; + /** guest time of the task */ + guest: number; + /** kernel mode */ + kernel: number; + /** user mode */ + user: number; +}; + +/** Process memory sizes in kiB */ +export type ProcessMemory = { + /** peak virtual memory size */ + vmPeak: number; + /** total program size */ + vmSize: number; + /** locked memory size */ + vmLocked: number; + /** pinned memory size */ + vmPinned: number; + /** peak resident set size ("high water mark") */ + vmHwm: number; + /** size of memory portions. It contains the three following parts (vmRSS = rssAnon + rssFile + rssShmem) */ + vmRss: number; + /** size of resident anonymous memory */ + rssAnon: number; + /** size of resident file mappings */ + rssFile: number; + /** size of resident shmem memory (includes SysV shm, mapping of tmpfs and shared anonymous mappings) */ + rssShmem: number; + /** size of private data segments */ + vmData: number; + /** size of stack segments */ + vmStack: number; + /** size of text segment */ + vmExe: number; + /** size of shared library code */ + vmLib: number; + /** size of page table entries */ + vmPte: number; + /** amount of swap used by anonymous private data (shmem swap usage is not included) */ + vmSwap: number; +}; + +export interface ProcessInfo { + /** The PID of the process */ + readonly pid: number; + /** The process' static start time in seconds relative to the origin process start */ + readonly startTimestamp: number; + /** Retrieves the current command line. 
This may change if when process "exec" */ + getArgv(): Promise; + /** Retrieves the current timing and memory usage for the process */ + getUsageSnapshot(): Promise<{ times: ProcessTimes; memory: ProcessMemory }>; + /** Retrieves the current list of child processes */ + getChildren(): Promise; + /** + * Retrieves the parent of the process. + * This may change if the original parent terminates. + */ + getParent(): Promise; +} diff --git a/runner/lib/helpers/procsfs.js b/runner/lib/helpers/procsfs.js new file mode 100644 index 0000000..5ddaeae --- /dev/null +++ b/runner/lib/helpers/procsfs.js @@ -0,0 +1,266 @@ +/* global process Buffer */ + +/** + * Helper module to read, parse and interpret procfs + * + * Authoritative info on content: + * https://github.com/torvalds/linux/blob/master/Documentation/filesystems/proc.rst + */ + +import { performance } from 'perf_hooks'; + +import { assert } from '@agoric/assert'; + +import { childProcessDone } from './child-process.js'; + +const statusLineFormat = /^([^:]+):[\s]+(.+)$/; + +/** @typedef {import("./process-info.js").ProcessInfo} ProcessInfo */ + +/** + * @callback GetProcessInfo + * @param {number} pid PID of the process to get info + * @returns {Promise} + */ + +/** + * @typedef ProcessHelper + * @property {GetProcessInfo} getProcessInfo + * @property {() => Promise} getCPUTimeOffset + */ + +/** + * + * @param {Object} powers + * @param {import("fs/promises")} powers.fs Node.js promisified fs object + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {number} [powers.startPid] The PID of the process to use as a start time reference + * @returns {ProcessHelper} + * + */ +export const makeProcfsHelper = ({ fs, spawn, startPid = process.pid }) => { + // Kernel data has no encoding so just copy bytes + /** @type {{encoding: BufferEncoding}} */ + const bufferOptions = { encoding: 'latin1' }; + + // A lot of kernel times are in jiffies/ticks, which frequency can be changed + // through a kernel compilation time configuration + const userHertzP = (async () => { + const childProcess = spawn('getconf', ['CLK_TCK'], { stdio: 'pipe' }); + const spawnResult = childProcessDone(childProcess); + + // The result will probably come in a single chunk, but let's be correct + const chunks = []; + for await (const chunk of childProcess.stdout) { + chunks.push(chunk); + } + await spawnResult; + return parseInt(Buffer.concat(chunks).toString(bufferOptions.encoding), 10); + })(); + + /** @typedef {string[]} ProcStat */ + /** + * Returns the split but unparsed stat data from /proc/:pid/stat + * + * @param {number} pid + * @returns {Promise} + */ + const getStat = async (pid) => { + const data = await fs.readFile(`/proc/${pid}/stat`, bufferOptions); + const idx1 = data.indexOf('('); + const idx2 = data.lastIndexOf(')'); + return [ + data.substring(0, idx1 - 1), + data.substring(idx1 + 1, idx2), + ...data.substring(idx2 + 2).split(' '), + ]; + }; + + /** @typedef {Record} ProcStatus */ + /** + * Returns the split but unparsed status data from /proc/:pid/status + * + * @param {number} pid + * @returns {Promise} + */ + const getStatus = async (pid) => { + const data = await fs.readFile(`/proc/${pid}/status`, bufferOptions); + /** @type {ProcStatus} */ + const status = {}; + for (const line of data.split('\n')) { + const matches = statusLineFormat.exec(line); + if (matches) { + status[matches[1]] = matches[2]; + } + } + return status; + }; + + /** + * Returns the split command line from /proc/:pid/cmdline + * + * @param {number} pid + * 
@returns {Promise} + */ + const getCmdline = async (pid) => { + const data = await fs.readFile(`/proc/${pid}/cmdline`, bufferOptions); + if (!data) return null; + const argv = data.split('\x00'); + argv.pop(); // trailing empty line + return argv; + }; + + /** @param {ProcStat} stat */ + const getStartTicks = (stat) => parseInt(stat[21], 10); + + const startTicksOriginP = getStat(startPid).then(getStartTicks); + + // TODO: Use a WeakValueMap + /** @type {Map} */ + const knownProcessInfo = new Map(); + + /** @type {GetProcessInfo} */ + const getProcessInfo = async (pid) => { + const startTicks = getStartTicks(await getStat(pid)); + + // Technically PIDs can be recycled, but the startTicks will be different + const uniquePid = `${pid}-${startTicks}`; + + /** @param {ProcStat} stat */ + const assertSameProcess = (stat) => { + assert(String(pid) === stat[0]); + assert(startTicks === getStartTicks(stat)); + }; + + let processInfo = knownProcessInfo.get(uniquePid); + + if (!processInfo) { + const startTimestamp = + (startTicks - (await startTicksOriginP)) / (await userHertzP); + + processInfo = harden({ + pid, + startTimestamp, + getArgv: async () => { + return getCmdline(pid); + }, + getUsageSnapshot: async () => { + const [stat, status, userHertz] = await Promise.all([ + getStat(pid), + getStatus(pid), + userHertzP, + ]); + assertSameProcess(stat); + + const times = { + blockIo: parseInt(stat[41], 10) / userHertz, + childGuest: parseInt(stat[43], 10) / userHertz, + childKernel: parseInt(stat[16], 10) / userHertz, + childUser: parseInt(stat[15], 10) / userHertz, + guest: parseInt(stat[42], 10) / userHertz, + kernel: parseInt(stat[14], 10) / userHertz, + user: parseInt(stat[13], 10) / userHertz, + }; + + // TODO: Parse /proc/:pid/smaps values to get better RSS info + const memory = { + // rss: parseInt(stat[23], 10) * 4, + // rssSoftLimit: parseInt(stat[24], 10), + // vsize: parseInt(stat[22], 10) / 1024, + vmData: parseInt(status.VmData, 10), + vmExe: parseInt(status.VmExe, 10), + vmHwm: parseInt(status.VmHWM, 10), + vmLib: parseInt(status.VmLib, 10), + vmLocked: parseInt(status.VmLck, 10), + vmPeak: parseInt(status.VmPeak, 10), + vmPinned: parseInt(status.VmPin, 10), + vmPte: parseInt(status.VmPTE, 10), + vmRss: parseInt(status.VmRSS, 10), + vmSize: parseInt(status.VmSize, 10), + vmStack: parseInt(status.VmStk, 10), + vmSwap: parseInt(status.VmSwap, 10), + rssAnon: parseInt(status.RssAnon, 10), + rssFile: parseInt(status.RssFile, 10), + rssShmem: parseInt(status.RssShmem, 10), + }; + + return harden({ times, memory }); + }, + getChildren: async () => { + assertSameProcess(await getStat(pid)); + + const tids = await fs.readdir(`/proc/${pid}/task`, bufferOptions); + + const rawChildrens = await Promise.all( + tids.map((tid) => + fs.readFile(`/proc/${pid}/task/${tid}/children`, bufferOptions), + ), + ); + + /** @type {Set} */ + const cpids = new Set(); + + for (const rawChildren of rawChildrens) { + const rawCpids = rawChildren.split(' '); + if (!rawCpids[rawCpids.length - 1]) { + rawCpids.pop(); // remove empty trail + } + for (const rawCpid of rawCpids) { + cpids.add(parseInt(rawCpid, 10)); + } + } + + // Ignore any children that may have gone missing by the time we get their info + const childrenInfoResolutions = await Promise.allSettled( + [...cpids].map(getProcessInfo), + ); + return harden( + childrenInfoResolutions + .filter(({ status }) => status === 'fulfilled') + .map( + (r) => + /** @type {PromiseFulfilledResult} */ (r).value, + ), + ); + }, + getParent: async () => { + const stat = 
await getStat(pid); + assertSameProcess(stat); + const ppid = parseInt(stat[3], 10); + return getProcessInfo(ppid); + }, + }); + knownProcessInfo.set(uniquePid, processInfo); + } + + return processInfo; + }; + + /** + * Estimates the offset between ProcessInfo startTimestamp + * and performance.now()'s origin for the current process. + * + * The absolute value of this offset should be below 0.01s + * on a system with somewhat accurate time measurement + * + * @returns {Promise} The offset in seconds + */ + const getCPUTimeOffset = async () => { + const perfNowBefore = performance.now(); + const uptime = await fs.readFile('/proc/uptime', bufferOptions); + const perfNow = (perfNowBefore + performance.now()) / 2; + + // Process start time is static and expressed in jiffies + // It's not adjusted like other kernel monotonic clock + const startMsOrigin = + ((await startTicksOriginP) * 1000) / (await userHertzP); + + // Uptime is a monotonic clock that represents elapsed time since system boot + // It does get adjusted by NTP, and thus might deviate over time from jiffies + const uptimeMs = Number(uptime.split(' ')[0]) * 1000; + + return Math.round(uptimeMs - startMsOrigin - perfNow) / 1000; + }; + + return harden({ getProcessInfo, getCPUTimeOffset }); +}; diff --git a/runner/lib/helpers/stream-steps.js b/runner/lib/helpers/stream-steps.js new file mode 100644 index 0000000..4ff23b3 --- /dev/null +++ b/runner/lib/helpers/stream-steps.js @@ -0,0 +1,60 @@ +import { promisify } from 'util'; +import { finished as finishedCallback } from 'stream'; + +import { makePromiseKit } from '@agoric/promise-kit'; + +import LineStreamTransform from './line-stream-transform.js'; + +const finished = promisify(finishedCallback); + +/** + * @typedef {Object} StepConfig + * @property {RegExp} matcher + * @property {number} [resultIndex=1] the index in the match result to use as resolution + */ + +/** + * @param {import("stream").Readable} stream + * @param {StepConfig[]} steps + * @param {Object} [options] + * @param {boolean} [options.waitEnd=true] + */ +export const whenStreamSteps = (stream, steps, { waitEnd = true } = {}) => { + const stepsAndKits = steps.map((step) => ({ step, kit: makePromiseKit() })); + + const lines = new LineStreamTransform(); + // const pipeResult = pipeline(stream, lines); + stream.pipe(lines); + + const parseResult = (async () => { + for await (const line of lines) { + if (stepsAndKits.length) { + const match = stepsAndKits[0].step.matcher.exec(line); + if (match) { + const stepAndKit = /** @type {{step: StepConfig, kit: import('@agoric/promise-kit').PromiseRecord}} */ (stepsAndKits.shift()); + const { + step: { resultIndex = 1 }, + kit: { resolve }, + } = stepAndKit; + resolve(match[resultIndex]); + } + } + + if (!stepsAndKits.length) { + stream.unpipe(lines); + lines.end(); + } + } + + if (stepsAndKits.length) { + const error = new Error('Stream ended before match found'); + stepsAndKits.forEach(({ kit: { reject } }) => reject(error)); + } + + if (waitEnd) { + await finished(stream); + } + })(); + + return [...stepsAndKits.map(({ kit: { promise } }) => promise), parseResult]; +}; diff --git a/runner/lib/main.js b/runner/lib/main.js new file mode 100644 index 0000000..5ea7844 --- /dev/null +++ b/runner/lib/main.js @@ -0,0 +1,735 @@ +/* global process setInterval clearInterval */ +/* eslint-disable no-continue */ + +// import { Command } from 'commander'; + +import { resolve as resolvePath, join as joinPath, basename } from 'path'; +import { performance } from 'perf_hooks'; +import zlib 
from 'zlib'; +import { promisify } from 'util'; +import { + pipeline as pipelineCallback, + finished as finishedCallback, +} from 'stream'; + +import chalk from 'chalk'; +import { makePromiseKit } from '@agoric/promise-kit'; + +import { + sleep, + PromiseAllOrErrors, + warnOnRejection, + aggregateTryFinally, +} from './helpers/async.js'; +import { childProcessDone } from './helpers/child-process.js'; +import { makeFsHelper } from './helpers/fs.js'; +import { makeProcfsHelper } from './helpers/procsfs.js'; +import { makeOutputter } from './helpers/outputter.js'; + +import { makeTestOperations } from './test-local-chain.js'; + +const pipeline = promisify(pipelineCallback); +const finished = promisify(finishedCallback); + +const monitorInterval = 5 * 60 * 1000; + +const stageDuration = 6 * 60 * 60 * 1000; + +const vatIdentifierRE = /^(v\d+):(.*)$/; +const knownVatsNamesWithoutProcess = ['comms', 'vattp']; + +/** + * @typedef { | + * 'create-vat' | + * 'vat-startup-finish' | + * 'replay-transcript-start' | + * 'cosmic-swingset-end-block-start' | + * 'cosmic-swingset-end-block-finish' | + * 'cosmic-swingset-begin-block' + * } SupportedSlogEventTypes + */ + +/** + * @typedef {{ + * time: number, + * type: SupportedSlogEventTypes + * }} SlogEventBase + */ + +/** + * @typedef {SlogEventBase & Record} SlogEvent + */ + +/** + * @typedef {{ + * time: number, + * type: 'create-vat', + * vatID: string, + * name?: string, + * dynamic: boolean, + * } & Record} SlogCreateVatEvent + */ + +/** + * @typedef {{ + * time: number, + * type: 'vat-startup-finish' | 'replay-transcript-start', + * vatID: string + * } & Record} SlogVatEvent + */ + +/** + * @typedef {{ + * time: number, + * type: 'cosmic-swingset-end-block-start' | + * 'cosmic-swingset-end-block-finish' | + * 'cosmic-swingset-begin-block', + * vatID: string + * } & Record} SlogCosmicSwingsetEvent + */ + +/** @type {SupportedSlogEventTypes[]} */ +const supportedSlogEventTypes = [ + 'create-vat', + 'vat-startup-finish', + 'replay-transcript-start', + 'cosmic-swingset-end-block-start', + 'cosmic-swingset-end-block-finish', + 'cosmic-swingset-begin-block', +]; + +/** + * + * @param {string} progName + * @param {string[]} rawArgs + * @param {Object} powers + * @param {import("stream").Writable} powers.stdout + * @param {import("stream").Writable} powers.stderr + * @param {import("fs/promises")} powers.fs Node.js promisified fs object + * @param {import("./helpers/fs.js").fsStream} powers.fsStream Node.js fs stream operations + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {string} powers.tmpDir Directory location to place temporary files in + */ +const main = async (progName, rawArgs, powers) => { + const { stdout, stderr, fs, fsStream, spawn, tmpDir } = powers; + + const outputDir = rawArgs[0] || `run-results-${Date.now()}`; + + const { getProcessInfo, getCPUTimeOffset } = makeProcfsHelper({ fs, spawn }); + const { findByPrefix, dirDiskUsage, makeFIFO } = makeFsHelper({ + fs, + fsStream, + spawn, + tmpDir, + }); + + const { resetChain, runChain, runClient, runLoadGen } = makeTestOperations({ + spawn, + findDirByPrefix: findByPrefix, + makeFIFO, + getProcessInfo, + }); + + /** + * @param {string} [prefix] + * @param {import("stream").Writable} [out] + * @param {import("stream").Writable} [err] + */ + const makeConsole = (prefix, out = stdout, err = stderr) => + makeOutputter({ + out, + err, + outPrefix: prefix && `${chalk.green(prefix)}: `, + errPrefix: prefix && `${chalk.bold.red(prefix)}: `, + }); + + let { console } = 
makeConsole(); + + console.log(`Outputting to ${resolvePath(outputDir)}`); + await fs.mkdir(outputDir, { recursive: true }); + + const outputStream = fsStream.createWriteStream( + joinPath(outputDir, 'perf.log'), + ); + + let currentStage = 0; + let currentStageElapsedOffset = 0; + /** @type {string} */ + let chainStorageLocation; + + const slogEventRE = new RegExp( + `^{"time":\\d+(?:\\.\\d+),"type":"(?:${supportedSlogEventTypes.join( + '|', + )})"`, + ); + + /** + * + * @param {string} eventType + * @param {Record} [data] + */ + const logPerfEvent = (eventType, data = {}) => { + const timestamp = Math.round(performance.now() * 1000) / 1e6; + outputStream.write( + JSON.stringify( + { + timestamp, + stage: currentStage, + elapsed: timestamp - currentStageElapsedOffset, + time: undefined, // Placeholder to put data.time before type if it exists + type: `perf-${eventType}`, + ...data, + }, + (_, arg) => (typeof arg === 'bigint' ? Number(arg) : arg), + ), + ); + outputStream.write('\n'); + }; + + /** + * @param {import("./test-operations.js").RunChainInfo} chainInfo + * @param {Object} param1 + * @param {() => void} param1.resolveFirstBlock + * @param {import("stream").Writable} param1.out + * @param {import("stream").Writable} param1.err + */ + const monitorChain = async ( + { slogLines, storageLocation, processInfo: kernelProcessInfo }, + { resolveFirstBlock, out, err }, + ) => { + const { console: monitorConsole } = makeConsole('monitor-chain', out, err); + + /** + * @typedef {{ + * processInfo: import("./helpers/procsfs.js").ProcessInfo | null | undefined, + * vatName: string | undefined, + * started: boolean, + * }} VatInfo + */ + /** @type {Map} */ + const vatInfos = new Map(); + let vatUpdated = Promise.resolve(); + + const updateVatInfos = async () => { + monitorConsole.log('Updating vat infos'); + const childrenInfos = new Set( + await kernelProcessInfo.getChildren().catch(() => []), + ); + for (const info of childrenInfos) { + const argv = await info.getArgv(); // eslint-disable-line no-await-in-loop + if (!argv || basename(argv[0]) !== 'xsnap') continue; + const vatIdentifierMatches = vatIdentifierRE.exec(argv[1]); + if (!vatIdentifierMatches) continue; + const vatID = vatIdentifierMatches[1]; + const vatInfo = vatInfos.get(vatID); + + if (!vatInfo) { + /** @type {string | undefined} */ + let vatName = vatIdentifierMatches[2]; + if (!vatName || vatName === 'undefined') vatName = undefined; + // TODO: warn found vat process without create event + monitorConsole.warn( + `found vat ${vatID}${ + vatName ? ` ${vatName}` : '' + } process before create event`, + 'pid=', + info.pid, + ); + // vatInfo = { vatName, processInfo: info }; + // vatInfos.set(vatID, vatInfo); + continue; + } + + if (vatInfo.processInfo !== info) { + // TODO: warn if replacing with new processInfo ? + } + + vatInfo.processInfo = info; + + // if (!vatInfo.started) { + // monitorConsole.warn( + // `found vat ${vatID}${ + // vatInfo.vatName ? 
` ${vatInfo.vatName}` : '' + // } process before vat start event`, + // 'pid=', + // info.pid, + // ); + // } + } + for (const [vatID, vatInfo] of vatInfos) { + if (vatInfo.processInfo && !childrenInfos.has(vatInfo.processInfo)) { + vatInfo.processInfo = null; + } + + if ( + vatInfo.started && + !vatInfo.processInfo && + vatInfo.vatName && + !knownVatsNamesWithoutProcess.includes(vatInfo.vatName) + ) { + // Either the vat started but the process doesn't exist yet (undefined) + // or the vat process exited but the vat didn't stop yet (null) + monitorConsole.warn( + `Vat ${vatID} started but process ${ + vatInfo.processInfo === null + ? 'exited early' + : "doesn't exist yet" + }`, + ); + } + } + }; + + const ensureVatInfoUpdated = async () => { + const vatUpdatedBefore = vatUpdated; + await vatUpdated; + if (vatUpdated === vatUpdatedBefore) { + vatUpdated = updateVatInfos(); + warnOnRejection( + vatUpdated, + monitorConsole, + 'Failed to update vat process infos', + ); + } + }; + + const logProcessUsage = async () => + PromiseAllOrErrors( + [ + { + eventData: { + processType: 'kernel', + }, + processInfo: kernelProcessInfo, + }, + ...[...vatInfos].map(([vatID, { processInfo, vatName }]) => ({ + eventData: { + processType: 'vat', + vatID, + name: vatName, + }, + processInfo, + })), + ].map(async ({ eventData, processInfo }) => { + if (!processInfo) return; + const { times, memory } = await processInfo.getUsageSnapshot(); + logPerfEvent('chain-process-usage', { + ...eventData, + real: + Math.round( + performance.now() * 1000 - processInfo.startTimestamp * 1e6, + ) / 1e6, + ...times, + ...memory, + }); + }), + ).then(() => {}); + + const logStorageUsage = async () => { + logPerfEvent('chain-storage-usage', { + chain: await dirDiskUsage(storageLocation), + }); + }; + + const monitorIntervalId = setInterval( + () => + warnOnRejection( + PromiseAllOrErrors([logStorageUsage(), logProcessUsage()]), + monitorConsole, + 'Failure during usage monitoring', + ), + monitorInterval, + ); + + const slogOutput = zlib.createGzip({ + level: zlib.constants.Z_BEST_COMPRESSION, + }); + const slogOutputWriteStream = fsStream.createWriteStream( + joinPath(outputDir, `chain-stage-${currentStage}.slog.gz`), + ); + // const slogOutput = slogOutputWriteStream; + // const slogOutputPipeResult = finished(slogOutput); + const slogOutputPipeResult = pipeline(slogOutput, slogOutputWriteStream); + + /** @type {number | null} */ + let slogStart = null; + + let slogBlocksSeen = 0; + + for await (const line of slogLines) { + slogOutput.write(line); + slogOutput.write('\n'); + + if (slogStart == null) { + // TODO: figure out a better way + // There is a risk we could be late to the party here, with the chain + // having started some time before us but in reality we usually find + // the process before it starts the kernel + slogStart = performance.now() / 1000; + warnOnRejection( + logStorageUsage(), + monitorConsole, + 'Failed to get first storage usage', + ); + } + + // Avoid JSON parsing lines we don't care about + if (!slogEventRE.test(line)) continue; + + const localEventTime = performance.timeOrigin + performance.now(); + + /** @type {SlogEvent} */ + let event; + try { + event = JSON.parse(line); + } catch (error) { + monitorConsole.warn('Failed to parse slog line', line, error); + continue; + } + + monitorConsole.log( + 'slog event', + event.type, + 'delay', + Math.round(localEventTime - event.time * 1000), + 'ms', + ); + + switch (event.type) { + case 'create-vat': { + const { + vatID, + name: vatName, + } = /** @type 
{SlogCreateVatEvent} */ (event); + if (!vatInfos.has(vatID)) { + vatInfos.set(vatID, { + vatName, + processInfo: undefined, + started: false, + }); + } else { + // TODO: warn already created vat before + } + break; + } + case 'vat-startup-finish': { + const { vatID } = /** @type {SlogVatEvent} */ (event); + const vatInfo = vatInfos.get(vatID); + if (!vatInfo) { + // TODO: warn unknown vat + } else { + vatInfo.started = true; + ensureVatInfoUpdated(); + } + break; + } + case 'replay-transcript-start': { + const { vatID } = /** @type {SlogVatEvent} */ (event); + const vatInfo = vatInfos.get(vatID); + if (!vatInfo) { + // TODO: warn unknown vat + } else if (!vatInfo.processInfo) { + ensureVatInfoUpdated(); + } + break; + } + case 'cosmic-swingset-end-block-start': { + if (event.blockHeight === 0) { + logPerfEvent('chain-first-init-start'); + } + break; + } + case 'cosmic-swingset-end-block-finish': { + if (event.blockHeight === 0) { + // TODO: measure duration from start to finish + logPerfEvent('chain-first-init-finish'); + } + break; + } + case 'cosmic-swingset-begin-block': { + if (!slogBlocksSeen) { + logPerfEvent('stage-first-block'); + warnOnRejection( + logProcessUsage(), + monitorConsole, + 'Failed to get initial process usage', + ); + } + slogBlocksSeen += 1; + if (slogBlocksSeen === 1) { + resolveFirstBlock(); + } + monitorConsole.log('begin-block', event.blockHeight); + break; + } + default: + } + } + + clearInterval(monitorIntervalId); + + slogOutput.end(); + await slogOutputPipeResult; + }; + + /** + * @param {Object} param0 + * @param {boolean} [param0.chainOnly] + */ + const runStage = async ({ chainOnly } = {}) => { + /** @type {import("stream").Writable} */ + let out; + /** @type {import("stream").Writable} */ + let err; + + currentStage += 1; + currentStageElapsedOffset = Math.round(performance.now() * 1000) / 1e6; + ({ console, out, err } = makeConsole(`stage-${currentStage}`)); + + const { console: stageConsole } = makeConsole('runner', out, err); + + logPerfEvent('stage-start'); + const stageStart = performance.now(); + + stageConsole.log('Running chain'); + logPerfEvent('run-chain-start'); + const runChainResult = await runChain({ stdout: out, stderr: err }); + logPerfEvent('run-chain-finish'); + + currentStageElapsedOffset = runChainResult.processInfo.startTimestamp; + chainStorageLocation = runChainResult.storageLocation; + /** @type {import("@agoric/promise-kit").PromiseRecord} */ + const { + promise: chainFirstBlock, + resolve: resolveFirstBlock, + } = makePromiseKit(); + const monitorChainDone = monitorChain(runChainResult, { + resolveFirstBlock, + out, + err, + }); + + await aggregateTryFinally( + async () => { + await runChainResult.ready; + logPerfEvent('chain-ready'); + stageConsole.log('Chain ready'); + + await chainFirstBlock; + + if (!chainOnly) { + stageConsole.log('Running client'); + logPerfEvent('run-client-start'); + const runClientStart = performance.now(); + const runClientResult = await runClient({ stdout: out, stderr: err }); + logPerfEvent('run-client-finish'); + + await aggregateTryFinally( + async () => { + await runClientResult.ready; + logPerfEvent('client-ready', { + duration: + Math.round((performance.now() - runClientStart) * 1000) / 1e6, + }); + + stageConsole.log('Running load gen'); + logPerfEvent('run-loadgen-start'); + const runLoadGenResult = await runLoadGen({ + stdout: out, + stderr: err, + }); + logPerfEvent('run-loadgen-finish'); + + await aggregateTryFinally( + async () => { + await runLoadGenResult.ready; + 
logPerfEvent('loadgen-ready'); + + const sleepTime = + stageDuration - (performance.now() - stageStart); + stageConsole.log( + 'Stage ready, going to sleep for', + Math.round(sleepTime / (1000 * 60)), + 'minutes', + ); + logPerfEvent('stage-ready'); + + const signal = makePromiseKit(); + const onInterrupt = () => + signal.reject(new Error('Interrupted')); + process.on('SIGINT', onInterrupt); + + await aggregateTryFinally( + async () => { + await Promise.race([sleep(sleepTime), signal.promise]); + logPerfEvent('stage-shutdown'); + }, + async () => { + process.off('SIGINT', onInterrupt); + }, + ); + }, + async () => { + stageConsole.log('Stopping load-gen'); + + runLoadGenResult.stop(); + await runLoadGenResult.done; + logPerfEvent('loadgen-stopped'); + }, + ); + }, + async () => { + stageConsole.log('Stopping client'); + + runClientResult.stop(); + await runClientResult.done; + logPerfEvent('client-stopped'); + }, + ); + } + }, + async () => { + stageConsole.log('Stopping chain'); + + runChainResult.stop(); + await runChainResult.done; + logPerfEvent('chain-stopped'); + + await monitorChainDone; + }, + ); + + logPerfEvent('stage-finish'); + currentStageElapsedOffset = 0; + }; + + await aggregateTryFinally( + async () => { + let out; + let err; + ({ console, out, err } = makeConsole('init')); + logPerfEvent('start', { + cpuTimeOffset: await getCPUTimeOffset(), + timeOrigin: performance.timeOrigin / 1000, + // TODO: add other interesting info here + }); + + logPerfEvent('reset-chain-start'); + await resetChain({ stdout: out, stderr: err }); + logPerfEvent('reset-chain-finish'); + + while (currentStage < 4) { + await runStage(); // eslint-disable-line no-await-in-loop + } + + await runStage({ chainOnly: true }); + }, + async () => { + outputStream.end(); + + if (chainStorageLocation) { + await childProcessDone( + spawn('tar', [ + '-cSJf', + joinPath(outputDir, 'chain-storage.tar.xz'), + chainStorageLocation, + ]), + ); + } + + await PromiseAllOrErrors([ + // finished(fifo), + finished(outputStream), + ]); + }, + ); + + // console.log(await dirDiskUsage(chainDir)); + + // const myInfo = await getProcessInfo(process.pid); + // const parentInfo = await myInfo.getParent(); + // const systemArgs = myInfo.argv || []; + + // stdout(`Hello world: ${progName}: ${rawArgs.join(', ')}\n`); + // stdout(`System args: ${systemArgs.join(' ')}\n`); + // stdout(`Start ticks: ${0 - parentInfo.startTimestamp}\n`); + + // /** + // * + // * @param {import("./helpers/process-info.js").ProcessInfo} info + // * @param {string} indent + // */ + // const printProcess = async (info, indent = '') => { + // stdout( + // `${indent}(${info.pid})[${info.startTimestamp}]: ${ + // (info.argv || ['(null)'])[0] + // }\n`, + // ); + // stdout( + // `${indent} ${JSON.stringify(await info.getUsageSnapshot())}\n`, + // ); + // if (indent.length < 2) { + // for (const child of await info.getChildren()) { + // // eslint-disable-next-line no-await-in-loop + // await printProcess(child, `${indent} `); + // } + // } + // }; + + // await printProcess(await getProcessInfo(1)); + // // await printProcess(await getProcessInfo(108)); + + // const fifo = fsStream.createReadStream('./chain.second.slog', { + // emitClose: true, + // }); + + // fifo + // .once('ready', () => { + // console.log('fifo ready'); + // }) + // .once('open', () => { + // fifo.once('close', () => { + // // TODO: log errors + // console.log('removing fifo'); + // }); + // console.log('fifo open'); + // }); + + // const fifo = await makeFIFO('chain.slog'); + // 
console.log('Created FIFO:', fifo.path); + + // const rl = readline.createInterface({ + // input: process.stdin, + // output: process.stdout, + // }); + + // console.log( + // 'Answer', + // await new Promise((resolve) => + // rl.question('Press enter when ready.', resolve), + // ), + // ); + + // const fifoLines = readline.createInterface({ + // input: fifo, + // }); + + // for await (const fifoLine of fifoLines) { + // console.log('fifo: ', fifoLine); + // } + + // fifo.close(); + // rl.close(); + // await pipeResult; + + // const lines = readline.createInterface({ input: testFile }); + + // for await (const line of lines) { + // console.log('read', line); + // } + // testFile.close(); + + // fifo.destroy(); +}; + +export default main; diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js new file mode 100644 index 0000000..595cddc --- /dev/null +++ b/runner/lib/test-local-chain.js @@ -0,0 +1,457 @@ +/* global process Buffer */ + +import chalk from 'chalk'; + +import { dirname } from 'path'; +import { promisify } from 'util'; +import { PassThrough, pipeline as pipelineCallback } from 'stream'; + +// TODO: pass an "httpRequest" as power instead of importing +import http from 'http'; + +import { + childProcessDone, + makeSpawnWithPipedStream, +} from './helpers/child-process.js'; +import LineStreamTransform from './helpers/line-stream-transform.js'; +import { PromiseAllOrErrors, sleep, tryTimeout } from './helpers/async.js'; +import { makeOutputter } from './helpers/outputter.js'; +import { whenStreamSteps } from './helpers/stream-steps.js'; + +const pipeline = promisify(pipelineCallback); + +/** + * @param {string | URL} url + * @param {http.RequestOptions & {body?: Buffer}} options + * @returns {Promise} + */ +const httpRequest = (url, options) => { + return new Promise((resolve, reject) => { + const { body, ...httpOptions } = options; + + const req = http.request(url, httpOptions); + req.on('response', resolve).on('error', reject); + if (body) { + req.write(body); + } + req.end(); + }); +}; + +/** + * + * @param {import('./helpers/process-info.js').ProcessInfo} info + * @param {number} [retries] + * @returns {Promise} + */ +const untilArgv = async (info, retries = 50) => { + const argv = await info.getArgv(); + return ( + argv || + (retries > 0 ? (await sleep(100), untilArgv(info, retries - 1)) : null) + ); +}; + +/** + * + * @param {import('./helpers/process-info.js').ProcessInfo} info + * @param {number} [retries] + * @returns {Promise} + */ +const untilChildren = async (info, retries = 50) => { + const children = await info.getChildren(); + return children.length || retries === 0 + ? 
children + : (await sleep(100), untilChildren(info, retries - 1)); +}; + +/** @typedef {(argv: string[]) => boolean} ArgvMatcher */ + +/** + * @param {(RegExp | null | undefined)[]} argMatchers + * @returns {ArgvMatcher} + */ +const getArgvMatcher = (argMatchers) => (argv) => + argv.every((arg, idx) => { + const matcher = argMatchers[idx]; + return !matcher || matcher.test(arg); + }); + +/** + * @param {ArgvMatcher} argvMatcher + * @returns {ArgvMatcher} + */ +const wrapArgvMatcherIgnoreEnvShebang = (argvMatcher) => (argv) => + argvMatcher(argv) || (/env$/.test(argv[0]) && argvMatcher(argv.slice(1))); + +/** + * @param {import('./helpers/process-info.js').ProcessInfo} launcherInfo + * @param {ArgvMatcher} argvMatcher + */ +const getChildMatchingArgv = async (launcherInfo, argvMatcher) => { + const childrenWithArgv = await Promise.all( + (await untilChildren(launcherInfo)).map(async (info) => ({ + info, + argv: await untilArgv(info), + })), + ); + + const result = childrenWithArgv.find(({ argv }) => argv && argvMatcher(argv)); + + if (result) { + return result.info; + } + + throw new Error("Couldn't find child process"); +}; + +const chainDirPrefix = '_agstate/agoric-servers/local-chain-'; +const loadGenConfig = { faucet: { interval: 30 } }; + +const chainStartRE = /ag-chain-cosmos start --home=(.*)$/; +const chainBlockBeginRE = /block-manager: block (\d+) begin$/; +const clientStartRE = /\bsolo\b\S+entrypoint\.[cm]?js start/; +const clientWalletReadyRE = /(?:Deployed Wallet!|Don't need our provides: wallet)/; +const loadGenStartRE = /deploy.*loadgen\/loop\.js/; +const loadGenReadyRE = /server running/; + +const chainNodeArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( + getArgvMatcher([/node$/, /chain-entrypoint/]), +); +const chainGoArgvMatcher = getArgvMatcher([/sh$/, /ag-chain-cosmos$/]); +/** @param {string[]} argv */ +const chainArgvMatcher = (argv) => + chainNodeArgvMatcher(argv) || chainGoArgvMatcher(argv); +const clientArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( + getArgvMatcher([/node$/, /\bsolo\b\S+entrypoint\.[cm]?js/]), +); + +/** + * + * @param {Object} powers + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {import("./helpers/fs.js").MakeFIFO} powers.makeFIFO Make a FIFO file readable stream + * @param {import("./helpers/fs.js").FindByPrefix} powers.findDirByPrefix + * @param {import("./helpers/procsfs.js").GetProcessInfo} powers.getProcessInfo + * @returns {import("./test-operations.js").TestOperations} + * + */ +export const makeTestOperations = ({ + spawn, + findDirByPrefix, + makeFIFO, + getProcessInfo, +}) => { + // TODO: Print out commands executed + + const spawnWithPipe = makeSpawnWithPipedStream({ spawn, end: false }); + + /** + * @param {string} command + * @param {ReadonlyArray} args + * @param {import("child_process").SpawnOptionsWithStdioTuple<'ignore' | undefined, import("stream").Writable, import("stream").Writable>} options + * @returns {import("child_process").ChildProcessByStdio} + */ + const spawnPrintAndPipeOutput = (command, args, options) => { + const env = (options.env !== process.env ? 
options.env : null) || {}; + const envPairs = Object.entries( + // While prototype properties are used by spawn + // don't clutter the print output with the "inherited" env + Object.getOwnPropertyDescriptors(env), + ) + .filter(([_, desc]) => desc.enumerable) + .map(([name, desc]) => `${name}=${desc.value}`); + + const [_, out, err, ...others] = options.stdio; + + out.write(`${[...envPairs, command, ...args].join(' ')}\n`); + + const childProcess = spawnWithPipe(command, args, { + ...options, + stdio: ['ignore', out, err, ...others], + }); + + // The childProcess does include the out and err streams but spawnWithPipe doesn't have the correct return type + return /** @type {*} */ (childProcess); + }; + + /** + * @param {string} prefix + * @param {import("stream").Writable} stdout + * @param {import("stream").Writable} stderr + * @returns {{stdio: [undefined, import("stream").Writable, import("stream").Writable], console: Console}} + */ + const getConsoleAndStdio = (prefix, stdout, stderr) => { + const { console, out, err } = makeOutputter({ + out: stdout, + err: stderr, + outPrefix: prefix && `${chalk.bold.blue(prefix)}: `, + errPrefix: prefix && `${chalk.bold.red(prefix)}: `, + }); + return { console, stdio: [undefined, out, err] }; + }; + + return harden({ + resetChain: async ({ stdout, stderr }) => { + const { console, stdio } = getConsoleAndStdio( + 'reset-chain', + stdout, + stderr, + ); + + console.log('Starting'); + + const stateDir = dirname(chainDirPrefix); + await childProcessDone( + spawnPrintAndPipeOutput('rm', ['-rf', stateDir], { stdio }), + ); + await childProcessDone( + spawnPrintAndPipeOutput('git', ['checkout', '--', stateDir], { stdio }), + ); + await childProcessDone( + spawnPrintAndPipeOutput('agoric', ['install'], { stdio }), + ); + + console.log('Done'); + }, + runChain: async ({ stdout, stderr, timeout = 30 }) => { + const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr); + + console.log('Starting chain'); + + const slogFifo = await makeFIFO('chain.slog'); + const slogLines = new LineStreamTransform(); + const slogPipeResult = pipeline(slogFifo, slogLines); + + const chainEnv = Object.create(process.env); + chainEnv.SLOGFILE = slogFifo.path; + + const launcherCp = spawnPrintAndPipeOutput( + 'agoric', + ['start', 'local-chain', '--verbose'], + { stdio, env: chainEnv, detached: true }, + ); + + const chainDone = childProcessDone(launcherCp); + + chainDone.then( + () => console.log('Chain exited successfully'), + (error) => console.error('Chain exited with error', error), + ); + + const [chainStarted, firstBlock, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: chainStartRE }, + { matcher: chainBlockBeginRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([ + slogPipeResult, + outputParsed, + chainDone, + ]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await chainStarted; + + console.log('Chain running'); + + const [storageLocation, processInfo] = await PromiseAllOrErrors([ + chainStarted.then(findDirByPrefix), + getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + getChildMatchingArgv(launcherInfo, chainArgvMatcher), + ), + ]); + + const stop = () => process.kill(processInfo.pid); + + return harden({ + stop, + done, + ready: firstBlock, + slogLines: { + [Symbol.asyncIterator]: () => slogLines[Symbol.asyncIterator](), + }, + storageLocation, + processInfo, + }); + }, + async () => { + // Avoid unhandled rejections for promises 
that can no longer be handled + Promise.allSettled([done, firstBlock]); + launcherCp.kill(); + slogFifo.close(); + }, + ); + }, + runClient: async ({ stdout, stderr, timeout = 20 }) => { + const { console, stdio } = getConsoleAndStdio('client', stdout, stderr); + + console.log('Starting client'); + + const launcherCp = spawnPrintAndPipeOutput( + 'agoric', + ['start', 'local-solo'], + { stdio, detached: true }, + ); + + const clientDone = childProcessDone(launcherCp); + + clientDone.then( + () => console.log('Client exited successfully'), + (error) => console.error('Client exited with error', error), + ); + + const [clientStarted, walletReady, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: clientStartRE, resultIndex: -1 }, + { matcher: clientWalletReadyRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([ + outputParsed, + clientDone, + ]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await clientStarted; + + console.log('Client running'); + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + getChildMatchingArgv(launcherInfo, clientArgvMatcher), + ); + + const stop = () => process.kill(processInfo.pid); + + return harden({ + stop, + done, + ready: walletReady, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, walletReady]); + launcherCp.kill(); + }, + ); + }, + runLoadGen: async ({ stdout, stderr, timeout = 10 }) => { + const { console, stdio } = getConsoleAndStdio('load-gen', stdout, stderr); + + console.log('Starting load gen'); + + const loadGenEnv = Object.create(process.env); + // loadGenEnv.DEBUG = 'agoric'; + + const launcherCp = spawnPrintAndPipeOutput( + 'agoric', + ['deploy', 'loadgen/loop.js'], + { stdio, env: loadGenEnv, detached: true }, + ); + + let stopped = false; + const stop = () => { + stopped = true; + launcherCp.kill(); + }; + + // Load gen exit with non-zero code when killed + const loadGenDone = childProcessDone(launcherCp).catch((err) => + stopped ? 
0 : Promise.reject(err), + ); + + loadGenDone.then( + () => console.log('Load gen app stopped successfully'), + (error) => console.error('Load gen app stopped with error', error), + ); + + // The agoric deploy output is currently sent to stderr + // Combine both stderr and stdout in to detect both steps + // accommodating future changes + const combinedOutput = new PassThrough(); + const outLines = new LineStreamTransform({ lineEndings: true }); + const errLines = new LineStreamTransform({ lineEndings: true }); + launcherCp.stdout.pipe(outLines).pipe(combinedOutput); + launcherCp.stderr.pipe(errLines).pipe(combinedOutput); + + const [deploying, tasksReady, outputParsed] = whenStreamSteps( + combinedOutput, + [{ matcher: loadGenStartRE }, { matcher: loadGenReadyRE }], + { + waitEnd: false, + }, + ); + + const cleanCombined = () => { + launcherCp.stdout.unpipe(outLines); + launcherCp.stderr.unpipe(errLines); + }; + outputParsed.then(cleanCombined, cleanCombined); + + const done = PromiseAllOrErrors([ + outputParsed, + loadGenDone, + ]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await deploying; + + console.log('Load gen app running'); + + const ready = tasksReady.then(async () => { + console.log('Making request to start faucet'); + const body = Buffer.from(JSON.stringify(loadGenConfig), 'utf8'); + + const res = await httpRequest('http://127.0.0.1:3352/config', { + body, + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + 'Content-Length': body.byteLength, + }, + }); + // Consume and discard the response + for await (const _ of res); + + if (!res.statusCode || res.statusCode >= 400) { + throw new Error('Could not start faucet'); + } + }); + + return harden({ + stop, + done, + ready, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([loadGenDone, tasksReady]); + launcherCp.kill(); + }, + ); + }, + }); +}; diff --git a/runner/lib/test-operations.d.ts b/runner/lib/test-operations.d.ts new file mode 100644 index 0000000..e76ad6b --- /dev/null +++ b/runner/lib/test-operations.d.ts @@ -0,0 +1,28 @@ +/* eslint-disable no-unused-vars,no-redeclare */ + +export type RunResult = { + stop: () => void; + done: Promise; + ready: Promise; +}; + +export type RunChainInfo = { + slogLines: AsyncIterable; + processInfo: import('./helpers/process-info.js').ProcessInfo; + storageLocation: string; +}; + +export type RunChainResult = RunResult & RunChainInfo; + +interface OperationBaseOption { + readonly stdout: import('stream').Writable; + readonly stderr: import('stream').Writable; + readonly timeout?: number; +} + +export interface TestOperations { + resetChain(options: OperationBaseOption): Promise; + runChain(options: OperationBaseOption): Promise; + runClient(options: OperationBaseOption): Promise; + runLoadGen(options: OperationBaseOption): Promise; +} diff --git a/runner/package.json b/runner/package.json new file mode 100644 index 0000000..6551ec9 --- /dev/null +++ b/runner/package.json @@ -0,0 +1,57 @@ +{ + "name": "loadgen-runner", + "version": "0.1.0", + "private": true, + "description": "Runner for the load generator", + "parsers": { + "js": "mjs" + }, + "bin": "bin/loadgen-runner", + "scripts": { + "build": "exit 0", + "test": "exit 0", + "lint": "yarn lint:js && yarn lint:prettier", + "lint:js": "eslint '**/*.{js,ts}'", + "lint:prettier": "prettier --check '**/*.{js,ts}'", + "lint:types": "tsc -p jsconfig.json", + "lint-fix": "yarn lint-fix:js && yarn lint-fix:prettier", + 
"lint-fix:js": "eslint --fix '**/*.{js,ts}'", + "lint-fix:prettier": "prettier --write '**/*.{js,ts}'" + }, + "devDependencies": { + "eslint": "^7.11.0", + "@agoric/eslint-config": "^0.3.6", + "prettier": "^2.1.2", + "ava": "^3.13.0" + }, + "dependencies": { + "@agoric/assert": "*", + "@agoric/install-ses": "*", + "@agoric/promise-kit": "*", + "@iarna/toml": "^2.2.3", + "agoric": "*", + "anylogger": "^0.21.0", + "chalk": "^2.4.2", + "commander": "^5.0.0", + "deterministic-json": "^1.0.5", + "esm": "^3.2.25", + "inquirer": "^6.3.1", + "readline-transform": "^1.0.0" + }, + "keywords": [], + "author": "Agoric", + "license": "Apache-2.0", + "eslintConfig": { + "extends": [ + "@agoric" + ], + "rules": { + "prettier/prettier": "off" + } + }, + "prettier": { + "trailingComma": "all", + "singleQuote": true + }, + "eslintIgnore": [] +} diff --git a/start.sh b/start.sh index 4621cd5..2b3a415 100755 --- a/start.sh +++ b/start.sh @@ -1,2 +1,30 @@ #! /bin/sh -exec agoric start testnet ${1+"$@"} +set -e -x + +AGORIC_SDK_GITHUB_REPO=https://github.com/Agoric/agoric-sdk +GIT_HEAD="$(git ls-remote $AGORIC_SDK_GITHUB_REPO.git HEAD | awk '{ print substr($1,1,10) }')" + +LOADGEN_DIR=$(pwd) +WORK_DIR=/tmp/agoric-sdk-${GIT_HEAD} + +mkdir -p $WORK_DIR/bin $WORK_DIR/src $WORK_DIR/out $WORK_DIR/go/bin +export GOPATH=$WORK_DIR/go +export PATH=$WORK_DIR/bin:$GOPATH/bin:$PATH + +if [ ! -d $WORK_DIR/src/.git ] +then + git clone $AGORIC_SDK_GITHUB_REPO.git $WORK_DIR/src +fi + +cd $WORK_DIR/src +git fetch +git reset --hard $GIT_HEAD +yarn install +yarn build +make -C packages/cosmic-swingset +rm -f $WORK_DIR/bin/agoric +yarn link-cli $WORK_DIR/bin/agoric + +cd $LOADGEN_DIR +agoric install +./runner/bin/loadgen-runner $WORK_DIR/out 2>&1 | tee $WORK_DIR/out/runner.log diff --git a/yarn.lock b/yarn.lock index fdac896..5dc6b77 100644 --- a/yarn.lock +++ b/yarn.lock @@ -223,11 +223,21 @@ "@agoric/same-structure" "^0.1.19" "@agoric/store" "^0.4.21" +"@agoric/eslint-config@^0.3.6": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@agoric/eslint-config/-/eslint-config-0.3.6.tgz#3c7bcbb1ed1c4584ff4ec530fa252db742a3ef9f" + integrity sha512-MftGwMGKzJiCZcrpGuyL4PppvJLigqOhT1nwOAhkHjVCdHLp5jzu+SDB8tMKYkOz/zaXHOjRU/xld+vzeCS6Ew== + "@agoric/eventual-send@*", "@agoric/eventual-send@^0.13.6", "@agoric/eventual-send@^0.13.7": version "0.13.7" resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.7.tgz#6414ffa8b025a34c7074b03a63eb739698ad6c12" integrity sha512-2R94fuM0PXMvZPOotKoYc7dM2PaR2AJ9ZQqnsj7glec32HC/rrm1tZDqBqdpx81oRmdQbsmceMTTlUtcrHoPEg== +"@agoric/eventual-send@^0.13.14": + version "0.13.14" + resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.14.tgz#36db3b19a60db82bac0dd2fd0a20e49fca93aac6" + integrity sha512-RTjrO6YnIj9c03Q/7gvb+JKBeh26Qo3KtwYb5B5rzYziQGob8VTHiwgKtO8TpQQBGM4Kp0YpnzmTqvestdHq8w== + "@agoric/eventual-send@^0.13.21": version "0.13.21" resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.21.tgz#d715e4353c2c3680714610682d01181398833466" @@ -356,6 +366,13 @@ "@agoric/swingset-vat" "^0.16.0" "@agoric/zoe" "^0.15.0" +"@agoric/promise-kit@*": + version "0.2.13" + resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.13.tgz#7be370c9b40172219379fea2c3a28331eab065d5" + integrity sha512-FDzKogd6jGdzQoY26NuQbUwE8TZSne9d7jwjHCUKEQpzqrho9nnXcJPn18Pvry1tLrm8gRfV8Wj7x7O5O+cCiw== + dependencies: + "@agoric/eventual-send" "^0.13.14" + "@agoric/promise-kit@^0.2.19": version "0.2.19" resolved 
"https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.19.tgz#64207de0a020680ad95575f6a10ebcb322b9f006" @@ -1064,15 +1081,15 @@ minimatch "^3.0.4" strip-json-comments "^3.1.1" -"@eslint/eslintrc@^0.4.2": - version "0.4.2" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.2.tgz#f63d0ef06f5c0c57d76c4ab5f63d3835c51b0179" - integrity sha512-8nmGq/4ycLpIwzvhI4tNDmQztZ8sp+hI7cyG8i1nQDhkAbRzHpXPidRAHlNvCZQpJTKw5ItIpMw9RSToGF00mg== +"@eslint/eslintrc@^0.4.1": + version "0.4.1" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.1.tgz#442763b88cecbe3ee0ec7ca6d6dd6168550cbf14" + integrity sha512-5v7TDE9plVhvxQeWLXDTvFvJBdH6pEsdnl2g/dAptmuFEPedQ4Erq5rsDsX+mvAM610IhNaO2W5V1dOOnDKxkQ== dependencies: ajv "^6.12.4" debug "^4.1.1" espree "^7.3.0" - globals "^13.9.0" + globals "^12.1.0" ignore "^4.0.6" import-fresh "^3.2.1" js-yaml "^3.13.1" @@ -1275,47 +1292,47 @@ integrity sha512-flgpHJjntpBAdJD43ShRosQvNC0ME97DCfGvZEDlAThQmnerRXrLbX6YgzRBQCZTthET9eAWFAMaYP0m0Y4HzQ== "@typescript-eslint/parser@^4.18.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.28.0.tgz#2404c16751a28616ef3abab77c8e51d680a12caa" - integrity sha512-7x4D22oPY8fDaOCvkuXtYYTQ6mTMmkivwEzS+7iml9F9VkHGbbZ3x4fHRwxAb5KeuSkLqfnYjs46tGx2Nour4A== + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.25.0.tgz#6b2cb6285aa3d55bfb263c650739091b0f19aceb" + integrity sha512-OZFa1SKyEJpAhDx8FcbWyX+vLwh7OEtzoo2iQaeWwxucyfbi0mT4DijbOSsTgPKzGHr6GrF2V5p/CEpUH/VBxg== dependencies: - "@typescript-eslint/scope-manager" "4.28.0" - "@typescript-eslint/types" "4.28.0" - "@typescript-eslint/typescript-estree" "4.28.0" - debug "^4.3.1" + "@typescript-eslint/scope-manager" "4.25.0" + "@typescript-eslint/types" "4.25.0" + "@typescript-eslint/typescript-estree" "4.25.0" + debug "^4.1.1" -"@typescript-eslint/scope-manager@4.28.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.28.0.tgz#6a3009d2ab64a30fc8a1e257a1a320067f36a0ce" - integrity sha512-eCALCeScs5P/EYjwo6se9bdjtrh8ByWjtHzOkC4Tia6QQWtQr3PHovxh3TdYTuFcurkYI4rmFsRFpucADIkseg== +"@typescript-eslint/scope-manager@4.25.0": + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.25.0.tgz#9d86a5bcc46ef40acd03d85ad4e908e5aab8d4ca" + integrity sha512-2NElKxMb/0rya+NJG1U71BuNnp1TBd1JgzYsldsdA83h/20Tvnf/HrwhiSlNmuq6Vqa0EzidsvkTArwoq+tH6w== dependencies: - "@typescript-eslint/types" "4.28.0" - "@typescript-eslint/visitor-keys" "4.28.0" + "@typescript-eslint/types" "4.25.0" + "@typescript-eslint/visitor-keys" "4.25.0" -"@typescript-eslint/types@4.28.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.28.0.tgz#a33504e1ce7ac51fc39035f5fe6f15079d4dafb0" - integrity sha512-p16xMNKKoiJCVZY5PW/AfILw2xe1LfruTcfAKBj3a+wgNYP5I9ZEKNDOItoRt53p4EiPV6iRSICy8EPanG9ZVA== +"@typescript-eslint/types@4.25.0": + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.25.0.tgz#0e444a5c5e3c22d7ffa5e16e0e60510b3de5af87" + integrity sha512-+CNINNvl00OkW6wEsi32wU5MhHti2J25TJsJJqgQmJu3B3dYDBcmOxcE5w9cgoM13TrdE/5ND2HoEnBohasxRQ== -"@typescript-eslint/typescript-estree@4.28.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.28.0.tgz#e66d4e5aa2ede66fec8af434898fe61af10c71cf" - integrity 
sha512-m19UQTRtxMzKAm8QxfKpvh6OwQSXaW1CdZPoCaQuLwAq7VZMNuhJmZR4g5281s2ECt658sldnJfdpSZZaxUGMQ== +"@typescript-eslint/typescript-estree@4.25.0": + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.25.0.tgz#942e4e25888736bff5b360d9b0b61e013d0cfa25" + integrity sha512-1B8U07TGNAFMxZbSpF6jqiDs1cVGO0izVkf18Q/SPcUAc9LhHxzvSowXDTvkHMWUVuPpagupaW63gB6ahTXVlg== dependencies: - "@typescript-eslint/types" "4.28.0" - "@typescript-eslint/visitor-keys" "4.28.0" - debug "^4.3.1" - globby "^11.0.3" + "@typescript-eslint/types" "4.25.0" + "@typescript-eslint/visitor-keys" "4.25.0" + debug "^4.1.1" + globby "^11.0.1" is-glob "^4.0.1" - semver "^7.3.5" - tsutils "^3.21.0" + semver "^7.3.2" + tsutils "^3.17.1" -"@typescript-eslint/visitor-keys@4.28.0": - version "4.28.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.28.0.tgz#255c67c966ec294104169a6939d96f91c8a89434" - integrity sha512-PjJyTWwrlrvM5jazxYF5ZPs/nl0kHDZMVbuIcbpawVXaDPelp3+S9zpOz5RmVUfS/fD5l5+ZXNKnWhNYjPzCvw== +"@typescript-eslint/visitor-keys@4.25.0": + version "4.25.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.25.0.tgz#863e7ed23da4287c5b469b13223255d0fde6aaa7" + integrity sha512-AmkqV9dDJVKP/TcZrbf6s6i1zYXt5Hl8qOLrRDTFfRNae4+LB8A4N3i+FLZPW85zIxRy39BgeWOfMS3HoH5ngg== dependencies: - "@typescript-eslint/types" "4.28.0" + "@typescript-eslint/types" "4.25.0" eslint-visitor-keys "^2.0.0" abbrev@1: @@ -1512,7 +1529,7 @@ array-flatten@1.1.1: resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= -array-includes@^3.1.1, array-includes@^3.1.2, array-includes@^3.1.3: +array-includes@^3.1.1, array-includes@^3.1.2: version "3.1.3" resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.3.tgz#c7f619b382ad2afaf5326cddfdc0afc61af7690a" integrity sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A== @@ -1528,7 +1545,7 @@ array-union@^2.1.0: resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -array.prototype.flat@^1.2.3, array.prototype.flat@^1.2.4: +array.prototype.flat@^1.2.3: version "1.2.4" resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz#6ef638b43312bd401b4c6199fdec7e2dc9e9a123" integrity sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg== @@ -2206,13 +2223,6 @@ debug@2.6.9, debug@^2.6.9: dependencies: ms "2.0.0" -debug@^3.2.7: - version "3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== - dependencies: - ms "^2.1.1" - debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: version "4.3.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" @@ -2339,13 +2349,6 @@ doctrine@1.5.0: esutils "^2.0.2" isarray "^1.0.0" -doctrine@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity 
sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== - dependencies: - esutils "^2.0.2" - doctrine@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" @@ -2509,17 +2512,12 @@ eslint-config-airbnb-base@^14.0.0: object.assign "^4.1.2" object.entries "^1.1.2" -eslint-config-jessie@^0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/eslint-config-jessie/-/eslint-config-jessie-0.0.4.tgz#00845c81f8bc1c1d1c2386d5fda3f165532ed8ae" - integrity sha512-jmWq+A7iAKev6nfqpQj/HqC+SMqje7r1HzXqqbInzKOO1OEd9b9MtLJaIe1VpqftS5kDztSubSc9ygzUZCgGaA== - eslint-config-jessie@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/eslint-config-jessie/-/eslint-config-jessie-0.0.6.tgz#429de65983cdfcb161c62a5169605ded6130487b" integrity sha512-L19U3+qWJdhdUjxW7NkkB9X+343MwUB81dplbxwcbBCvrWA8WwmiWYww0g23j4Oz/Vy8KmdW1cyW5Ii6s5IJzQ== -eslint-config-prettier@^6.12.0, eslint-config-prettier@^6.9.0: +eslint-config-prettier@^6.12.0: version "6.15.0" resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-6.15.0.tgz#7f93f6cb7d45a92f1537a70ecc06366e1ac6fed9" integrity sha512-a1+kOYLR8wMGustcgAjdydMsQ2A/2ipRPwRKUmfYaSxc9ZPcrku080Ctl6zrZzZNs/U82MjSv+qKREkoq3bJaw== @@ -2542,14 +2540,6 @@ eslint-module-utils@^2.6.0: debug "^2.6.9" pkg-dir "^2.0.0" -eslint-module-utils@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz#b51be1e473dd0de1c5ea638e22429c2490ea8233" - integrity sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A== - dependencies: - debug "^3.2.7" - pkg-dir "^2.0.0" - eslint-plugin-eslint-comments@^3.1.2: version "3.2.0" resolved "https://registry.yarnpkg.com/eslint-plugin-eslint-comments/-/eslint-plugin-eslint-comments-3.2.0.tgz#9e1cd7b4413526abb313933071d7aba05ca12ffa" @@ -2558,27 +2548,6 @@ eslint-plugin-eslint-comments@^3.1.2: escape-string-regexp "^1.0.5" ignore "^5.0.5" -eslint-plugin-import@^2.20.0: - version "2.23.4" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz#8dceb1ed6b73e46e50ec9a5bb2411b645e7d3d97" - integrity sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ== - dependencies: - array-includes "^3.1.3" - array.prototype.flat "^1.2.4" - debug "^2.6.9" - doctrine "^2.1.0" - eslint-import-resolver-node "^0.3.4" - eslint-module-utils "^2.6.1" - find-up "^2.0.0" - has "^1.0.3" - is-core-module "^2.4.0" - minimatch "^3.0.4" - object.values "^1.1.3" - pkg-up "^2.0.0" - read-pkg-up "^3.0.0" - resolve "^1.20.0" - tsconfig-paths "^3.9.0" - eslint-plugin-import@^2.22.1: version "2.22.1" resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.1.tgz#0896c7e6a0cf44109a2d97b95903c2bb689d7702" @@ -2704,12 +2673,12 @@ eslint@^7.11.0: v8-compile-cache "^2.0.3" eslint@^7.23.0: - version "7.29.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.29.0.tgz#ee2a7648f2e729485e4d0bd6383ec1deabc8b3c0" - integrity sha512-82G/JToB9qIy/ArBzIWG9xvvwL3R86AlCjtGw+A29OMZDqhTybz/MByORSukGxeI+YPCR4coYyITKk8BFH9nDA== + version "7.27.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.27.0.tgz#665a1506d8f95655c9274d84bd78f7166b07e9c7" + integrity sha512-JZuR6La2ZF0UD384lcbnd0Cgg6QJjiCwhMD6eU4h/VGPcVGwawNNzKU41tgokGXnfjOOyI6QIffthhJTPzzuRA== dependencies: "@babel/code-frame" "7.12.11" - "@eslint/eslintrc" "^0.4.2" + 
"@eslint/eslintrc" "^0.4.1" ajv "^6.10.0" chalk "^4.0.0" cross-spawn "^7.0.2" @@ -2726,7 +2695,7 @@ eslint@^7.23.0: fast-deep-equal "^3.1.3" file-entry-cache "^6.0.1" functional-red-black-tree "^1.0.1" - glob-parent "^5.1.2" + glob-parent "^5.0.0" globals "^13.6.0" ignore "^4.0.6" import-fresh "^3.0.0" @@ -2753,7 +2722,7 @@ esm@^3.2.25, esm@^3.2.5: resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10" integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA== -"esm@github:agoric-labs/esm#Agoric-built": +esm@agoric-labs/esm#Agoric-built: version "3.2.25" resolved "https://codeload.github.com/agoric-labs/esm/tar.gz/3603726ad4636b2f865f463188fcaade6375638e" @@ -3125,7 +3094,7 @@ github-from-package@0.0.0: resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" integrity sha1-l/tdlr/eiXMxPyDoKI75oWf6ZM4= -glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.2, glob-parent@~5.1.0: +glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@~5.1.0: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== @@ -3170,13 +3139,6 @@ globals@^13.6.0: dependencies: type-fest "^0.20.2" -globals@^13.9.0: - version "13.9.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.9.0.tgz#4bf2bf635b334a173fb1daf7c5e6b218ecdc06cb" - integrity sha512-74/FduwI/JaIrr1H8e71UbDE+5x7pIPs1C2rrwC52SszOo043CsWOZEMW7o2Y58xwm9b+0RBKDxY5n2sUpEFxA== - dependencies: - type-fest "^0.20.2" - globby@^11.0.1: version "11.0.3" resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" @@ -3189,18 +3151,6 @@ globby@^11.0.1: merge2 "^1.3.0" slash "^3.0.0" -globby@^11.0.3: - version "11.0.4" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5" - integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - got@^9.6.0: version "9.6.0" resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" @@ -3477,13 +3427,6 @@ is-core-module@^2.2.0: dependencies: has "^1.0.3" -is-core-module@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.4.0.tgz#8e9fc8e15027b011418026e98f0e6f4d86305cc1" - integrity sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A== - dependencies: - has "^1.0.3" - is-date-object@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" @@ -3809,16 +3752,6 @@ load-json-file@^2.0.0: pify "^2.0.0" strip-bom "^3.0.0" -load-json-file@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" - integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs= - dependencies: - graceful-fs "^4.1.2" - parse-json "^4.0.0" - pify "^3.0.0" - strip-bom "^3.0.0" - load-json-file@^5.2.0: version "5.3.0" resolved 
"https://registry.yarnpkg.com/load-json-file/-/load-json-file-5.3.0.tgz#4d3c1e01fa1c03ea78a60ac7af932c9ce53403f3" @@ -4086,7 +4019,7 @@ ms@2.1.2: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@^2.1.1, ms@^2.1.3: +ms@^2.1.3: version "2.1.3" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -4249,7 +4182,7 @@ object.entries@^1.1.2: es-abstract "^1.18.0-next.1" has "^1.0.3" -object.values@^1.1.1, object.values@^1.1.3: +object.values@^1.1.1: version "1.1.3" resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.3.tgz#eaa8b1e17589f02f698db093f7c62ee1699742ee" integrity sha512-nkF6PfDB9alkOUxpf1HNm/QlkeW3SReqL5WXeBLpEJJnlPSvRaDQpW3gQTksTN3fgJX4hL42RzKyOin6ff3tyw== @@ -4499,13 +4432,6 @@ path-type@^2.0.0: dependencies: pify "^2.0.0" -path-type@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" - integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== - dependencies: - pify "^3.0.0" - path-type@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" @@ -4526,11 +4452,6 @@ pify@^2.0.0: resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= - pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" @@ -4558,13 +4479,6 @@ pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" -pkg-up@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-2.0.0.tgz#c819ac728059a461cab1c3889a2be3c49a004d7f" - integrity sha1-yBmscoBZpGHKscOImivjxJoATX8= - dependencies: - find-up "^2.1.0" - plur@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/plur/-/plur-4.0.0.tgz#729aedb08f452645fe8c58ef115bf16b0a73ef84" @@ -4612,11 +4526,6 @@ prettier-linter-helpers@^1.0.0: dependencies: fast-diff "^1.1.2" -prettier@^1.18.2: - version "1.19.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.19.1.tgz#f7d7f5ff8a9cd872a7be4ca142095956a60797cb" - integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew== - prettier@^2.1.2: version "2.2.1" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.2.1.tgz#795a1a78dd52f073da0cd42b21f9c91381923ff5" @@ -4729,14 +4638,6 @@ read-pkg-up@^2.0.0: find-up "^2.0.0" read-pkg "^2.0.0" -read-pkg-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07" - integrity sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc= - dependencies: - find-up "^2.0.0" - read-pkg "^3.0.0" - read-pkg@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" @@ -4746,15 +4647,6 @@ read-pkg@^2.0.0: normalize-package-data "^2.3.2" path-type "^2.0.0" -read-pkg@^3.0.0: - version "3.0.0" - 
resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389" - integrity sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k= - dependencies: - load-json-file "^4.0.0" - normalize-package-data "^2.3.2" - path-type "^3.0.0" - read-pkg@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" @@ -4794,6 +4686,11 @@ readdirp@~3.5.0: dependencies: picomatch "^2.2.1" +readline-transform@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/readline-transform/-/readline-transform-1.0.0.tgz#3157f97428acaec0f05a5c1ff2c3120f4e6d904b" + integrity sha512-7KA6+N9IGat52d83dvxnApAWN+MtVb1MiVuMR/cf1O4kYsJG+g/Aav0AHcHKsb6StinayfPLne0+fMX2sOzAKg== + regenerator-runtime@^0.13.4: version "0.13.7" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55" @@ -4886,7 +4783,7 @@ resolve-from@^5.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== -resolve@^1.10.0, resolve@^1.11.0, resolve@^1.11.1, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.19.0, resolve@^1.20.0: +resolve@^1.10.0, resolve@^1.11.0, resolve@^1.11.1, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.19.0: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== @@ -5021,7 +4918,7 @@ semver@^6.0.0, semver@^6.2.0, semver@^6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.1.3, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5: +semver@^7.1.3, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4: version "7.3.5" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== @@ -5513,7 +5410,7 @@ tslib@^1.8.1, tslib@^1.9.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tsutils@^3.21.0: +tsutils@^3.17.1: version "3.21.0" resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== @@ -5580,9 +5477,9 @@ typedarray-to-buffer@^3.1.5: is-typedarray "^1.0.0" typescript@^4.2.3: - version "4.3.4" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.3.4.tgz#3f85b986945bcf31071decdd96cf8bfa65f9dcbc" - integrity sha512-uauPG7XZn9F/mo+7MrsRjyvbxFpzemRjKEZXS4AK83oP2KKOJPvb+9cO/gmnv8arWZvhnjVOXz7B49m1l0e9Ew== + version "4.3.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.3.2.tgz#399ab18aac45802d6f2498de5054fcbbe716a805" + integrity sha512-zZ4hShnmnoVnAHpVHWpTcxdv7dWP60S2FsydQLV8V5PbS3FifjWFFRiHSWpDJahly88PRyV5teTSLoq4eG7mKw== unbox-primitive@^1.0.0: version "1.0.1" From 
b748f23bb4feab60882b0f549e1e9f7028c921e7 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 22 Jun 2021 10:38:36 +0000 Subject: [PATCH 02/34] Docker runner and start script overall --- .dockerignore | 8 ++ Dockerfile | 157 ++++++++++++++++++++++++++++++++++ README.md | 69 ++++++++++++++- library-scripts/go-debian.sh | 129 ++++++++++++++++++++++++++++ runner/lib/helpers/procsfs.js | 2 +- runner/lib/main.js | 93 +------------------- start.sh | 59 +++++++++---- 7 files changed, 407 insertions(+), 110 deletions(-) create mode 100644 .dockerignore create mode 100644 Dockerfile create mode 100644 library-scripts/go-debian.sh diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..b3023cb --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +node_modules +defaults.js +installationConstants.js +_agstate/yarn-links/ +.idea/ +ui/.cache/ +ui/dist/ +./Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..1e1fde8 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,157 @@ +# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.177.0/containers/typescript-node/.devcontainer/base.Dockerfile + +# [Choice] Node.js version: 16, 14, 12 +ARG VARIANT="14-buster" +FROM mcr.microsoft.com/vscode/devcontainers/typescript-node:0-${VARIANT} as base + +# [Optional] Uncomment this section to install additional OS packages. +# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ +# && apt-get -y install --no-install-recommends + +# [Optional] Uncomment if you want to install an additional version of node using nvm +# ARG EXTRA_NODE_VERSION=10 +# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}" + +# [Optional] Uncomment if you want to install more global node packages +# RUN su node -c "npm install -g " + +############################## +# From https://github.com/docker-library/golang/blob/master/1.16/buster/Dockerfile + +ENV PATH /usr/local/go/bin:$PATH + +ENV GOLANG_VERSION 1.16.5 + +RUN set -eux; \ + \ + dpkgArch="$(dpkg --print-architecture)"; \ + url=; \ + case "${dpkgArch##*-}" in \ + 'amd64') \ + url='https://dl.google.com/go/go1.16.5.linux-amd64.tar.gz'; \ + sha256='b12c23023b68de22f74c0524f10b753e7b08b1504cb7e417eccebdd3fae49061'; \ + ;; \ + 'armel') \ + export GOARCH='arm' GOARM='5' GOOS='linux'; \ + ;; \ + 'armhf') \ + url='https://dl.google.com/go/go1.16.5.linux-armv6l.tar.gz'; \ + sha256='93cacacfbe87e3106b5bf5821de106f0f0a43c8bd1029826d44445c15df795a5'; \ + ;; \ + 'arm64') \ + url='https://dl.google.com/go/go1.16.5.linux-arm64.tar.gz'; \ + sha256='d5446b46ef6f36fdffa852f73dfbbe78c1ddf010b99fa4964944b9ae8b4d6799'; \ + ;; \ + 'i386') \ + url='https://dl.google.com/go/go1.16.5.linux-386.tar.gz'; \ + sha256='a37c6b71d0b673fe8dfeb2a8b3de78824f05d680ad32b7ac6b58c573fa6695de'; \ + ;; \ + 'mips64el') \ + export GOARCH='mips64le' GOOS='linux'; \ + ;; \ + 'ppc64el') \ + url='https://dl.google.com/go/go1.16.5.linux-ppc64le.tar.gz'; \ + sha256='fad2da6c86ede8448d2d0e66e1776e2f0ae9169714eade29b9ffbbdede7fc6cc'; \ + ;; \ + 's390x') \ + url='https://dl.google.com/go/go1.16.5.linux-s390x.tar.gz'; \ + sha256='21085f6a3568fae639edf383cce78bcb00d8f415e5e3d7feb04b6124e8e9efc1'; \ + ;; \ + *) echo >&2 "error: unsupported architecture '$dpkgArch' (likely packaging update needed)"; exit 1 ;; \ + esac; \ + build=; \ + if [ -z "$url" ]; then \ +# https://github.com/golang/go/issues/38536#issuecomment-616897960 + build=1; \ + url='https://dl.google.com/go/go1.16.5.src.tar.gz'; \ + 
sha256='7bfa7e5908c7cc9e75da5ddf3066d7cbcf3fd9fa51945851325eebc17f50ba80'; \ + echo >&2; \ + echo >&2 "warning: current architecture ($dpkgArch) does not have a corresponding Go binary release; will be building from source"; \ + echo >&2; \ + fi; \ + \ + wget -O go.tgz.asc "$url.asc" --progress=dot:giga; \ + wget -O go.tgz "$url" --progress=dot:giga; \ + echo "$sha256 *go.tgz" | sha256sum --strict --check -; \ + \ +# https://github.com/golang/go/issues/14739#issuecomment-324767697 + export GNUPGHOME="$(mktemp -d)"; \ +# https://www.google.com/linuxrepositories/ +# gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys 'EB4C 1BFD 4F04 2F6D DDCC EC91 7721 F63B D38B 4796'; \ + curl 'https://dl.google.com/linux/linux_signing_key.pub' | gpg --batch --import; \ + gpg --batch --verify go.tgz.asc go.tgz; \ + gpgconf --kill all; \ + rm -rf "$GNUPGHOME" go.tgz.asc; \ + \ + tar -C /usr/local -xzf go.tgz; \ + rm go.tgz; \ + \ + if [ -n "$build" ]; then \ + savedAptMark="$(apt-mark showmanual)"; \ + apt-get update; \ + apt-get install -y --no-install-recommends golang-go; \ + \ + ( \ + cd /usr/local/go/src; \ +# set GOROOT_BOOTSTRAP + GOHOST* such that we can build Go successfully + export GOROOT_BOOTSTRAP="$(go env GOROOT)" GOHOSTOS="$GOOS" GOHOSTARCH="$GOARCH"; \ + ./make.bash; \ + ); \ + \ + apt-mark auto '.*' > /dev/null; \ + apt-mark manual $savedAptMark > /dev/null; \ + apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \ + rm -rf /var/lib/apt/lists/*; \ + \ +# pre-compile the standard library, just like the official binary release tarballs do + go install std; \ +# go install: -race is only supported on linux/amd64, linux/ppc64le, linux/arm64, freebsd/amd64, netbsd/amd64, darwin/amd64 and windows/amd64 +# go install -race std; \ + \ +# remove a few intermediate / bootstrapping files the official binary release tarballs do not contain + rm -rf \ + /usr/local/go/pkg/*/cmd \ + /usr/local/go/pkg/bootstrap \ + /usr/local/go/pkg/obj \ + /usr/local/go/pkg/tool/*/api \ + /usr/local/go/pkg/tool/*/go_bootstrap \ + /usr/local/go/src/cmd/dist/dist \ + ; \ + fi; \ + \ + go version + +ENV GOPATH /go +ENV PATH $GOPATH/bin:$PATH +RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH" +#WORKDIR $GOPATH + +############################## +# From https://github.com/microsoft/vscode-dev-containers/blob/v0.163.1/containers/go/.devcontainer/base.Dockerfile + +# Install Go tools +ENV GO111MODULE=auto +COPY library-scripts/go-debian.sh /tmp/library-scripts/ +RUN bash /tmp/library-scripts/go-debian.sh "none" "/usr/local/go" "${GOPATH}" "node" "false" \ + && apt-get clean -y && rm -rf /tmp/library-scripts + + +############################## +FROM base + +ARG USER_UID=1000 +ARG USER_GID=$USER_UID + +ENV IS_DOCKER=true +ENV SDK_SRC=/src +ENV OUTPUT_DIR=/out +ENV SDK_REVISION= + +WORKDIR /app +COPY --chown=$USER_UID:$USER_GID . . + +RUN mkdir -p $SDK_SRC $OUTPUT_DIR && chown $USER_UID:$USER_GID $SDK_SRC $OUTPUT_DIR /app + +USER $USER_UID:$USER_GID + +ENTRYPOINT ["/app/start.sh"] \ No newline at end of file diff --git a/README.md b/README.md index b1a5c0d..73ed9be 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,67 @@ # Load Generator +## Runner + +### All-in-one Docker + +First build the image: + +```sh +docker build . -t loadgen-runner +``` + +#### Mount points + +- `/out`: directory containing output artifacts +- `/src`: directory containing `agoric-sdk` repo. 
Automatically checked out if not a git repo (empty) + +#### Environment + +- `SDK_REVISION`: The agoric-sdk git revision to checkout for the test + +#### Examples + +```sh +OUTPUT_DIR=$HOME/loadgen-output +mkdir -p $OUTPUT_DIR +docker run --rm -v $OUTPUT_DIR:/out -e SDK_REVISION=fa7ff5e55e loadgen-runner +``` + +```sh +OUTPUT_DIR=$HOME/loadgen-output +mkdir -p $OUTPUT_DIR +docker run --rm -v $OUTPUT_DIR:/out -v ../agoric-sdk:/src loadgen-runner +``` + +### All-in-one Linux Shell + +#### Environment + +- `OUTPUT_DIR`: directory containing output artifacts. Creates temporary folder if not set +- `SDK_DIR`: directory containing `agoric-sdk` repo. Creates temporary folder if not set +- `SDK_REVISION`: The agoric-sdk git revision to checkout for the test, if no existing repo found. Remote head if not set + +#### Examples + +```sh +SDK_REVISION=fa7ff5e55e OUTPUT_DIR=$HOME/loadgen-output ./start.sh +``` + +```sh +SDK_DIR=../agoric-sdk ./start.sh +``` + +### Direct linux shell + +Assuming the agoric-sdk and cosmic-swingset are built and installed, and the agoric cli is available in PATH. + +```sh +mkdir -p $HOME/loadgen-output +./runner/bin/loadgen-runner $HOME/loadgen-output +``` + +## Manual + In one terminal: ```sh @@ -42,12 +104,13 @@ curl -X PUT --data '{"faucet":{"interval":60}}' http://127.0.0.1:3352/config ``` To disable all generators: + ```sh curl -X PUT --data '{}' http://127.0.0.1:3352/config ``` The load generators defined so far: -* `faucet`: initialize by creating a `dapp-fungible-faucet` -style mint, then each cycle requests an invitation and completes it, adding 1000 Tokens to Bob's Purse. Takes 4 round-trips to complete. -* `amm`: initialize by selling half our BLD to get RUN, then record the balances. Each cycle sells 1% of the recorded BLD to get RUN, then sells 1% of the recorded RUN to get BLD. Because of fees, the total available will drop slowly over time. -* `vault`: initialize by recording our BLD balance and the BLD/RUN price. Each cycle deposits 1% of the recorded BLD balance and borrows half its value in RUN, then pays back the loan and recovers the BLD (less fees). +- `faucet`: initialize by creating a `dapp-fungible-faucet` -style mint, then each cycle requests an invitation and completes it, adding 1000 Tokens to Bob's Purse. Takes 4 round-trips to complete. +- `amm`: initialize by selling half our BLD to get RUN, then record the balances. Each cycle sells 1% of the recorded BLD to get RUN, then sells 1% of the recorded RUN to get BLD. Because of fees, the total available will drop slowly over time. +- `vault`: initialize by recording our BLD balance and the BLD/RUN price. Each cycle deposits 1% of the recorded BLD balance and borrows half its value in RUN, then pays back the loan and recovers the BLD (less fees). diff --git a/library-scripts/go-debian.sh b/library-scripts/go-debian.sh new file mode 100644 index 0000000..d0f22a9 --- /dev/null +++ b/library-scripts/go-debian.sh @@ -0,0 +1,129 @@ +#!/usr/bin/env bash +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+#------------------------------------------------------------------------------------------------------------- +# +# Docs: https://github.com/microsoft/vscode-dev-containers/blob/master/script-library/docs/go.md +# +# Syntax: ./go-debian.sh [Go version] [GOROOT] [GOPATH] [non-root user] [Add GOPATH, GOROOT to rc files flag] [Install tools flag] + +TARGET_GO_VERSION=${1:-"latest"} +TARGET_GOROOT=${2:-"/usr/local/go"} +TARGET_GOPATH=${3:-"/go"} +USERNAME=${4:-"automatic"} +UPDATE_RC=${5:-"true"} +INSTALL_GO_TOOLS=${6:-"true"} + +set -e + +if [ "$(id -u)" -ne 0 ]; then + echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' + exit 1 +fi + +# Ensure that login shells get the correct path if the user updated the PATH using ENV. +rm -f /etc/profile.d/00-restore-env.sh +echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh +chmod +x /etc/profile.d/00-restore-env.sh + +# Determine the appropriate non-root user +if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then + USERNAME="" + POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") + for CURRENT_USER in ${POSSIBLE_USERS[@]}; do + if id -u ${CURRENT_USER} > /dev/null 2>&1; then + USERNAME=${CURRENT_USER} + break + fi + done + if [ "${USERNAME}" = "" ]; then + USERNAME=root + fi +elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then + USERNAME=root +fi + +function updaterc() { + if [ "${UPDATE_RC}" = "true" ]; then + echo "Updating /etc/bash.bashrc and /etc/zsh/zshrc..." + echo -e "$1" >> /etc/bash.bashrc + if [ -f "/etc/zsh/zshrc" ]; then + echo -e "$1" >> /etc/zsh/zshrc + fi + fi +} + +export DEBIAN_FRONTEND=noninteractive + +# Install curl, tar, git, other dependencies if missing +if ! dpkg -s curl ca-certificates tar git g++ gcc libc6-dev make pkg-config > /dev/null 2>&1; then + if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then + apt-get update + fi + apt-get -y install --no-install-recommends curl ca-certificates tar git g++ gcc libc6-dev make pkg-config +fi + +# Get latest version number if latest is specified +if [ "${TARGET_GO_VERSION}" = "latest" ] || [ "${TARGET_GO_VERSION}" = "current" ] || [ "${TARGET_GO_VERSION}" = "lts" ]; then + TARGET_GO_VERSION=$(curl -sSL "https://golang.org/VERSION?m=text" | sed -n '/^go/s///p' ) +fi + +# Install Go +GO_INSTALL_SCRIPT="$(cat < /dev/null 2>&1; then + mkdir -p "${TARGET_GOROOT}" "${TARGET_GOPATH}" + chown -R ${USERNAME} "${TARGET_GOROOT}" "${TARGET_GOPATH}" + su ${USERNAME} -c "${GO_INSTALL_SCRIPT}" +else + echo "Go already installed. Skipping." +fi + +# Install Go tools that are isImportant && !replacedByGopls based on +# https://github.com/golang/vscode-go/blob/0c6dce4a96978f61b022892c1376fe3a00c27677/src/goTools.ts#L188 +# exception: golangci-lint is installed using their install script below. +GO_TOOLS="\ + golang.org/x/tools/gopls \ + honnef.co/go/tools/... \ + golang.org/x/lint/golint \ + github.com/mgechev/revive \ + github.com/uudashr/gopkgs/v2/cmd/gopkgs \ + github.com/ramya-rao-a/go-outline \ + github.com/go-delve/delve/cmd/dlv \ + github.com/golangci/golangci-lint/cmd/golangci-lint" +if [ "${INSTALL_GO_TOOLS}" = "true" ]; then + echo "Installing common Go tools..." 
+ export PATH=${TARGET_GOROOT}/bin:${PATH} + mkdir -p /tmp/gotools /usr/local/etc/vscode-dev-containers ${TARGET_GOPATH}/bin + cd /tmp/gotools + export GOPATH=/tmp/gotools + export GOCACHE=/tmp/gotools/cache + + # Go tools w/module support + export GO111MODULE=on + (echo "${GO_TOOLS}" | xargs -n 1 go get -v )2>&1 | tee -a /usr/local/etc/vscode-dev-containers/go.log + + # Move Go tools into path and clean up + mv /tmp/gotools/bin/* ${TARGET_GOPATH}/bin/ + rm -rf /tmp/gotools + chown -R ${USERNAME} "${TARGET_GOPATH}" +fi + +# Add GOPATH variable and bin directory into PATH in bashrc/zshrc files (unless disabled) +updaterc "$(cat << EOF +export GOPATH="${TARGET_GOPATH}" +if [[ "\${PATH}" != *"\${GOPATH}/bin"* ]]; then export PATH="\${PATH}:\${GOPATH}/bin"; fi +export GOROOT="${TARGET_GOROOT}" +if [[ "\${PATH}" != *"\${GOROOT}/bin"* ]]; then export PATH="\${PATH}:\${GOROOT}/bin"; fi +EOF +)" + +echo "Done!" diff --git a/runner/lib/helpers/procsfs.js b/runner/lib/helpers/procsfs.js index 5ddaeae..c5838d4 100644 --- a/runner/lib/helpers/procsfs.js +++ b/runner/lib/helpers/procsfs.js @@ -9,7 +9,7 @@ import { performance } from 'perf_hooks'; -import { assert } from '@agoric/assert'; +import assert from 'assert'; import { childProcessDone } from './child-process.js'; diff --git a/runner/lib/main.js b/runner/lib/main.js index 5ea7844..4413e7e 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -149,7 +149,7 @@ const main = async (progName, rawArgs, powers) => { await fs.mkdir(outputDir, { recursive: true }); const outputStream = fsStream.createWriteStream( - joinPath(outputDir, 'perf.log'), + joinPath(outputDir, 'perf.jsonl'), ); let currentStage = 0; @@ -557,7 +557,8 @@ const main = async (progName, rawArgs, powers) => { const signal = makePromiseKit(); const onInterrupt = () => signal.reject(new Error('Interrupted')); - process.on('SIGINT', onInterrupt); + process.once('SIGINT', onInterrupt); + process.once('SIGTERM', onInterrupt); await aggregateTryFinally( async () => { @@ -566,6 +567,7 @@ const main = async (progName, rawArgs, powers) => { }, async () => { process.off('SIGINT', onInterrupt); + process.off('SIGTERM', onInterrupt); }, ); }, @@ -643,93 +645,6 @@ const main = async (progName, rawArgs, powers) => { ]); }, ); - - // console.log(await dirDiskUsage(chainDir)); - - // const myInfo = await getProcessInfo(process.pid); - // const parentInfo = await myInfo.getParent(); - // const systemArgs = myInfo.argv || []; - - // stdout(`Hello world: ${progName}: ${rawArgs.join(', ')}\n`); - // stdout(`System args: ${systemArgs.join(' ')}\n`); - // stdout(`Start ticks: ${0 - parentInfo.startTimestamp}\n`); - - // /** - // * - // * @param {import("./helpers/process-info.js").ProcessInfo} info - // * @param {string} indent - // */ - // const printProcess = async (info, indent = '') => { - // stdout( - // `${indent}(${info.pid})[${info.startTimestamp}]: ${ - // (info.argv || ['(null)'])[0] - // }\n`, - // ); - // stdout( - // `${indent} ${JSON.stringify(await info.getUsageSnapshot())}\n`, - // ); - // if (indent.length < 2) { - // for (const child of await info.getChildren()) { - // // eslint-disable-next-line no-await-in-loop - // await printProcess(child, `${indent} `); - // } - // } - // }; - - // await printProcess(await getProcessInfo(1)); - // // await printProcess(await getProcessInfo(108)); - - // const fifo = fsStream.createReadStream('./chain.second.slog', { - // emitClose: true, - // }); - - // fifo - // .once('ready', () => { - // console.log('fifo ready'); - // }) - // .once('open', 
() => { - // fifo.once('close', () => { - // // TODO: log errors - // console.log('removing fifo'); - // }); - // console.log('fifo open'); - // }); - - // const fifo = await makeFIFO('chain.slog'); - // console.log('Created FIFO:', fifo.path); - - // const rl = readline.createInterface({ - // input: process.stdin, - // output: process.stdout, - // }); - - // console.log( - // 'Answer', - // await new Promise((resolve) => - // rl.question('Press enter when ready.', resolve), - // ), - // ); - - // const fifoLines = readline.createInterface({ - // input: fifo, - // }); - - // for await (const fifoLine of fifoLines) { - // console.log('fifo: ', fifoLine); - // } - - // fifo.close(); - // rl.close(); - // await pipeResult; - - // const lines = readline.createInterface({ input: testFile }); - - // for await (const line of lines) { - // console.log('read', line); - // } - // testFile.close(); - - // fifo.destroy(); }; export default main; diff --git a/start.sh b/start.sh index 2b3a415..f98d8fc 100755 --- a/start.sh +++ b/start.sh @@ -1,30 +1,55 @@ -#! /bin/sh +#!/bin/sh set -e -x -AGORIC_SDK_GITHUB_REPO=https://github.com/Agoric/agoric-sdk -GIT_HEAD="$(git ls-remote $AGORIC_SDK_GITHUB_REPO.git HEAD | awk '{ print substr($1,1,10) }')" -LOADGEN_DIR=$(pwd) -WORK_DIR=/tmp/agoric-sdk-${GIT_HEAD} +LOADGEN_DIR="$(pwd)" -mkdir -p $WORK_DIR/bin $WORK_DIR/src $WORK_DIR/out $WORK_DIR/go/bin -export GOPATH=$WORK_DIR/go -export PATH=$WORK_DIR/bin:$GOPATH/bin:$PATH +SDK_REPO="${SDK_REPO:-https://github.com/Agoric/agoric-sdk.git}" -if [ ! -d $WORK_DIR/src/.git ] +# Create temporary directory for SDK source if none provided +if [ -z "${SDK_SRC}" ] then - git clone $AGORIC_SDK_GITHUB_REPO.git $WORK_DIR/src + SDK_REVISION=${SDK_REVISION:-$(git ls-remote ${SDK_REPO} HEAD | awk '{ print substr($1,1,12) }')} + SDK_SRC=/tmp/agoric-sdk-src-${SDK_REVISION} fi +mkdir -p "${SDK_SRC}" -cd $WORK_DIR/src -git fetch -git reset --hard $GIT_HEAD +# Clone the repo if needed +if [ ! -d "${SDK_SRC}/.git" ] +then + git clone "${SDK_REPO}" "${SDK_SRC}" + if [ ! -z "${SDK_REVISION}" ] + then + git -C "${SDK_SRC}" reset --hard ${SDK_REVISION} + fi +fi + +SDK_FULL_REVISION=$(git -C "${SDK_SRC}" rev-parse HEAD) + +if [ ! 
-z "${SDK_REVISION}" -a "${SDK_FULL_REVISION#${SDK_REVISION}}" = "${SDK_FULL_REVISION}" ] +then + echo "Error: SDK is currently checked out at revision ${SDK_FULL_REVISION} but revision ${SDK_REVISION} was specified" + exit 2 +fi + +SDK_REVISION=$(git -C "${SDK_SRC}" rev-parse --short HEAD) + +AGORIC_BIN_DIR=/tmp/agoric-sdk-bin-${SDK_REVISION} +mkdir -p ${AGORIC_BIN_DIR} + +OUTPUT_DIR="${OUTPUT_DIR:-/tmp/agoric-sdk-out-${SDK_REVISION}}" +mkdir -p "${OUTPUT_DIR}" + +export PATH=$AGORIC_BIN_DIR:$PATH + +cd "$SDK_SRC" yarn install yarn build make -C packages/cosmic-swingset -rm -f $WORK_DIR/bin/agoric -yarn link-cli $WORK_DIR/bin/agoric -cd $LOADGEN_DIR +rm -f "${AGORIC_BIN_DIR}/agoric" +yarn link-cli "${AGORIC_BIN_DIR}/agoric" + +cd "$LOADGEN_DIR" agoric install -./runner/bin/loadgen-runner $WORK_DIR/out 2>&1 | tee $WORK_DIR/out/runner.log +exec ./runner/bin/loadgen-runner "${OUTPUT_DIR}" "$@" 2>&1 From 3450fdf5a3204759e9cc4545552af1e180e95ce2 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 22 Jun 2021 21:33:37 +0000 Subject: [PATCH 03/34] fix typing and newer sdk compatibility --- runner/lib/ses-types-patch.d.ts | 11 +++++++++++ runner/lib/test-local-chain.js | 10 +++++++++- runner/package.json | 9 +++++---- yarn.lock | 29 ++++++++++++----------------- 4 files changed, 37 insertions(+), 22 deletions(-) create mode 100644 runner/lib/ses-types-patch.d.ts diff --git a/runner/lib/ses-types-patch.d.ts b/runner/lib/ses-types-patch.d.ts new file mode 100644 index 0000000..da34322 --- /dev/null +++ b/runner/lib/ses-types-patch.d.ts @@ -0,0 +1,11 @@ +/* eslint-disable */ + +// This file is not referenced anywhere but it makes +// tsc happy for missing types in the source of dependencies + +declare global { + var LOCKDOWN_OPTIONS: string | void; + var HandledPromise: HandledPromiseConstructor; +} + +export {}; diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index 595cddc..6e698d1 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -102,6 +102,14 @@ const getChildMatchingArgv = async (launcherInfo, argvMatcher) => { return result.info; } + console.error( + `getChildMatchingArgv: ${ + childrenWithArgv.length + } child process, none of ["${childrenWithArgv + .map(({ argv }) => (argv || ['no argv']).join(' ')) + .join('", "')}"] match expected arguments`, + ); + throw new Error("Couldn't find child process"); }; @@ -118,7 +126,7 @@ const loadGenReadyRE = /server running/; const chainNodeArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( getArgvMatcher([/node$/, /chain-entrypoint/]), ); -const chainGoArgvMatcher = getArgvMatcher([/sh$/, /ag-chain-cosmos$/]); +const chainGoArgvMatcher = getArgvMatcher([/(?:sh|node)$/, /ag-chain-cosmos$/]); /** @param {string[]} argv */ const chainArgvMatcher = (argv) => chainNodeArgvMatcher(argv) || chainGoArgvMatcher(argv); diff --git a/runner/package.json b/runner/package.json index 6551ec9..7f4557c 100644 --- a/runner/package.json +++ b/runner/package.json @@ -10,7 +10,7 @@ "scripts": { "build": "exit 0", "test": "exit 0", - "lint": "yarn lint:js && yarn lint:prettier", + "lint": "yarn lint:js && yarn lint:prettier && yarn lint:types", "lint:js": "eslint '**/*.{js,ts}'", "lint:prettier": "prettier --check '**/*.{js,ts}'", "lint:types": "tsc -p jsconfig.json", @@ -19,10 +19,11 @@ "lint-fix:prettier": "prettier --write '**/*.{js,ts}'" }, "devDependencies": { - "eslint": "^7.11.0", "@agoric/eslint-config": "^0.3.6", - "prettier": "^2.1.2", - "ava": "^3.13.0" + "@types/readline-transform": "^1.0.0", + "ava": 
"^3.13.0", + "eslint": "^7.11.0", + "prettier": "^2.1.2" }, "dependencies": { "@agoric/assert": "*", diff --git a/yarn.lock b/yarn.lock index 5dc6b77..2508c8e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -233,11 +233,6 @@ resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.7.tgz#6414ffa8b025a34c7074b03a63eb739698ad6c12" integrity sha512-2R94fuM0PXMvZPOotKoYc7dM2PaR2AJ9ZQqnsj7glec32HC/rrm1tZDqBqdpx81oRmdQbsmceMTTlUtcrHoPEg== -"@agoric/eventual-send@^0.13.14": - version "0.13.14" - resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.14.tgz#36db3b19a60db82bac0dd2fd0a20e49fca93aac6" - integrity sha512-RTjrO6YnIj9c03Q/7gvb+JKBeh26Qo3KtwYb5B5rzYziQGob8VTHiwgKtO8TpQQBGM4Kp0YpnzmTqvestdHq8w== - "@agoric/eventual-send@^0.13.21": version "0.13.21" resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.21.tgz#d715e4353c2c3680714610682d01181398833466" @@ -366,12 +361,12 @@ "@agoric/swingset-vat" "^0.16.0" "@agoric/zoe" "^0.15.0" -"@agoric/promise-kit@*": - version "0.2.13" - resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.13.tgz#7be370c9b40172219379fea2c3a28331eab065d5" - integrity sha512-FDzKogd6jGdzQoY26NuQbUwE8TZSne9d7jwjHCUKEQpzqrho9nnXcJPn18Pvry1tLrm8gRfV8Wj7x7O5O+cCiw== +"@agoric/promise-kit@*", "@agoric/promise-kit@^0.2.6": + version "0.2.6" + resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.6.tgz#e11ac8fbeb00c144941fbf03f53203d55a5ec8ff" + integrity sha512-6RCPgt5RpRq6/a9NgH/7xAR2GaHTiUn7K+PJuW1vBMPMhn56qxx2Mw5Ieol4+DZ6TIcC4WOBGtCwL6YTAF5DKg== dependencies: - "@agoric/eventual-send" "^0.13.14" + "@agoric/eventual-send" "^0.13.6" "@agoric/promise-kit@^0.2.19": version "0.2.19" @@ -380,13 +375,6 @@ dependencies: "@agoric/eventual-send" "^0.13.21" -"@agoric/promise-kit@^0.2.6": - version "0.2.6" - resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.6.tgz#e11ac8fbeb00c144941fbf03f53203d55a5ec8ff" - integrity sha512-6RCPgt5RpRq6/a9NgH/7xAR2GaHTiUn7K+PJuW1vBMPMhn56qxx2Mw5Ieol4+DZ6TIcC4WOBGtCwL6YTAF5DKg== - dependencies: - "@agoric/eventual-send" "^0.13.6" - "@agoric/registrar@^0.2.7": version "0.2.7" resolved "https://registry.yarnpkg.com/@agoric/registrar/-/registrar-0.2.7.tgz#2e15a6f566fd5054b3b5a3006e666f1b0831f32c" @@ -1272,6 +1260,13 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== +"@types/readline-transform@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@types/readline-transform/-/readline-transform-1.0.0.tgz#c47f1a31606f8c96cdbb3728f3e775617846cbbb" + integrity sha512-Jgc7Sr78d3EJ4AV+GXfs3BHUxPTC2pn4DNeRu78O04UVSOFux8dnMdfKZkdjw739mhjxdot4+HVWTLXjvtpiFQ== + dependencies: + "@types/node" "*" + "@types/resolve@0.0.8": version "0.0.8" resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194" From 1f453312155a964cea6f90ad1205285f815f0c7b Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Wed, 23 Jun 2021 19:16:23 +0000 Subject: [PATCH 04/34] Ignore chain exit code on stop Don't sleep negative time --- runner/lib/main.js | 6 ++++-- runner/lib/test-local-chain.js | 19 +++++++++++++++++-- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index 4413e7e..0fdc181 100644 --- a/runner/lib/main.js +++ 
b/runner/lib/main.js @@ -545,8 +545,10 @@ const main = async (progName, rawArgs, powers) => { await runLoadGenResult.ready; logPerfEvent('loadgen-ready'); - const sleepTime = - stageDuration - (performance.now() - stageStart); + const sleepTime = Math.max( + 0, + stageDuration - (performance.now() - stageStart), + ); stageConsole.log( 'Stage ready, going to sleep for', Math.round(sleepTime / (1000 * 60)), diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index 6e698d1..98b2495 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -240,7 +240,19 @@ export const makeTestOperations = ({ { stdio, env: chainEnv, detached: true }, ); - const chainDone = childProcessDone(launcherCp); + let stopped = false; + + // Chain exit with code 98 when killed + const chainDone = childProcessDone(launcherCp, { + ignoreExitCode: true, + }).then((code) => { + if (code !== 0 && (!stopped || code !== 98)) { + return Promise.reject( + new Error(`Chain exited with non-zero code: ${code}`), + ); + } + return 0; + }); chainDone.then( () => console.log('Chain exited successfully'), @@ -280,7 +292,10 @@ export const makeTestOperations = ({ ), ]); - const stop = () => process.kill(processInfo.pid); + const stop = () => { + stopped = true; + process.kill(processInfo.pid); + }; return harden({ stop, From 0175fd46be405913824a3d934b5417bfb1b932d3 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 24 Jun 2021 00:34:32 +0000 Subject: [PATCH 05/34] Fix realtime measurements Capture chain storage at the end of every stage --- runner/lib/helpers/procsfs.js | 5 ++- runner/lib/main.js | 59 ++++++++++++++++------------------- 2 files changed, 31 insertions(+), 33 deletions(-) diff --git a/runner/lib/helpers/procsfs.js b/runner/lib/helpers/procsfs.js index c5838d4..d977896 100644 --- a/runner/lib/helpers/procsfs.js +++ b/runner/lib/helpers/procsfs.js @@ -241,7 +241,10 @@ export const makeProcfsHelper = ({ fs, spawn, startPid = process.pid }) => { * and performance.now()'s origin for the current process. * * The absolute value of this offset should be below 0.01s - * on a system with somewhat accurate time measurement + * on a system with somewhat accurate time measurement if + * node was the first image executed. If there was a delay + * from process creation to node execution, the value returned + * will capture an approximation of that delay within 10ms. 
* * @returns {Promise} The offset in seconds */ diff --git a/runner/lib/main.js b/runner/lib/main.js index 0fdc181..9de8577 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -98,6 +98,10 @@ const supportedSlogEventTypes = [ 'cosmic-swingset-begin-block', ]; +const slogEventRE = new RegExp( + `^{"time":\\d+(?:\\.\\d+),"type":"(?:${supportedSlogEventTypes.join('|')})"`, +); + /** * * @param {string} progName @@ -153,15 +157,8 @@ const main = async (progName, rawArgs, powers) => { ); let currentStage = 0; - let currentStageElapsedOffset = 0; - /** @type {string} */ - let chainStorageLocation; - - const slogEventRE = new RegExp( - `^{"time":\\d+(?:\\.\\d+),"type":"(?:${supportedSlogEventTypes.join( - '|', - )})"`, - ); + let currentStageElapsedOffsetNs = 0; + const cpuTimeOffset = await getCPUTimeOffset(); /** * @@ -169,13 +166,13 @@ const main = async (progName, rawArgs, powers) => { * @param {Record} [data] */ const logPerfEvent = (eventType, data = {}) => { - const timestamp = Math.round(performance.now() * 1000) / 1e6; + const perfNowNs = performance.now() * 1000; outputStream.write( JSON.stringify( { - timestamp, + timestamp: Math.round(perfNowNs) / 1e6, stage: currentStage, - elapsed: timestamp - currentStageElapsedOffset, + elapsed: Math.round(perfNowNs - currentStageElapsedOffsetNs) / 1e6, time: undefined, // Placeholder to put data.time before type if it exists type: `perf-${eventType}`, ...data, @@ -317,7 +314,8 @@ const main = async (progName, rawArgs, powers) => { ...eventData, real: Math.round( - performance.now() * 1000 - processInfo.startTimestamp * 1e6, + performance.now() * 1000 - + (processInfo.startTimestamp - cpuTimeOffset) * 1e6, ) / 1e6, ...times, ...memory, @@ -483,7 +481,7 @@ const main = async (progName, rawArgs, powers) => { let err; currentStage += 1; - currentStageElapsedOffset = Math.round(performance.now() * 1000) / 1e6; + currentStageElapsedOffsetNs = performance.now() * 1000; ({ console, out, err } = makeConsole(`stage-${currentStage}`)); const { console: stageConsole } = makeConsole('runner', out, err); @@ -496,8 +494,9 @@ const main = async (progName, rawArgs, powers) => { const runChainResult = await runChain({ stdout: out, stderr: err }); logPerfEvent('run-chain-finish'); - currentStageElapsedOffset = runChainResult.processInfo.startTimestamp; - chainStorageLocation = runChainResult.storageLocation; + currentStageElapsedOffsetNs = + (runChainResult.processInfo.startTimestamp - cpuTimeOffset) * 1e6; + const chainStorageLocation = runChainResult.storageLocation; /** @type {import("@agoric/promise-kit").PromiseRecord} */ const { promise: chainFirstBlock, @@ -599,12 +598,21 @@ const main = async (progName, rawArgs, powers) => { await runChainResult.done; logPerfEvent('chain-stopped'); - await monitorChainDone; + await PromiseAllOrErrors([ + childProcessDone( + spawn('tar', [ + '-cSJf', + joinPath(outputDir, `chain-storage-stage-${currentStage}.tar.xz`), + chainStorageLocation, + ]), + ), + monitorChainDone, + ]); }, ); logPerfEvent('stage-finish'); - currentStageElapsedOffset = 0; + currentStageElapsedOffsetNs = 0; }; await aggregateTryFinally( @@ -631,20 +639,7 @@ const main = async (progName, rawArgs, powers) => { async () => { outputStream.end(); - if (chainStorageLocation) { - await childProcessDone( - spawn('tar', [ - '-cSJf', - joinPath(outputDir, 'chain-storage.tar.xz'), - chainStorageLocation, - ]), - ); - } - - await PromiseAllOrErrors([ - // finished(fifo), - finished(outputStream), - ]); + await finished(outputStream); }, ); }; From 
fbf6778519c2c65d9bf3bd1300c2466c0afd873e Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Mon, 28 Jun 2021 22:17:34 +0000 Subject: [PATCH 06/34] Restart chain after first empty block --- runner/lib/main.js | 48 +++++++++++++++++++++++++++++++++++----------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index 9de8577..af82237 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -156,7 +156,7 @@ const main = async (progName, rawArgs, powers) => { joinPath(outputDir, 'perf.jsonl'), ); - let currentStage = 0; + let currentStage = -1; let currentStageElapsedOffsetNs = 0; const cpuTimeOffset = await getCPUTimeOffset(); @@ -186,13 +186,13 @@ const main = async (progName, rawArgs, powers) => { /** * @param {import("./test-operations.js").RunChainInfo} chainInfo * @param {Object} param1 - * @param {() => void} param1.resolveFirstBlock + * @param {() => void} param1.resolveFirstEmptyBlock * @param {import("stream").Writable} param1.out * @param {import("stream").Writable} param1.err */ const monitorChain = async ( { slogLines, storageLocation, processInfo: kernelProcessInfo }, - { resolveFirstBlock, out, err }, + { resolveFirstEmptyBlock, out, err }, ) => { const { console: monitorConsole } = makeConsole('monitor-chain', out, err); @@ -353,6 +353,8 @@ const main = async (progName, rawArgs, powers) => { let slogStart = null; let slogBlocksSeen = 0; + let slogEmptyBlocksSeen = 0; + let slogLinesInBlock = 0; for await (const line of slogLines) { slogOutput.write(line); @@ -371,6 +373,8 @@ const main = async (progName, rawArgs, powers) => { ); } + slogLinesInBlock += 1; + // Avoid JSON parsing lines we don't care about if (!slogEventRE.test(line)) continue; @@ -435,6 +439,7 @@ const main = async (progName, rawArgs, powers) => { if (event.blockHeight === 0) { logPerfEvent('chain-first-init-start'); } + slogLinesInBlock = 0; break; } case 'cosmic-swingset-end-block-finish': { @@ -442,11 +447,28 @@ const main = async (progName, rawArgs, powers) => { // TODO: measure duration from start to finish logPerfEvent('chain-first-init-finish'); } + // Finish line doesn't count + slogLinesInBlock -= 1; + if (slogLinesInBlock === 0) { + if (!slogEmptyBlocksSeen) { + logPerfEvent('stage-first-empty-block', { + block: event.blockHeight, + }); + resolveFirstEmptyBlock(); + } + slogEmptyBlocksSeen += 1; + } + monitorConsole.log( + 'end-block', + event.blockHeight, + 'linesInBlock=', + slogLinesInBlock, + ); break; } case 'cosmic-swingset-begin-block': { if (!slogBlocksSeen) { - logPerfEvent('stage-first-block'); + logPerfEvent('stage-first-block', { block: event.blockHeight }); warnOnRejection( logProcessUsage(), monitorConsole, @@ -454,9 +476,6 @@ const main = async (progName, rawArgs, powers) => { ); } slogBlocksSeen += 1; - if (slogBlocksSeen === 1) { - resolveFirstBlock(); - } monitorConsole.log('begin-block', event.blockHeight); break; } @@ -499,11 +518,11 @@ const main = async (progName, rawArgs, powers) => { const chainStorageLocation = runChainResult.storageLocation; /** @type {import("@agoric/promise-kit").PromiseRecord} */ const { - promise: chainFirstBlock, - resolve: resolveFirstBlock, + promise: chainFirstEmptyBlock, + resolve: resolveFirstEmptyBlock, } = makePromiseKit(); const monitorChainDone = monitorChain(runChainResult, { - resolveFirstBlock, + resolveFirstEmptyBlock, out, err, }); @@ -514,7 +533,7 @@ const main = async (progName, rawArgs, powers) => { logPerfEvent('chain-ready'); stageConsole.log('Chain ready'); - await 
chainFirstBlock; + await chainFirstEmptyBlock; if (!chainOnly) { stageConsole.log('Running client'); @@ -615,6 +634,8 @@ const main = async (progName, rawArgs, powers) => { currentStageElapsedOffsetNs = 0; }; + // Main + await aggregateTryFinally( async () => { let out; @@ -630,10 +651,15 @@ const main = async (progName, rawArgs, powers) => { await resetChain({ stdout: out, stderr: err }); logPerfEvent('reset-chain-finish'); + // Initialize the chain and restart + await runStage({ chainOnly: true }); + + // Run 4 load gen stages while (currentStage < 4) { await runStage(); // eslint-disable-line no-await-in-loop } + // One final restart to capture the replay time await runStage({ chainOnly: true }); }, async () => { From bd5ef6bfd54bd149cfd3794dadc503e648b2229d Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 1 Jul 2021 20:03:31 +0000 Subject: [PATCH 07/34] Add config piping to runner Rename loadGen -> loadgen --- runner/lib/main.js | 167 ++++++++++++++++++++++++++------ runner/lib/test-local-chain.js | 52 +++++----- runner/lib/test-operations.d.ts | 17 ++-- runner/package.json | 4 +- start.sh | 2 +- 5 files changed, 177 insertions(+), 65 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index af82237..4790d21 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -1,8 +1,6 @@ /* global process setInterval clearInterval */ /* eslint-disable no-continue */ -// import { Command } from 'commander'; - import { resolve as resolvePath, join as joinPath, basename } from 'path'; import { performance } from 'perf_hooks'; import zlib from 'zlib'; @@ -12,6 +10,7 @@ import { finished as finishedCallback, } from 'stream'; +import yargsParser from 'yargs-parser'; import chalk from 'chalk'; import { makePromiseKit } from '@agoric/promise-kit'; @@ -31,9 +30,13 @@ import { makeTestOperations } from './test-local-chain.js'; const pipeline = promisify(pipelineCallback); const finished = promisify(finishedCallback); -const monitorInterval = 5 * 60 * 1000; - -const stageDuration = 6 * 60 * 60 * 1000; +const defaultLoadgenConfig = { + vault: { interval: 120 }, + amm: { wait: 60, interval: 120 }, +}; +const defaultMonitorIntervalMinutes = 5; +const defaultStageDurationMinutes = 6 * 60; +const defaultNumberStages = 4 + 2; const vatIdentifierRE = /^(v\d+):(.*)$/; const knownVatsNamesWithoutProcess = ['comms', 'vattp']; @@ -102,6 +105,53 @@ const slogEventRE = new RegExp( `^{"time":\\d+(?:\\.\\d+),"type":"(?:${supportedSlogEventTypes.join('|')})"`, ); +/** + * @param {unknown} maybeObj + * @param {Record} [defaultValue] + */ +const coerceRecordOption = (maybeObj, defaultValue = {}) => { + if (maybeObj == null) { + return defaultValue; + } + + if (typeof maybeObj !== 'object') { + throw new Error('Unexpected object option value'); + } + + return /** @type {Record} */ (maybeObj); +}; + +/** + * @template {boolean | undefined} T + * @param {unknown} maybeBoolValue + * @param {T} defaultValue + * @param {boolean} [assertBool] + */ +const coerceBooleanOption = ( + maybeBoolValue, + defaultValue, + assertBool = true, +) => { + switch (maybeBoolValue) { + case 1: + case true: + case 'true': + return true; + case 0: + case false: + case 'false': + return false; + case null: + case undefined: + return defaultValue; + default: + if (assertBool) { + throw new Error(`Unexpected boolean option value ${maybeBoolValue}`); + } + return defaultValue; + } +}; + /** * * @param {string} progName @@ -117,7 +167,8 @@ const slogEventRE = new RegExp( const main = async (progName, rawArgs, powers) => { const { 
stdout, stderr, fs, fsStream, spawn, tmpDir } = powers; - const outputDir = rawArgs[0] || `run-results-${Date.now()}`; + // TODO: switch to full yargs for documenting output + const argv = yargsParser(rawArgs); const { getProcessInfo, getCPUTimeOffset } = makeProcfsHelper({ fs, spawn }); const { findByPrefix, dirDiskUsage, makeFIFO } = makeFsHelper({ @@ -127,7 +178,7 @@ const main = async (progName, rawArgs, powers) => { tmpDir, }); - const { resetChain, runChain, runClient, runLoadGen } = makeTestOperations({ + const { setupTest, runChain, runClient, runLoadgen } = makeTestOperations({ spawn, findDirByPrefix: findByPrefix, makeFIFO, @@ -149,6 +200,7 @@ const main = async (progName, rawArgs, powers) => { let { console } = makeConsole(); + const outputDir = String(argv.outputDir || `run-results-${Date.now()}`); console.log(`Outputting to ${resolvePath(outputDir)}`); await fs.mkdir(outputDir, { recursive: true }); @@ -156,6 +208,9 @@ const main = async (progName, rawArgs, powers) => { joinPath(outputDir, 'perf.jsonl'), ); + const monitorInterval = + Number(argv.monitorInterval || defaultMonitorIntervalMinutes) * 60 * 1000; + let currentStage = -1; let currentStageElapsedOffsetNs = 0; const cpuTimeOffset = await getCPUTimeOffset(); @@ -213,9 +268,9 @@ const main = async (progName, rawArgs, powers) => { await kernelProcessInfo.getChildren().catch(() => []), ); for (const info of childrenInfos) { - const argv = await info.getArgv(); // eslint-disable-line no-await-in-loop - if (!argv || basename(argv[0]) !== 'xsnap') continue; - const vatIdentifierMatches = vatIdentifierRE.exec(argv[1]); + const vatArgv = await info.getArgv(); // eslint-disable-line no-await-in-loop + if (!vatArgv || basename(vatArgv[0]) !== 'xsnap') continue; + const vatIdentifierMatches = vatIdentifierRE.exec(vatArgv[1]); if (!vatIdentifierMatches) continue; const vatID = vatIdentifierMatches[1]; const vatInfo = vatInfos.get(vatID); @@ -491,15 +546,16 @@ const main = async (progName, rawArgs, powers) => { /** * @param {Object} param0 - * @param {boolean} [param0.chainOnly] + * @param {boolean} param0.chainOnly + * @param {number} param0.duration + * @param {unknown} param0.loadgenConfig */ - const runStage = async ({ chainOnly } = {}) => { + const runStage = async ({ chainOnly, duration, loadgenConfig }) => { /** @type {import("stream").Writable} */ let out; /** @type {import("stream").Writable} */ let err; - currentStage += 1; currentStageElapsedOffsetNs = performance.now() * 1000; ({ console, out, err } = makeConsole(`stage-${currentStage}`)); @@ -508,7 +564,7 @@ const main = async (progName, rawArgs, powers) => { logPerfEvent('stage-start'); const stageStart = performance.now(); - stageConsole.log('Running chain'); + stageConsole.log('Running chain', { chainOnly, duration, loadgenConfig }); logPerfEvent('run-chain-start'); const runChainResult = await runChain({ stdout: out, stderr: err }); logPerfEvent('run-chain-finish'); @@ -552,20 +608,21 @@ const main = async (progName, rawArgs, powers) => { stageConsole.log('Running load gen'); logPerfEvent('run-loadgen-start'); - const runLoadGenResult = await runLoadGen({ + const runLoadgenResult = await runLoadgen({ stdout: out, stderr: err, + config: loadgenConfig, }); logPerfEvent('run-loadgen-finish'); await aggregateTryFinally( async () => { - await runLoadGenResult.ready; + await runLoadgenResult.ready; logPerfEvent('loadgen-ready'); const sleepTime = Math.max( 0, - stageDuration - (performance.now() - stageStart), + duration - (performance.now() - stageStart), ); 
stageConsole.log( 'Stage ready, going to sleep for', @@ -592,10 +649,10 @@ const main = async (progName, rawArgs, powers) => { ); }, async () => { - stageConsole.log('Stopping load-gen'); + stageConsole.log('Stopping loadgen'); - runLoadGenResult.stop(); - await runLoadGenResult.done; + runLoadgenResult.stop(); + await runLoadgenResult.done; logPerfEvent('loadgen-stopped'); }, ); @@ -647,20 +704,68 @@ const main = async (progName, rawArgs, powers) => { // TODO: add other interesting info here }); - logPerfEvent('reset-chain-start'); - await resetChain({ stdout: out, stderr: err }); - logPerfEvent('reset-chain-finish'); + const reset = coerceBooleanOption(argv.reset, true); + const setupConfig = { reset }; + logPerfEvent('setup-test-start', setupConfig); + await setupTest({ stdout: out, stderr: err, config: setupConfig }); + logPerfEvent('setup-test-finish'); - // Initialize the chain and restart - await runStage({ chainOnly: true }); + const stages = + argv.stages != null + ? parseInt(String(argv.stages), 10) + : defaultNumberStages; - // Run 4 load gen stages - while (currentStage < 4) { - await runStage(); // eslint-disable-line no-await-in-loop - } + const stageConfigs = coerceRecordOption(argv.stage); + + const sharedLoadgenConfig = coerceRecordOption( + stageConfigs.loadgen, + defaultLoadgenConfig, + ); + + const sharedStageDurationMinutes = + stageConfigs.duration != null + ? Number(stageConfigs.duration) + : defaultStageDurationMinutes; + + while (currentStage < stages - 1) { + currentStage += 1; - // One final restart to capture the replay time - await runStage({ chainOnly: true }); + const stageConfig = coerceRecordOption(stageConfigs[currentStage]); + + const withLoadgen = coerceBooleanOption( + stageConfig.loadgen, + undefined, + false, + ); + + const loadgenConfig = + withLoadgen == null + ? coerceRecordOption(stageConfig.loadgen, sharedLoadgenConfig) + : sharedLoadgenConfig; + + // By default the first stage will only initialize the chain from genesis + // and the last stage will only capture the chain restart time + // loadgen and chainOnly options overide default + const chainOnly = coerceBooleanOption( + stageConfig.chainOnly, + withLoadgen != null + ? 
!withLoadgen // use boolean loadgen option value as default chainOnly + : loadgenConfig === sharedLoadgenConfig && // user provided stage loadgen config implies chain + (currentStage === 0 || currentStage === stages - 1), + ); + + const duration = + Number(stageConfig.duration || sharedStageDurationMinutes) * + 60 * + 1000; + + // eslint-disable-next-line no-await-in-loop + await runStage({ + chainOnly, + duration, + loadgenConfig, + }); + } }, async () => { outputStream.end(); diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index 98b2495..52233ef 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -114,14 +114,13 @@ const getChildMatchingArgv = async (launcherInfo, argvMatcher) => { }; const chainDirPrefix = '_agstate/agoric-servers/local-chain-'; -const loadGenConfig = { faucet: { interval: 30 } }; const chainStartRE = /ag-chain-cosmos start --home=(.*)$/; const chainBlockBeginRE = /block-manager: block (\d+) begin$/; const clientStartRE = /\bsolo\b\S+entrypoint\.[cm]?js start/; const clientWalletReadyRE = /(?:Deployed Wallet!|Don't need our provides: wallet)/; -const loadGenStartRE = /deploy.*loadgen\/loop\.js/; -const loadGenReadyRE = /server running/; +const loadgenStartRE = /deploy.*loadgen\/loop\.js/; +const loadgenReadyRE = /server running/; const chainNodeArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( getArgvMatcher([/node$/, /chain-entrypoint/]), @@ -200,22 +199,29 @@ export const makeTestOperations = ({ }; return harden({ - resetChain: async ({ stdout, stderr }) => { + setupTest: async ({ stdout, stderr, config = {} }) => { const { console, stdio } = getConsoleAndStdio( - 'reset-chain', + 'setup-test', stdout, stderr, ); console.log('Starting'); - const stateDir = dirname(chainDirPrefix); - await childProcessDone( - spawnPrintAndPipeOutput('rm', ['-rf', stateDir], { stdio }), - ); - await childProcessDone( - spawnPrintAndPipeOutput('git', ['checkout', '--', stateDir], { stdio }), - ); + const { reset } = /** @type {{reset?: boolean}} */ (config); + + if (reset) { + console.log('Resetting state'); + const stateDir = dirname(chainDirPrefix); + await childProcessDone( + spawnPrintAndPipeOutput('rm', ['-rf', stateDir], { stdio }), + ); + await childProcessDone( + spawnPrintAndPipeOutput('git', ['checkout', '--', stateDir], { + stdio, + }), + ); + } await childProcessDone( spawnPrintAndPipeOutput('agoric', ['install'], { stdio }), ); @@ -378,18 +384,18 @@ export const makeTestOperations = ({ }, ); }, - runLoadGen: async ({ stdout, stderr, timeout = 10 }) => { - const { console, stdio } = getConsoleAndStdio('load-gen', stdout, stderr); + runLoadgen: async ({ stdout, stderr, timeout = 10, config = {} }) => { + const { console, stdio } = getConsoleAndStdio('loadgen', stdout, stderr); console.log('Starting load gen'); - const loadGenEnv = Object.create(process.env); - // loadGenEnv.DEBUG = 'agoric'; + const loadgenEnv = Object.create(process.env); + // loadgenEnv.DEBUG = 'agoric'; const launcherCp = spawnPrintAndPipeOutput( 'agoric', ['deploy', 'loadgen/loop.js'], - { stdio, env: loadGenEnv, detached: true }, + { stdio, env: loadgenEnv, detached: true }, ); let stopped = false; @@ -399,11 +405,11 @@ export const makeTestOperations = ({ }; // Load gen exit with non-zero code when killed - const loadGenDone = childProcessDone(launcherCp).catch((err) => + const loadgenDone = childProcessDone(launcherCp).catch((err) => stopped ? 
0 : Promise.reject(err), ); - loadGenDone.then( + loadgenDone.then( () => console.log('Load gen app stopped successfully'), (error) => console.error('Load gen app stopped with error', error), ); @@ -419,7 +425,7 @@ export const makeTestOperations = ({ const [deploying, tasksReady, outputParsed] = whenStreamSteps( combinedOutput, - [{ matcher: loadGenStartRE }, { matcher: loadGenReadyRE }], + [{ matcher: loadgenStartRE }, { matcher: loadgenReadyRE }], { waitEnd: false, }, @@ -433,7 +439,7 @@ export const makeTestOperations = ({ const done = PromiseAllOrErrors([ outputParsed, - loadGenDone, + loadgenDone, ]).then(() => {}); return tryTimeout( @@ -445,7 +451,7 @@ export const makeTestOperations = ({ const ready = tasksReady.then(async () => { console.log('Making request to start faucet'); - const body = Buffer.from(JSON.stringify(loadGenConfig), 'utf8'); + const body = Buffer.from(JSON.stringify(config), 'utf8'); const res = await httpRequest('http://127.0.0.1:3352/config', { body, @@ -471,7 +477,7 @@ export const makeTestOperations = ({ }, async () => { // Avoid unhandled rejections for promises that can no longer be handled - Promise.allSettled([loadGenDone, tasksReady]); + Promise.allSettled([loadgenDone, tasksReady]); launcherCp.kill(); }, ); diff --git a/runner/lib/test-operations.d.ts b/runner/lib/test-operations.d.ts index e76ad6b..c5645bc 100644 --- a/runner/lib/test-operations.d.ts +++ b/runner/lib/test-operations.d.ts @@ -1,15 +1,15 @@ /* eslint-disable no-unused-vars,no-redeclare */ export type RunResult = { - stop: () => void; - done: Promise; - ready: Promise; + readonly stop: () => void; + readonly done: Promise; + readonly ready: Promise; }; export type RunChainInfo = { - slogLines: AsyncIterable; - processInfo: import('./helpers/process-info.js').ProcessInfo; - storageLocation: string; + readonly slogLines: AsyncIterable; + readonly processInfo: import('./helpers/process-info.js').ProcessInfo; + readonly storageLocation: string; }; export type RunChainResult = RunResult & RunChainInfo; @@ -18,11 +18,12 @@ interface OperationBaseOption { readonly stdout: import('stream').Writable; readonly stderr: import('stream').Writable; readonly timeout?: number; + readonly config?: unknown; } export interface TestOperations { - resetChain(options: OperationBaseOption): Promise; + setupTest(options: OperationBaseOption): Promise; runChain(options: OperationBaseOption): Promise; runClient(options: OperationBaseOption): Promise; - runLoadGen(options: OperationBaseOption): Promise; + runLoadgen(options: OperationBaseOption): Promise; } diff --git a/runner/package.json b/runner/package.json index 7f4557c..07bf294 100644 --- a/runner/package.json +++ b/runner/package.json @@ -33,11 +33,11 @@ "agoric": "*", "anylogger": "^0.21.0", "chalk": "^2.4.2", - "commander": "^5.0.0", "deterministic-json": "^1.0.5", "esm": "^3.2.25", "inquirer": "^6.3.1", - "readline-transform": "^1.0.0" + "readline-transform": "^1.0.0", + "yargs-parser": "^20.2.2" }, "keywords": [], "author": "Agoric", diff --git a/start.sh b/start.sh index f98d8fc..bc6c939 100755 --- a/start.sh +++ b/start.sh @@ -52,4 +52,4 @@ yarn link-cli "${AGORIC_BIN_DIR}/agoric" cd "$LOADGEN_DIR" agoric install -exec ./runner/bin/loadgen-runner "${OUTPUT_DIR}" "$@" 2>&1 +exec ./runner/bin/loadgen-runner --output-dir="${OUTPUT_DIR}" "$@" 2>&1 From de1fe691051f067566fba9784fac24a18c2986ef Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 1 Jul 2021 20:41:25 +0000 Subject: [PATCH 08/34] Allow running chainOnly in monitoring mode --- 
runner/lib/main.js | 79 +++++++++++++++++++++++++++++----------------- 1 file changed, 50 insertions(+), 29 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index 4790d21..63f0397 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -564,6 +564,49 @@ const main = async (progName, rawArgs, powers) => { logPerfEvent('stage-start'); const stageStart = performance.now(); + const stageSleep = async () => { + /** @type {Promise} */ + let sleeping; + if (duration < 0) { + // sleeping forever + sleeping = new Promise(() => {}); + stageConsole.log('Stage ready, waiting for end of chain'); + } else { + const sleepTime = Math.max( + 0, + duration - (performance.now() - stageStart), + ); + if (sleepTime) { + sleeping = sleep(sleepTime); + stageConsole.log( + 'Stage ready, going to sleep for', + Math.round(sleepTime / (1000 * 60)), + 'minutes', + ); + } else { + sleeping = Promise.resolve(); + stageConsole.log('Stage ready, no time to sleep, moving on'); + } + } + logPerfEvent('stage-ready'); + + const signal = makePromiseKit(); + const onInterrupt = () => signal.reject(new Error('Interrupted')); + process.once('SIGINT', onInterrupt); + process.once('SIGTERM', onInterrupt); + + await aggregateTryFinally( + async () => { + await Promise.race([sleeping, signal.promise]); + logPerfEvent('stage-shutdown'); + }, + async () => { + process.off('SIGINT', onInterrupt); + process.off('SIGTERM', onInterrupt); + }, + ); + }; + stageConsole.log('Running chain', { chainOnly, duration, loadgenConfig }); logPerfEvent('run-chain-start'); const runChainResult = await runChain({ stdout: out, stderr: err }); @@ -591,7 +634,9 @@ const main = async (progName, rawArgs, powers) => { await chainFirstEmptyBlock; - if (!chainOnly) { + if (chainOnly) { + await stageSleep(); + } else { stageConsole.log('Running client'); logPerfEvent('run-client-start'); const runClientStart = performance.now(); @@ -620,33 +665,7 @@ const main = async (progName, rawArgs, powers) => { await runLoadgenResult.ready; logPerfEvent('loadgen-ready'); - const sleepTime = Math.max( - 0, - duration - (performance.now() - stageStart), - ); - stageConsole.log( - 'Stage ready, going to sleep for', - Math.round(sleepTime / (1000 * 60)), - 'minutes', - ); - logPerfEvent('stage-ready'); - - const signal = makePromiseKit(); - const onInterrupt = () => - signal.reject(new Error('Interrupted')); - process.once('SIGINT', onInterrupt); - process.once('SIGTERM', onInterrupt); - - await aggregateTryFinally( - async () => { - await Promise.race([sleep(sleepTime), signal.promise]); - logPerfEvent('stage-shutdown'); - }, - async () => { - process.off('SIGINT', onInterrupt); - process.off('SIGTERM', onInterrupt); - }, - ); + await stageSleep(); }, async () => { stageConsole.log('Stopping loadgen'); @@ -755,7 +774,9 @@ const main = async (progName, rawArgs, powers) => { ); const duration = - Number(stageConfig.duration || sharedStageDurationMinutes) * + (stageConfig.duration != null + ? 
Number(stageConfig.duration) + : (!chainOnly && sharedStageDurationMinutes) || 0) * 60 * 1000; From 90902ef529dab6a2511a262e56a44524ac50f1bf Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 1 Jul 2021 21:25:09 +0000 Subject: [PATCH 09/34] Move spawnPrint to helpers --- runner/lib/helpers/child-process.js | 46 +++++++++++++++++++++++++++++ runner/lib/test-local-chain.js | 38 ++++-------------------- 2 files changed, 51 insertions(+), 33 deletions(-) diff --git a/runner/lib/helpers/child-process.js b/runner/lib/helpers/child-process.js index 45497be..2e91379 100644 --- a/runner/lib/helpers/child-process.js +++ b/runner/lib/helpers/child-process.js @@ -1,3 +1,5 @@ +/* global process */ + /** * @param {import("child_process").ChildProcess} childProcess * @param {Object} [options] @@ -95,3 +97,47 @@ export const makeSpawnWithPipedStream = ({ spawn, end }) => { // but NodeJS spawn overloads specifically disallow it return /** @type {*} */ (pipedSpawn); }; + +/** + * @callback PipedSpawn + * @param {string} command + * @param {ReadonlyArray} args + * @param {import("child_process").SpawnOptionsWithStdioTuple<'ignore' | undefined, import("stream").Writable, import("stream").Writable>} options + * @returns {import("child_process").ChildProcessByStdio} + */ + +/** + * Makes a verbose spawn that requires a writable stream for stdout and stderr, + * prints out the executed command and pipes child process streams + * + * @param {Object} options + * @param {import("child_process").spawn} options.spawn Node.js spawn + * @param {boolean} [options.end] Pipe option to automatically forward stream end + * @returns {PipedSpawn} + */ +export const makeSpawnWithPrintAndPipeOutput = ({ spawn, end }) => { + const spawnWithPipe = makeSpawnWithPipedStream({ spawn, end }); + + return (command, args, options) => { + const env = (options.env !== process.env ? 
options.env : null) || {}; + const envPairs = Object.entries( + // While prototype properties are used by spawn + // don't clutter the print output with the "inherited" env + Object.getOwnPropertyDescriptors(env), + ) + .filter(([_, desc]) => desc.enumerable) + .map(([name, desc]) => `${name}=${desc.value}`); + + const [_, out, err, ...others] = options.stdio; + + out.write(`${[...envPairs, command, ...args].join(' ')}\n`); + + const childProcess = spawnWithPipe(command, args, { + ...options, + stdio: ['ignore', out, err, ...others], + }); + + // The childProcess does include the out and err streams but spawnWithPipe doesn't have the correct return type + return /** @type {*} */ (childProcess); + }; +}; diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index 52233ef..0aa78dc 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -11,7 +11,7 @@ import http from 'http'; import { childProcessDone, - makeSpawnWithPipedStream, + makeSpawnWithPrintAndPipeOutput, } from './helpers/child-process.js'; import LineStreamTransform from './helpers/line-stream-transform.js'; import { PromiseAllOrErrors, sleep, tryTimeout } from './helpers/async.js'; @@ -149,38 +149,10 @@ export const makeTestOperations = ({ makeFIFO, getProcessInfo, }) => { - // TODO: Print out commands executed - - const spawnWithPipe = makeSpawnWithPipedStream({ spawn, end: false }); - - /** - * @param {string} command - * @param {ReadonlyArray} args - * @param {import("child_process").SpawnOptionsWithStdioTuple<'ignore' | undefined, import("stream").Writable, import("stream").Writable>} options - * @returns {import("child_process").ChildProcessByStdio} - */ - const spawnPrintAndPipeOutput = (command, args, options) => { - const env = (options.env !== process.env ? 
options.env : null) || {}; - const envPairs = Object.entries( - // While prototype properties are used by spawn - // don't clutter the print output with the "inherited" env - Object.getOwnPropertyDescriptors(env), - ) - .filter(([_, desc]) => desc.enumerable) - .map(([name, desc]) => `${name}=${desc.value}`); - - const [_, out, err, ...others] = options.stdio; - - out.write(`${[...envPairs, command, ...args].join(' ')}\n`); - - const childProcess = spawnWithPipe(command, args, { - ...options, - stdio: ['ignore', out, err, ...others], - }); - - // The childProcess does include the out and err streams but spawnWithPipe doesn't have the correct return type - return /** @type {*} */ (childProcess); - }; + const spawnPrintAndPipeOutput = makeSpawnWithPrintAndPipeOutput({ + spawn, + end: false, + }); /** * @param {string} prefix From 296de5d61dc31e47a41f25e3f2209d046d96d093 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 1 Jul 2021 21:36:09 +0000 Subject: [PATCH 10/34] Refactor test helpers in own file --- runner/lib/test-helpers.js | 116 ++++++++++++++++++++++++++++++ runner/lib/test-local-chain.js | 124 +++------------------------------ 2 files changed, 124 insertions(+), 116 deletions(-) create mode 100644 runner/lib/test-helpers.js diff --git a/runner/lib/test-helpers.js b/runner/lib/test-helpers.js new file mode 100644 index 0000000..70aaa22 --- /dev/null +++ b/runner/lib/test-helpers.js @@ -0,0 +1,116 @@ +import chalk from 'chalk'; + +// TODO: pass an "httpRequest" as power instead of importing +import http from 'http'; + +import { sleep } from './helpers/async.js'; +import { makeOutputter } from './helpers/outputter.js'; + +/** + * @param {string | URL} url + * @param {http.RequestOptions & {body?: Buffer}} options + * @returns {Promise} + */ +export const httpRequest = (url, options) => { + return new Promise((resolve, reject) => { + const { body, ...httpOptions } = options; + + const req = http.request(url, httpOptions); + req.on('response', resolve).on('error', reject); + if (body) { + req.write(body); + } + req.end(); + }); +}; + +/** + * + * @param {import('./helpers/process-info.js').ProcessInfo} info + * @param {number} [retries] + * @returns {Promise} + */ +export const untilArgv = async (info, retries = 50) => { + const argv = await info.getArgv(); + return ( + argv || + (retries > 0 ? (await sleep(100), untilArgv(info, retries - 1)) : null) + ); +}; + +/** + * + * @param {import('./helpers/process-info.js').ProcessInfo} info + * @param {number} [retries] + * @returns {Promise} + */ +export const untilChildren = async (info, retries = 50) => { + const children = await info.getChildren(); + return children.length || retries === 0 + ? 
children + : (await sleep(100), untilChildren(info, retries - 1)); +}; + +/** @typedef {(argv: string[]) => boolean} ArgvMatcher */ + +/** + * @param {(RegExp | null | undefined)[]} argMatchers + * @returns {ArgvMatcher} + */ +export const getArgvMatcher = (argMatchers) => (argv) => + argv.every((arg, idx) => { + const matcher = argMatchers[idx]; + return !matcher || matcher.test(arg); + }); + +/** + * @param {ArgvMatcher} argvMatcher + * @returns {ArgvMatcher} + */ +export const wrapArgvMatcherIgnoreEnvShebang = (argvMatcher) => (argv) => + argvMatcher(argv) || (/env$/.test(argv[0]) && argvMatcher(argv.slice(1))); + +/** + * @param {import('./helpers/process-info.js').ProcessInfo} launcherInfo + * @param {ArgvMatcher} argvMatcher + */ +export const getChildMatchingArgv = async (launcherInfo, argvMatcher) => { + const childrenWithArgv = await Promise.all( + (await untilChildren(launcherInfo)).map(async (info) => ({ + info, + argv: await untilArgv(info), + })), + ); + + const result = childrenWithArgv.find(({ argv }) => argv && argvMatcher(argv)); + + if (result) { + return result.info; + } + + console.error( + `getChildMatchingArgv: ${ + childrenWithArgv.length + } child process, none of ["${childrenWithArgv + .map(({ argv }) => (argv || ['no argv']).join(' ')) + .join('", "')}"] match expected arguments`, + ); + + throw new Error("Couldn't find child process"); +}; + +/** + * @param {string} prefix + * @param {import("stream").Writable} stdout + * @param {import("stream").Writable} stderr + * @returns {{stdio: [undefined, import("stream").Writable, import("stream").Writable], console: Console}} + */ +export const getConsoleAndStdio = (prefix, stdout, stderr) => { + const { console, out, err } = makeOutputter({ + out: stdout, + err: stderr, + outPrefix: prefix && `${chalk.bold.blue(prefix)}: `, + errPrefix: prefix && `${chalk.bold.red(prefix)}: `, + }); + return { console, stdio: [undefined, out, err] }; +}; diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index 0aa78dc..bd4f1a1 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -1,118 +1,26 @@ /* global process Buffer */ -import chalk from 'chalk'; - import { dirname } from 'path'; import { promisify } from 'util'; import { PassThrough, pipeline as pipelineCallback } from 'stream'; -// TODO: pass an "httpRequest" as power instead of importing -import http from 'http'; - import { childProcessDone, makeSpawnWithPrintAndPipeOutput, } from './helpers/child-process.js'; import LineStreamTransform from './helpers/line-stream-transform.js'; -import { PromiseAllOrErrors, sleep, tryTimeout } from './helpers/async.js'; -import { makeOutputter } from './helpers/outputter.js'; +import { PromiseAllOrErrors, tryTimeout } from './helpers/async.js'; import { whenStreamSteps } from './helpers/stream-steps.js'; +import { + getArgvMatcher, + getChildMatchingArgv, + wrapArgvMatcherIgnoreEnvShebang, + httpRequest, + getConsoleAndStdio, +} from './test-helpers.js'; const pipeline = promisify(pipelineCallback); -/** - * @param {string | URL} url - * @param {http.RequestOptions & {body?: Buffer}} options - * @returns {Promise} - */ -const httpRequest = (url, options) => { - return new Promise((resolve, reject) => { - const { body, ...httpOptions } = options; - - const req = http.request(url, httpOptions); - req.on('response', resolve).on('error', reject); - if (body) { - req.write(body); - } - req.end(); - }); -}; - -/** - * - * @param {import('./helpers/process-info.js').ProcessInfo} info - * @param 
{number} [retries] - * @returns {Promise} - */ -const untilArgv = async (info, retries = 50) => { - const argv = await info.getArgv(); - return ( - argv || - (retries > 0 ? (await sleep(100), untilArgv(info, retries - 1)) : null) - ); -}; - -/** - * - * @param {import('./helpers/process-info.js').ProcessInfo} info - * @param {number} [retries] - * @returns {Promise} - */ -const untilChildren = async (info, retries = 50) => { - const children = await info.getChildren(); - return children.length || retries === 0 - ? children - : (await sleep(100), untilChildren(info, retries - 1)); -}; - -/** @typedef {(argv: string[]) => boolean} ArgvMatcher */ - -/** - * @param {(RegExp | null | undefined)[]} argMatchers - * @returns {ArgvMatcher} - */ -const getArgvMatcher = (argMatchers) => (argv) => - argv.every((arg, idx) => { - const matcher = argMatchers[idx]; - return !matcher || matcher.test(arg); - }); - -/** - * @param {ArgvMatcher} argvMatcher - * @returns {ArgvMatcher} - */ -const wrapArgvMatcherIgnoreEnvShebang = (argvMatcher) => (argv) => - argvMatcher(argv) || (/env$/.test(argv[0]) && argvMatcher(argv.slice(1))); - -/** - * @param {import('./helpers/process-info.js').ProcessInfo} launcherInfo - * @param {ArgvMatcher} argvMatcher - */ -const getChildMatchingArgv = async (launcherInfo, argvMatcher) => { - const childrenWithArgv = await Promise.all( - (await untilChildren(launcherInfo)).map(async (info) => ({ - info, - argv: await untilArgv(info), - })), - ); - - const result = childrenWithArgv.find(({ argv }) => argv && argvMatcher(argv)); - - if (result) { - return result.info; - } - - console.error( - `getChildMatchingArgv: ${ - childrenWithArgv.length - } child process, none of ["${childrenWithArgv - .map(({ argv }) => (argv || ['no argv']).join(' ')) - .join('", "')}"] match expected arguments`, - ); - - throw new Error("Couldn't find child process"); -}; - const chainDirPrefix = '_agstate/agoric-servers/local-chain-'; const chainStartRE = /ag-chain-cosmos start --home=(.*)$/; @@ -154,22 +62,6 @@ export const makeTestOperations = ({ end: false, }); - /** - * @param {string} prefix - * @param {import("stream").Writable} stdout - * @param {import("stream").Writable} stderr - * @returns {{stdio: [undefined, import("stream").Writable, import("stream").Writable], console: Console}} - */ - const getConsoleAndStdio = (prefix, stdout, stderr) => { - const { console, out, err } = makeOutputter({ - out: stdout, - err: stderr, - outPrefix: prefix && `${chalk.bold.blue(prefix)}: `, - errPrefix: prefix && `${chalk.bold.red(prefix)}: `, - }); - return { console, stdio: [undefined, out, err] }; - }; - return harden({ setupTest: async ({ stdout, stderr, config = {} }) => { const { console, stdio } = getConsoleAndStdio( From a770765b4b11aa3557877a4383a40af2636c54b8 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 1 Jul 2021 21:44:55 +0000 Subject: [PATCH 11/34] Split loadgen operation into shared file --- runner/lib/test-local-chain.js | 130 +++--------------------------- runner/lib/test-shared-loadgen.js | 117 +++++++++++++++++++++++++++ 2 files changed, 130 insertions(+), 117 deletions(-) create mode 100644 runner/lib/test-shared-loadgen.js diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index bd4f1a1..7024051 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -1,8 +1,8 @@ -/* global process Buffer */ +/* global process */ import { dirname } from 'path'; import { promisify } from 'util'; -import { PassThrough, pipeline as 
pipelineCallback } from 'stream'; +import { pipeline as pipelineCallback } from 'stream'; import { childProcessDone, @@ -15,9 +15,9 @@ import { getArgvMatcher, getChildMatchingArgv, wrapArgvMatcherIgnoreEnvShebang, - httpRequest, getConsoleAndStdio, } from './test-helpers.js'; +import { makeLoadgenOperation } from './test-shared-loadgen.js'; const pipeline = promisify(pipelineCallback); @@ -27,8 +27,6 @@ const chainStartRE = /ag-chain-cosmos start --home=(.*)$/; const chainBlockBeginRE = /block-manager: block (\d+) begin$/; const clientStartRE = /\bsolo\b\S+entrypoint\.[cm]?js start/; const clientWalletReadyRE = /(?:Deployed Wallet!|Don't need our provides: wallet)/; -const loadgenStartRE = /deploy.*loadgen\/loop\.js/; -const loadgenReadyRE = /server running/; const chainNodeArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( getArgvMatcher([/node$/, /chain-entrypoint/]), @@ -57,7 +55,7 @@ export const makeTestOperations = ({ makeFIFO, getProcessInfo, }) => { - const spawnPrintAndPipeOutput = makeSpawnWithPrintAndPipeOutput({ + const pipedSpawn = makeSpawnWithPrintAndPipeOutput({ spawn, end: false, }); @@ -77,18 +75,14 @@ export const makeTestOperations = ({ if (reset) { console.log('Resetting state'); const stateDir = dirname(chainDirPrefix); + await childProcessDone(pipedSpawn('rm', ['-rf', stateDir], { stdio })); await childProcessDone( - spawnPrintAndPipeOutput('rm', ['-rf', stateDir], { stdio }), - ); - await childProcessDone( - spawnPrintAndPipeOutput('git', ['checkout', '--', stateDir], { + pipedSpawn('git', ['checkout', '--', stateDir], { stdio, }), ); } - await childProcessDone( - spawnPrintAndPipeOutput('agoric', ['install'], { stdio }), - ); + await childProcessDone(pipedSpawn('agoric', ['install'], { stdio })); console.log('Done'); }, @@ -104,7 +98,7 @@ export const makeTestOperations = ({ const chainEnv = Object.create(process.env); chainEnv.SLOGFILE = slogFifo.path; - const launcherCp = spawnPrintAndPipeOutput( + const launcherCp = pipedSpawn( 'agoric', ['start', 'local-chain', '--verbose'], { stdio, env: chainEnv, detached: true }, @@ -191,11 +185,10 @@ export const makeTestOperations = ({ console.log('Starting client'); - const launcherCp = spawnPrintAndPipeOutput( - 'agoric', - ['start', 'local-solo'], - { stdio, detached: true }, - ); + const launcherCp = pipedSpawn('agoric', ['start', 'local-solo'], { + stdio, + detached: true, + }); const clientDone = childProcessDone(launcherCp); @@ -248,103 +241,6 @@ export const makeTestOperations = ({ }, ); }, - runLoadgen: async ({ stdout, stderr, timeout = 10, config = {} }) => { - const { console, stdio } = getConsoleAndStdio('loadgen', stdout, stderr); - - console.log('Starting load gen'); - - const loadgenEnv = Object.create(process.env); - // loadgenEnv.DEBUG = 'agoric'; - - const launcherCp = spawnPrintAndPipeOutput( - 'agoric', - ['deploy', 'loadgen/loop.js'], - { stdio, env: loadgenEnv, detached: true }, - ); - - let stopped = false; - const stop = () => { - stopped = true; - launcherCp.kill(); - }; - - // Load gen exit with non-zero code when killed - const loadgenDone = childProcessDone(launcherCp).catch((err) => - stopped ? 
0 : Promise.reject(err), - ); - - loadgenDone.then( - () => console.log('Load gen app stopped successfully'), - (error) => console.error('Load gen app stopped with error', error), - ); - - // The agoric deploy output is currently sent to stderr - // Combine both stderr and stdout in to detect both steps - // accommodating future changes - const combinedOutput = new PassThrough(); - const outLines = new LineStreamTransform({ lineEndings: true }); - const errLines = new LineStreamTransform({ lineEndings: true }); - launcherCp.stdout.pipe(outLines).pipe(combinedOutput); - launcherCp.stderr.pipe(errLines).pipe(combinedOutput); - - const [deploying, tasksReady, outputParsed] = whenStreamSteps( - combinedOutput, - [{ matcher: loadgenStartRE }, { matcher: loadgenReadyRE }], - { - waitEnd: false, - }, - ); - - const cleanCombined = () => { - launcherCp.stdout.unpipe(outLines); - launcherCp.stderr.unpipe(errLines); - }; - outputParsed.then(cleanCombined, cleanCombined); - - const done = PromiseAllOrErrors([ - outputParsed, - loadgenDone, - ]).then(() => {}); - - return tryTimeout( - timeout * 1000, - async () => { - await deploying; - - console.log('Load gen app running'); - - const ready = tasksReady.then(async () => { - console.log('Making request to start faucet'); - const body = Buffer.from(JSON.stringify(config), 'utf8'); - - const res = await httpRequest('http://127.0.0.1:3352/config', { - body, - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - 'Content-Length': body.byteLength, - }, - }); - // Consume and discard the response - for await (const _ of res); - - if (!res.statusCode || res.statusCode >= 400) { - throw new Error('Could not start faucet'); - } - }); - - return harden({ - stop, - done, - ready, - }); - }, - async () => { - // Avoid unhandled rejections for promises that can no longer be handled - Promise.allSettled([loadgenDone, tasksReady]); - launcherCp.kill(); - }, - ); - }, + runLoadgen: makeLoadgenOperation({ pipedSpawn }), }); }; diff --git a/runner/lib/test-shared-loadgen.js b/runner/lib/test-shared-loadgen.js new file mode 100644 index 0000000..1e7d76b --- /dev/null +++ b/runner/lib/test-shared-loadgen.js @@ -0,0 +1,117 @@ +/* global process Buffer */ + +import { PassThrough } from 'stream'; + +import { childProcessDone } from './helpers/child-process.js'; +import LineStreamTransform from './helpers/line-stream-transform.js'; +import { PromiseAllOrErrors, tryTimeout } from './helpers/async.js'; +import { whenStreamSteps } from './helpers/stream-steps.js'; +import { httpRequest, getConsoleAndStdio } from './test-helpers.js'; + +const loadgenStartRE = /deploy.*loadgen\/loop\.js/; +const loadgenReadyRE = /server running/; + +/** + * + * @param {Object} powers + * @param {import("./helpers/child-process.js").PipedSpawn} powers.pipedSpawn Spawn with piped output + * @returns {import("./test-operations.js").TestOperations['runLoadgen']} + * + */ +export const makeLoadgenOperation = ({ pipedSpawn }) => { + return harden(async ({ stdout, stderr, timeout = 10, config = {} }) => { + const { console, stdio } = getConsoleAndStdio('loadgen', stdout, stderr); + + console.log('Starting loadgen'); + + const loadgenEnv = Object.create(process.env); + // loadgenEnv.DEBUG = 'agoric'; + + const launcherCp = pipedSpawn('agoric', ['deploy', 'loadgen/loop.js'], { + stdio, + env: loadgenEnv, + detached: true, + }); + + let stopped = false; + const stop = () => { + stopped = true; + launcherCp.kill(); + }; + + // Load gen exit with non-zero code when killed + const loadgenDone = 
childProcessDone(launcherCp).catch((err) => + stopped ? 0 : Promise.reject(err), + ); + + loadgenDone.then( + () => console.log('Load gen app stopped successfully'), + (error) => console.error('Load gen app stopped with error', error), + ); + + // The agoric deploy output is currently sent to stderr + // Combine both stderr and stdout in to detect both steps + // accommodating future changes + const combinedOutput = new PassThrough(); + const outLines = new LineStreamTransform({ lineEndings: true }); + const errLines = new LineStreamTransform({ lineEndings: true }); + launcherCp.stdout.pipe(outLines).pipe(combinedOutput); + launcherCp.stderr.pipe(errLines).pipe(combinedOutput); + + const [deploying, tasksReady, outputParsed] = whenStreamSteps( + combinedOutput, + [{ matcher: loadgenStartRE }, { matcher: loadgenReadyRE }], + { + waitEnd: false, + }, + ); + + const cleanCombined = () => { + launcherCp.stdout.unpipe(outLines); + launcherCp.stderr.unpipe(errLines); + }; + outputParsed.then(cleanCombined, cleanCombined); + + const done = PromiseAllOrErrors([outputParsed, loadgenDone]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await deploying; + + console.log('Load gen app running'); + + const ready = tasksReady.then(async () => { + console.log('Making request to start faucet'); + const body = Buffer.from(JSON.stringify(config), 'utf8'); + + const res = await httpRequest('http://127.0.0.1:3352/config', { + body, + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + 'Content-Length': body.byteLength, + }, + }); + // Consume and discard the response + for await (const _ of res); + + if (!res.statusCode || res.statusCode >= 400) { + throw new Error('Could not start faucet'); + } + }); + + return harden({ + stop, + done, + ready, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([loadgenDone, tasksReady]); + launcherCp.kill(); + }, + ); + }); +}; From b55b5e9424f2866ee35d3715720fab3ae1f47ed5 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 2 Jul 2021 00:15:33 +0000 Subject: [PATCH 12/34] Formatting of local chain operations --- runner/lib/test-local-chain.js | 340 ++++++++++++++++----------------- 1 file changed, 170 insertions(+), 170 deletions(-) diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index 7024051..1e55169 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -60,187 +60,187 @@ export const makeTestOperations = ({ end: false, }); - return harden({ - setupTest: async ({ stdout, stderr, config = {} }) => { - const { console, stdio } = getConsoleAndStdio( - 'setup-test', - stdout, - stderr, - ); - - console.log('Starting'); - - const { reset } = /** @type {{reset?: boolean}} */ (config); - - if (reset) { - console.log('Resetting state'); - const stateDir = dirname(chainDirPrefix); - await childProcessDone(pipedSpawn('rm', ['-rf', stateDir], { stdio })); - await childProcessDone( - pipedSpawn('git', ['checkout', '--', stateDir], { - stdio, - }), - ); - } - await childProcessDone(pipedSpawn('agoric', ['install'], { stdio })); - - console.log('Done'); - }, - runChain: async ({ stdout, stderr, timeout = 30 }) => { - const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr); - - console.log('Starting chain'); - - const slogFifo = await makeFIFO('chain.slog'); - const slogLines = new LineStreamTransform(); - const slogPipeResult = pipeline(slogFifo, slogLines); - - const chainEnv = 
Object.create(process.env); - chainEnv.SLOGFILE = slogFifo.path; - - const launcherCp = pipedSpawn( - 'agoric', - ['start', 'local-chain', '--verbose'], - { stdio, env: chainEnv, detached: true }, - ); - - let stopped = false; - - // Chain exit with code 98 when killed - const chainDone = childProcessDone(launcherCp, { - ignoreExitCode: true, - }).then((code) => { - if (code !== 0 && (!stopped || code !== 98)) { - return Promise.reject( - new Error(`Chain exited with non-zero code: ${code}`), - ); - } - return 0; - }); - - chainDone.then( - () => console.log('Chain exited successfully'), - (error) => console.error('Chain exited with error', error), + /** @param {import("./test-operations.js").OperationBaseOption & {config?: {reset?: boolean}}} options */ + const setupTest = async ({ stdout, stderr, config: { reset } = {} }) => { + const { console, stdio } = getConsoleAndStdio('setup-test', stdout, stderr); + + console.log('Starting'); + + if (reset) { + console.log('Resetting chain node and client state'); + const stateDir = dirname(chainDirPrefix); + await childProcessDone(pipedSpawn('rm', ['-rf', stateDir], { stdio })); + await childProcessDone( + pipedSpawn('git', ['checkout', '--', stateDir], { + stdio, + }), ); + } + await childProcessDone(pipedSpawn('agoric', ['install'], { stdio })); - const [chainStarted, firstBlock, outputParsed] = whenStreamSteps( - launcherCp.stdout, - [ - { matcher: chainStartRE }, - { matcher: chainBlockBeginRE, resultIndex: -1 }, - ], - { - waitEnd: false, - }, - ); - - const done = PromiseAllOrErrors([ - slogPipeResult, - outputParsed, - chainDone, - ]).then(() => {}); - - return tryTimeout( - timeout * 1000, - async () => { - await chainStarted; - - console.log('Chain running'); - - const [storageLocation, processInfo] = await PromiseAllOrErrors([ - chainStarted.then(findDirByPrefix), - getProcessInfo( - /** @type {number} */ (launcherCp.pid), - ).then((launcherInfo) => - getChildMatchingArgv(launcherInfo, chainArgvMatcher), - ), - ]); - - const stop = () => { - stopped = true; - process.kill(processInfo.pid); - }; - - return harden({ - stop, - done, - ready: firstBlock, - slogLines: { - [Symbol.asyncIterator]: () => slogLines[Symbol.asyncIterator](), - }, - storageLocation, - processInfo, - }); - }, - async () => { - // Avoid unhandled rejections for promises that can no longer be handled - Promise.allSettled([done, firstBlock]); - launcherCp.kill(); - slogFifo.close(); - }, - ); - }, - runClient: async ({ stdout, stderr, timeout = 20 }) => { - const { console, stdio } = getConsoleAndStdio('client', stdout, stderr); - - console.log('Starting client'); + console.log('Done'); + }; - const launcherCp = pipedSpawn('agoric', ['start', 'local-solo'], { - stdio, - detached: true, - }); + /** @param {import("./test-operations.js").OperationBaseOption} options */ + const runChain = async ({ stdout, stderr, timeout = 30 }) => { + const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr); - const clientDone = childProcessDone(launcherCp); + console.log('Starting chain'); - clientDone.then( - () => console.log('Client exited successfully'), - (error) => console.error('Client exited with error', error), - ); - - const [clientStarted, walletReady, outputParsed] = whenStreamSteps( - launcherCp.stdout, - [ - { matcher: clientStartRE, resultIndex: -1 }, - { matcher: clientWalletReadyRE, resultIndex: -1 }, - ], - { - waitEnd: false, - }, - ); + const slogFifo = await makeFIFO('chain.slog'); + const slogLines = new LineStreamTransform(); + const slogPipeResult = 
pipeline(slogFifo, slogLines); - const done = PromiseAllOrErrors([ - outputParsed, - clientDone, - ]).then(() => {}); + const chainEnv = Object.create(process.env); + chainEnv.SLOGFILE = slogFifo.path; - return tryTimeout( - timeout * 1000, - async () => { - await clientStarted; + const launcherCp = pipedSpawn( + 'agoric', + ['start', 'local-chain', '--verbose'], + { stdio, env: chainEnv, detached: true }, + ); - console.log('Client running'); + let stopped = false; - const processInfo = await getProcessInfo( + // Chain exit with code 98 when killed + const chainDone = childProcessDone(launcherCp, { + ignoreExitCode: true, + }).then((code) => { + if (code !== 0 && (!stopped || code !== 98)) { + return Promise.reject( + new Error(`Chain exited with non-zero code: ${code}`), + ); + } + return 0; + }); + + chainDone.then( + () => console.log('Chain exited successfully'), + (error) => console.error('Chain exited with error', error), + ); + + const [chainStarted, firstBlock, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: chainStartRE }, + { matcher: chainBlockBeginRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([ + slogPipeResult, + outputParsed, + chainDone, + ]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await chainStarted; + + console.log('Chain running'); + + const [storageLocation, processInfo] = await PromiseAllOrErrors([ + chainStarted.then(findDirByPrefix), + getProcessInfo( /** @type {number} */ (launcherCp.pid), ).then((launcherInfo) => - getChildMatchingArgv(launcherInfo, clientArgvMatcher), - ); - - const stop = () => process.kill(processInfo.pid); - - return harden({ - stop, - done, - ready: walletReady, - }); - }, - async () => { - // Avoid unhandled rejections for promises that can no longer be handled - Promise.allSettled([done, walletReady]); - launcherCp.kill(); - }, - ); - }, + getChildMatchingArgv(launcherInfo, chainArgvMatcher), + ), + ]); + + const stop = () => { + stopped = true; + process.kill(processInfo.pid); + }; + + return harden({ + stop, + done, + ready: firstBlock, + slogLines: { + [Symbol.asyncIterator]: () => slogLines[Symbol.asyncIterator](), + }, + storageLocation, + processInfo, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, firstBlock]); + launcherCp.kill(); + slogFifo.close(); + }, + ); + }; + + /** @param {import("./test-operations.js").OperationBaseOption} options */ + const runClient = async ({ stdout, stderr, timeout = 20 }) => { + const { console, stdio } = getConsoleAndStdio('client', stdout, stderr); + + console.log('Starting client'); + + const launcherCp = pipedSpawn('agoric', ['start', 'local-solo'], { + stdio, + detached: true, + }); + + const clientDone = childProcessDone(launcherCp); + + clientDone.then( + () => console.log('Client exited successfully'), + (error) => console.error('Client exited with error', error), + ); + + const [clientStarted, walletReady, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: clientStartRE, resultIndex: -1 }, + { matcher: clientWalletReadyRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([outputParsed, clientDone]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await clientStarted; + + console.log('Client running'); + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + 
getChildMatchingArgv(launcherInfo, clientArgvMatcher), + ); + + const stop = () => process.kill(processInfo.pid); + + return harden({ + stop, + done, + ready: walletReady, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, walletReady]); + launcherCp.kill(); + }, + ); + }; + + return harden({ + setupTest, + runChain, + runClient, runLoadgen: makeLoadgenOperation({ pipedSpawn }), }); }; From 78a876ef4fe97fca3251bfd4f23b53015836b7c5 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 2 Jul 2021 08:02:22 +0000 Subject: [PATCH 13/34] Tweak helpers --- runner/lib/helpers/async.d.ts | 2 +- runner/lib/helpers/async.js | 23 +++++++++++++---------- runner/lib/test-helpers.js | 24 ++++++++++++++++++++---- 3 files changed, 34 insertions(+), 15 deletions(-) diff --git a/runner/lib/helpers/async.d.ts b/runner/lib/helpers/async.d.ts index b1492e8..1a1e4f8 100644 --- a/runner/lib/helpers/async.d.ts +++ b/runner/lib/helpers/async.d.ts @@ -23,7 +23,7 @@ export declare function aggregateTryFinally( export declare function tryTimeout( timeoutMs: number, trier: () => Promise, - canceler?: () => Promise, + onError?: () => Promise, ): Promise; export declare function PromiseAllOrErrors< diff --git a/runner/lib/helpers/async.js b/runner/lib/helpers/async.js index 148d091..890e187 100644 --- a/runner/lib/helpers/async.js +++ b/runner/lib/helpers/async.js @@ -3,9 +3,12 @@ /** @type {import("./async.js").sleep} */ export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); -/** @param {Error[]} errors */ -const makeAggregateError = (errors) => { - const err = new Error(); +/** + * @param {Error[]} errors + * @param {string} [message] + */ +const makeAggregateError = (errors, message) => { + const err = new Error(message); Object.defineProperties(err, { name: { value: 'AggregateError', @@ -68,8 +71,8 @@ export const warnOnRejection = (operation, console, ...messages) => { /** @type {import("./async.js").aggregateTryFinally} */ export const aggregateTryFinally = async (trier, finalizer) => trier().then( - (result) => finalizer().then(() => result), - (tryError) => + async (result) => finalizer().then(() => result), + async (tryError) => finalizer() .then( () => tryError, @@ -79,19 +82,19 @@ export const aggregateTryFinally = async (trier, finalizer) => ); /** @type {import("./async.js").tryTimeout} */ -export const tryTimeout = async (timeoutMs, trier, canceler) => { +export const tryTimeout = async (timeoutMs, trier, onError) => { const result = Promise.race([ sleep(timeoutMs).then(() => Promise.reject(new Error('Timeout'))), trier(), ]); - return !canceler + return !onError ? 
result - : result.catch((error) => - canceler() + : result.catch(async (error) => + onError() .then( () => error, - (cancelerError) => makeAggregateError([error, cancelerError]), + (cleanupError) => makeAggregateError([error, cleanupError]), ) .then((finalError) => Promise.reject(finalError)), ); diff --git a/runner/lib/test-helpers.js b/runner/lib/test-helpers.js index 70aaa22..c8a9426 100644 --- a/runner/lib/test-helpers.js +++ b/runner/lib/test-helpers.js @@ -2,20 +2,36 @@ import chalk from 'chalk'; // TODO: pass an "httpRequest" as power instead of importing import http from 'http'; +import https from 'https'; import { sleep } from './helpers/async.js'; import { makeOutputter } from './helpers/outputter.js'; +const protocolModules = { + 'http:': http, + 'https:': https, +}; + /** - * @param {string | URL} url - * @param {http.RequestOptions & {body?: Buffer}} options + * @param {string | URL} urlOrString + * @param {http.RequestOptions & {body?: Buffer}} [options] * @returns {Promise} */ -export const httpRequest = (url, options) => { +export const httpRequest = (urlOrString, options = {}) => { return new Promise((resolve, reject) => { + const url = + typeof urlOrString === 'string' ? new URL(urlOrString) : urlOrString; + + if (!(url.protocol in protocolModules)) { + throw new Error(`Invalid protocol ${url.protocol}`); + } + + const protocolModule = + protocolModules[/** @type {keyof protocolModules} */ (url.protocol)]; + const { body, ...httpOptions } = options; - const req = http.request(url, httpOptions); + const req = protocolModule.request(url, httpOptions); req.on('response', resolve).on('error', reject); if (body) { req.write(body); From 359f65e35411af2b962410d566332b00f91bcf1c Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 2 Jul 2021 08:06:28 +0000 Subject: [PATCH 14/34] Add testnet support to runner --- _agstate/agoric-servers/.gitignore | 4 +- runner/lib/main.js | 40 ++- runner/lib/test-testnet.js | 425 +++++++++++++++++++++++++++++ 3 files changed, 458 insertions(+), 11 deletions(-) create mode 100644 runner/lib/test-testnet.js diff --git a/_agstate/agoric-servers/.gitignore b/_agstate/agoric-servers/.gitignore index a8b18cd..757e71c 100644 --- a/_agstate/agoric-servers/.gitignore +++ b/_agstate/agoric-servers/.gitignore @@ -78,7 +78,7 @@ integration-test/transform-tests/output solo dev -testnet +testnet-* chain ve* -local* +local-* diff --git a/runner/lib/main.js b/runner/lib/main.js index 63f0397..efa1c7d 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -25,7 +25,8 @@ import { makeFsHelper } from './helpers/fs.js'; import { makeProcfsHelper } from './helpers/procsfs.js'; import { makeOutputter } from './helpers/outputter.js'; -import { makeTestOperations } from './test-local-chain.js'; +import { makeTestOperations as makeLocalTestOperations } from './test-local-chain.js'; +import { makeTestOperations as makeTestnetTestOperations } from './test-testnet.js'; const pipeline = promisify(pipelineCallback); const finished = promisify(finishedCallback); @@ -178,13 +179,6 @@ const main = async (progName, rawArgs, powers) => { tmpDir, }); - const { setupTest, runChain, runClient, runLoadgen } = makeTestOperations({ - spawn, - findDirByPrefix: findByPrefix, - makeFIFO, - getProcessInfo, - }); - /** * @param {string} [prefix] * @param {import("stream").Writable} [out] @@ -204,6 +198,34 @@ const main = async (progName, rawArgs, powers) => { console.log(`Outputting to ${resolvePath(outputDir)}`); await fs.mkdir(outputDir, { recursive: true }); + let 
makeTestOperations; + /** @type {string} */ + let testnetOrigin; + + switch (argv.profile) { + case null: + case undefined: + case 'local': + makeTestOperations = makeLocalTestOperations; + testnetOrigin = ''; + break; + case 'testnet': + case 'stage': + makeTestOperations = makeTestnetTestOperations; + testnetOrigin = `https://${argv.profile}.agoric.net`; + break; + default: + throw new Error(`Unexpected profile option: ${argv.profile}`); + } + + const { setupTest, runChain, runClient, runLoadgen } = makeTestOperations({ + spawn, + fs, + findDirByPrefix: findByPrefix, + makeFIFO, + getProcessInfo, + }); + const outputStream = fsStream.createWriteStream( joinPath(outputDir, 'perf.jsonl'), ); @@ -724,7 +746,7 @@ const main = async (progName, rawArgs, powers) => { }); const reset = coerceBooleanOption(argv.reset, true); - const setupConfig = { reset }; + const setupConfig = { reset, testnetOrigin }; logPerfEvent('setup-test-start', setupConfig); await setupTest({ stdout: out, stderr: err, config: setupConfig }); logPerfEvent('setup-test-finish'); diff --git a/runner/lib/test-testnet.js b/runner/lib/test-testnet.js new file mode 100644 index 0000000..3a6c1c9 --- /dev/null +++ b/runner/lib/test-testnet.js @@ -0,0 +1,425 @@ +/* global process Buffer */ +/* eslint-disable no-await-in-loop */ + +import { join as joinPath } from 'path'; +import { promisify } from 'util'; +import { pipeline as pipelineCallback } from 'stream'; + +import TOML from '@iarna/toml'; + +import { + childProcessDone, + makeSpawnWithPrintAndPipeOutput, +} from './helpers/child-process.js'; +import LineStreamTransform from './helpers/line-stream-transform.js'; +import { + PromiseAllOrErrors, + tryTimeout, + sleep, + aggregateTryFinally, +} from './helpers/async.js'; +import { whenStreamSteps } from './helpers/stream-steps.js'; +import { + getArgvMatcher, + getChildMatchingArgv, + wrapArgvMatcherIgnoreEnvShebang, + getConsoleAndStdio, + httpRequest, +} from './test-helpers.js'; +import { makeLoadgenOperation } from './test-shared-loadgen.js'; + +const pipeline = promisify(pipelineCallback); + +/** + * @param {string} url + * @returns {Promise} + */ +const fetchAsJSON = async (url) => { + const res = await httpRequest(url); + const chunks = []; + for await (const chunk of res) { + chunks.push(chunk); + } + + if (!res.statusCode || res.statusCode >= 400) { + throw new Error(`HTTP request error: ${res.statusCode}`); + } + + // TODO: Check `res.headers['content-type']` for type and charset + return JSON.parse(Buffer.concat(chunks).toString('utf-8')); +}; + +const clientStateDir = '_agstate/agoric-servers/testnet-8000'; + +const chainSwingSetLaunchRE = /launch-chain: Launching SwingSet kernel$/; +const chainBlockBeginRE = /block-manager: block (\d+) begin$/; +const clientStartRE = /\bsolo\b\S+entrypoint\.[cm]?js.* setup(?: .*)$/; +const clientWalletReadyRE = /(?:Deployed Wallet!|Don't need our provides: wallet)/; +const clientSwingSetReadyRE = /start: swingset running$/; + +const clientArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( + getArgvMatcher([/node$/, /\bsolo\b\S+entrypoint\.[cm]?js/]), +); + +/** + * + * @param {Object} powers + * @param {import("child_process").spawn} powers.spawn Node.js spawn + * @param {import("fs/promises")} powers.fs Node.js promisified fs object + * @param {import("./helpers/fs.js").MakeFIFO} powers.makeFIFO Make a FIFO file readable stream + * @param {import("./helpers/procsfs.js").GetProcessInfo} powers.getProcessInfo + * @returns {import("./test-operations.js").TestOperations} + * + */ +export 
const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => { + const pipedSpawn = makeSpawnWithPrintAndPipeOutput({ + spawn, + end: false, + }); + + const chainStateDir = String( + process.env.AG_CHAIN_COSMOS_HOME || + joinPath(process.env.HOME || '~', '.ag-chain-cosmos'), + ); + + let testnetOrigin = 'https://testnet.agoric.net'; + + /** @param {import("./test-operations.js").OperationBaseOption & {config?: {reset?: boolean, chainOnly?: boolean, testnetOrigin?: string}}} options */ + const setupTest = async ({ + stdout, + stderr, + timeout = 120, + config: { + reset = true, + chainOnly, + testnetOrigin: testnetOriginOption, + } = {}, + }) => { + const { console, stdio } = getConsoleAndStdio('setup-test', stdout, stderr); + + console.log('Starting'); + + if (testnetOriginOption) { + testnetOrigin = testnetOriginOption; + } + + if (reset) { + console.log('Resetting chain node'); + await childProcessDone( + pipedSpawn('rm', ['-rf', chainStateDir], { stdio }), + ); + } + + const chainDirStat = await fs + .stat(chainStateDir) + .catch((err) => (err.code === 'ENOENT' ? null : Promise.reject(err))); + + if (!chainDirStat) { + console.log('Fetching network config and genesis'); + const { + chainName, + peers, + seeds, + } = /** @type {{chainName: string, peers: string[], seeds: string[]}} */ (await fetchAsJSON( + `${testnetOrigin}/network-config`, + )); + const genesis = await fetchAsJSON(`${testnetOrigin}/genesis.json`); + + await childProcessDone( + pipedSpawn( + 'ag-chain-cosmos', + ['init', '--chain-id', chainName, `loadgen-monitor-${Date.now()}`], + { stdio }, + ), + ); + + fs.writeFile( + joinPath(chainStateDir, 'config', 'genesis.json'), + JSON.stringify(genesis), + ); + + await childProcessDone( + pipedSpawn('ag-chain-cosmos', ['unsafe-reset-all'], { stdio }), + ); + + const configPath = joinPath(chainStateDir, 'config', 'config.toml'); + + console.log('Patching config'); + const config = await TOML.parse.async( + await fs.readFile(configPath, 'utf-8'), + ); + const configP2p = /** @type {TOML.JsonMap} */ (config.p2p); + configP2p.persistent_peers = peers.join(','); + configP2p.seeds = seeds.join(','); + delete config.log_level; + await fs.writeFile(configPath, TOML.stringify(config)); + } + + if (reset) { + console.log('Resetting client'); + await childProcessDone( + pipedSpawn('rm', ['-rf', clientStateDir], { stdio }), + ); + + // TODO: start client to provision the first time then kill it + } + + // Make sure client is provisioned + if (chainOnly !== true) { + console.log('Provisioning client'); + + const launcherCp = pipedSpawn( + 'agoric', + ['start', 'testnet', '8000', `${testnetOrigin}/network-config`], + { + stdio, + }, + ); + + const clientDone = childProcessDone(launcherCp); + + const [clientStarted, clientProvisioned, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: clientStartRE, resultIndex: -1 }, + { matcher: clientSwingSetReadyRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + Promise.allSettled([clientProvisioned, outputParsed, clientDone]); + + await aggregateTryFinally( + async () => { + await clientStarted; + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + getChildMatchingArgv(launcherInfo, clientArgvMatcher), + ); + + await aggregateTryFinally( + async () => + tryTimeout(timeout * 1000, async () => clientProvisioned), + async () => { + try { + process.kill(processInfo.pid); + } catch (_) { + // Ignore kill errors + } + }, + ); + + await 
PromiseAllOrErrors([outputParsed, clientDone]).then(() => {}); + }, + async () => { + launcherCp.kill(); + }, + ); + } + + await childProcessDone(pipedSpawn('agoric', ['install'], { stdio })); + + console.log('Done'); + }; + + /** @param {import("./test-operations.js").OperationBaseOption} options */ + const runChain = async ({ stdout, stderr, timeout = 30 }) => { + const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr); + + console.log('Starting chain monitor'); + + const slogFifo = await makeFIFO('chain.slog'); + const slogLines = new LineStreamTransform(); + const slogPipeResult = pipeline(slogFifo, slogLines); + + const chainEnv = Object.create(process.env); + chainEnv.SLOGFILE = slogFifo.path; + // chainEnv.DEBUG = 'agoric'; + + const launcherCp = pipedSpawn('ag-chain-cosmos', ['start'], { + stdio, + env: chainEnv, + detached: true, + }); + + let stopped = false; + + // Chain exit with code 98 when killed + const chainDone = childProcessDone(launcherCp, { + ignoreExitCode: true, + }).then((code) => { + if (code !== 0 && (!stopped || code !== 98)) { + return Promise.reject( + new Error(`Chain exited with non-zero code: ${code}`), + ); + } + return 0; + }); + + chainDone.then( + () => console.log('Chain exited successfully'), + (error) => console.error('Chain exited with error', error), + ); + + const [swingSetLaunched, firstBlock, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: chainSwingSetLaunchRE }, + { matcher: chainBlockBeginRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([ + slogPipeResult, + outputParsed, + chainDone, + ]).then(() => {}); + + const ready = firstBlock.then(async () => { + let retries = 0; + while (!stopped) { + // Don't pipe output to console, it's too noisy + const statusCp = spawn('ag-chain-cosmos', ['status'], { + stdio: ['ignore', 'pipe', 'pipe'], + }); + + const chunks = []; + for await (const chunk of statusCp.stdout) { + chunks.push(chunk); + } + if ( + (await childProcessDone(statusCp, { + ignoreExitCode: retries < 3, + })) !== 0 + ) { + retries += 1; + await sleep(1 * 1000); + continue; // eslint-disable-line no-continue + } else { + retries = 0; + } + + const status = JSON.parse(Buffer.concat(chunks).toString('utf-8')); + + if (status.SyncInfo.catching_up === false) { + return; + } + + await sleep(5 * 1000); + } + }); + + return tryTimeout( + timeout * 1000, + async () => { + await swingSetLaunched; + + console.log('Chain running'); + + const stop = () => { + stopped = true; + launcherCp.kill(); + }; + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ); + + return harden({ + stop, + done, + ready, + slogLines: { + [Symbol.asyncIterator]: () => slogLines[Symbol.asyncIterator](), + }, + storageLocation: chainStateDir, + processInfo, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, firstBlock]); + launcherCp.kill(); + slogFifo.close(); + }, + ); + }; + + /** @param {import("./test-operations.js").OperationBaseOption} options */ + const runClient = async ({ stdout, stderr, timeout = 20 }) => { + const { console, stdio } = getConsoleAndStdio('client', stdout, stderr); + + console.log('Starting client'); + + const launcherCp = pipedSpawn( + 'agoric', + ['start', 'testnet', '8000', `${testnetOrigin}/network-config`], + { + stdio, + detached: true, + }, + ); + + const clientDone = childProcessDone(launcherCp); + + clientDone.then( + () => console.log('Client 
exited successfully'), + (error) => console.error('Client exited with error', error), + ); + + const [clientStarted, walletReady, outputParsed] = whenStreamSteps( + launcherCp.stdout, + [ + { matcher: clientStartRE, resultIndex: -1 }, + { matcher: clientWalletReadyRE, resultIndex: -1 }, + ], + { + waitEnd: false, + }, + ); + + const done = PromiseAllOrErrors([outputParsed, clientDone]).then(() => {}); + + return tryTimeout( + timeout * 1000, + async () => { + await clientStarted; + + console.log('Client running'); + + const processInfo = await getProcessInfo( + /** @type {number} */ (launcherCp.pid), + ).then((launcherInfo) => + getChildMatchingArgv(launcherInfo, clientArgvMatcher), + ); + + const stop = () => process.kill(processInfo.pid); + + return harden({ + stop, + done, + ready: walletReady, + }); + }, + async () => { + // Avoid unhandled rejections for promises that can no longer be handled + Promise.allSettled([done, walletReady]); + launcherCp.kill(); + }, + ); + }; + + return harden({ + setupTest, + runChain, + runClient, + runLoadgen: makeLoadgenOperation({ pipedSpawn }), + }); +}; From 6100cf22ec78f26a013460ba95c00f5718781832 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 2 Jul 2021 08:07:00 +0000 Subject: [PATCH 15/34] Refactor runner to better support interruptions --- runner/lib/main.js | 312 ++++++++++++++++++++++++++++----------------- 1 file changed, 193 insertions(+), 119 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index efa1c7d..bf63660 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -153,6 +153,47 @@ const coerceBooleanOption = ( } }; +const makeInterrupterKit = () => { + const signal = makePromiseKit(); + /** @type {Error | null} */ + let rejection = null; + const onInterrupt = () => { + if (rejection) { + console.warn('Interruption already in progress'); + } else { + rejection = new Error('Interrupted'); + signal.reject(rejection); + } + }; + const onExit = () => { + throw new Error('Interrupt was not cleaned up'); + }; + process.on('SIGINT', onInterrupt); + process.on('SIGTERM', onInterrupt); + process.on('exit', onExit); + + let orInterruptCalled = false; + + const orInterrupt = async (job = Promise.resolve()) => { + orInterruptCalled = true; + return Promise.race([signal.promise, job]); + }; + + const releaseInterrupt = async () => { + process.off('SIGINT', onInterrupt); + process.off('SIGTERM', onInterrupt); + process.off('exit', onExit); + if (!orInterruptCalled && rejection) { + throw rejection; + } + }; + + // Prevent unhandled rejection when orInterrupt is called after interruption + signal.promise.catch(() => {}); + + return { orInterrupt, releaseInterrupt }; +}; + /** * * @param {string} progName @@ -578,15 +619,118 @@ const main = async (progName, rawArgs, powers) => { /** @type {import("stream").Writable} */ let err; + /** @type {string | void} */ + let chainStorageLocation; currentStageElapsedOffsetNs = performance.now() * 1000; ({ console, out, err } = makeConsole(`stage-${currentStage}`)); const { console: stageConsole } = makeConsole('runner', out, err); + const { orInterrupt, releaseInterrupt } = makeInterrupterKit(); + logPerfEvent('stage-start'); const stageStart = performance.now(); - const stageSleep = async () => { + /** @param {() => Promise} nextStep */ + const spawnChain = async (nextStep) => { + stageConsole.log('Running chain', { chainOnly, duration, loadgenConfig }); + logPerfEvent('run-chain-start'); + const runChainResult = await runChain({ stdout: out, stderr: err }); + 
logPerfEvent('run-chain-finish'); + + currentStageElapsedOffsetNs = + (runChainResult.processInfo.startTimestamp - cpuTimeOffset) * 1e6; + chainStorageLocation = runChainResult.storageLocation; + /** @type {import("@agoric/promise-kit").PromiseRecord} */ + const { + promise: chainFirstEmptyBlock, + resolve: resolveFirstEmptyBlock, + } = makePromiseKit(); + const monitorChainDone = monitorChain(runChainResult, { + resolveFirstEmptyBlock, + out, + err, + }); + + await aggregateTryFinally( + async () => { + await orInterrupt(runChainResult.ready); + logPerfEvent('chain-ready'); + stageConsole.log('Chain ready'); + + await orInterrupt(chainFirstEmptyBlock); + + await nextStep(); + }, + async () => { + stageConsole.log('Stopping chain'); + + runChainResult.stop(); + await runChainResult.done; + logPerfEvent('chain-stopped'); + + await monitorChainDone; + }, + ); + }; + + /** @param {() => Promise} nextStep */ + const spawnClient = async (nextStep) => { + stageConsole.log('Running client'); + logPerfEvent('run-client-start'); + const runClientStart = performance.now(); + const runClientResult = await runClient({ stdout: out, stderr: err }); + logPerfEvent('run-client-finish'); + + await aggregateTryFinally( + async () => { + await orInterrupt(runClientResult.ready); + logPerfEvent('client-ready', { + duration: + Math.round((performance.now() - runClientStart) * 1000) / 1e6, + }); + + await nextStep(); + }, + async () => { + stageConsole.log('Stopping client'); + + runClientResult.stop(); + await runClientResult.done; + logPerfEvent('client-stopped'); + }, + ); + }; + + /** @param {() => Promise} nextStep */ + const spawnLoadgen = async (nextStep) => { + stageConsole.log('Running load gen'); + logPerfEvent('run-loadgen-start'); + const runLoadgenResult = await runLoadgen({ + stdout: out, + stderr: err, + config: loadgenConfig, + }); + logPerfEvent('run-loadgen-finish'); + + await aggregateTryFinally( + async () => { + await orInterrupt(runLoadgenResult.ready); + logPerfEvent('loadgen-ready'); + + await nextStep(); + }, + async () => { + stageConsole.log('Stopping loadgen'); + + runLoadgenResult.stop(); + await runLoadgenResult.done; + logPerfEvent('loadgen-stopped'); + }, + ); + }; + + const stageReady = async () => { /** @type {Promise} */ let sleeping; if (duration < 0) { @@ -611,132 +755,51 @@ const main = async (progName, rawArgs, powers) => { } } logPerfEvent('stage-ready'); - - const signal = makePromiseKit(); - const onInterrupt = () => signal.reject(new Error('Interrupted')); - process.once('SIGINT', onInterrupt); - process.once('SIGTERM', onInterrupt); - - await aggregateTryFinally( - async () => { - await Promise.race([sleeping, signal.promise]); - logPerfEvent('stage-shutdown'); - }, - async () => { - process.off('SIGINT', onInterrupt); - process.off('SIGTERM', onInterrupt); - }, - ); + await orInterrupt(sleeping); + logPerfEvent('stage-shutdown'); }; - stageConsole.log('Running chain', { chainOnly, duration, loadgenConfig }); - logPerfEvent('run-chain-start'); - const runChainResult = await runChain({ stdout: out, stderr: err }); - logPerfEvent('run-chain-finish'); - - currentStageElapsedOffsetNs = - (runChainResult.processInfo.startTimestamp - cpuTimeOffset) * 1e6; - const chainStorageLocation = runChainResult.storageLocation; - /** @type {import("@agoric/promise-kit").PromiseRecord} */ - const { - promise: chainFirstEmptyBlock, - resolve: resolveFirstEmptyBlock, - } = makePromiseKit(); - const monitorChainDone = monitorChain(runChainResult, { - resolveFirstEmptyBlock, - out, - err, 
- }); - await aggregateTryFinally( - async () => { - await runChainResult.ready; - logPerfEvent('chain-ready'); - stageConsole.log('Chain ready'); - - await chainFirstEmptyBlock; - - if (chainOnly) { - await stageSleep(); - } else { - stageConsole.log('Running client'); - logPerfEvent('run-client-start'); - const runClientStart = performance.now(); - const runClientResult = await runClient({ stdout: out, stderr: err }); - logPerfEvent('run-client-finish'); - - await aggregateTryFinally( - async () => { - await runClientResult.ready; - logPerfEvent('client-ready', { - duration: - Math.round((performance.now() - runClientStart) * 1000) / 1e6, - }); - - stageConsole.log('Running load gen'); - logPerfEvent('run-loadgen-start'); - const runLoadgenResult = await runLoadgen({ - stdout: out, - stderr: err, - config: loadgenConfig, - }); - logPerfEvent('run-loadgen-finish'); - - await aggregateTryFinally( - async () => { - await runLoadgenResult.ready; - logPerfEvent('loadgen-ready'); - - await stageSleep(); - }, - async () => { - stageConsole.log('Stopping loadgen'); - - runLoadgenResult.stop(); - await runLoadgenResult.done; - logPerfEvent('loadgen-stopped'); - }, + async () => + spawnChain( + chainOnly + ? stageReady + : async () => spawnClient(async () => spawnLoadgen(stageReady)), + ), + async () => + aggregateTryFinally( + async () => { + if (chainStorageLocation != null) { + stageConsole.log('Saving chain storage'); + await childProcessDone( + spawn('tar', [ + '-cSJf', + joinPath( + outputDir, + `chain-storage-stage-${currentStage}.tar.xz`, + ), + chainStorageLocation, + ]), ); - }, - async () => { - stageConsole.log('Stopping client'); + } + }, + async () => { + releaseInterrupt(); - runClientResult.stop(); - await runClientResult.done; - logPerfEvent('client-stopped'); - }, - ); - } - }, - async () => { - stageConsole.log('Stopping chain'); - - runChainResult.stop(); - await runChainResult.done; - logPerfEvent('chain-stopped'); - - await PromiseAllOrErrors([ - childProcessDone( - spawn('tar', [ - '-cSJf', - joinPath(outputDir, `chain-storage-stage-${currentStage}.tar.xz`), - chainStorageLocation, - ]), - ), - monitorChainDone, - ]); - }, + logPerfEvent('stage-finish'); + currentStageElapsedOffsetNs = 0; + }, + ), ); - - logPerfEvent('stage-finish'); - currentStageElapsedOffsetNs = 0; }; // Main await aggregateTryFinally( async () => { + /** @type {import("stream").Writable} */ let out; + /** @type {import("stream").Writable} */ let err; ({ console, out, err } = makeConsole('init')); logPerfEvent('start', { @@ -745,11 +808,22 @@ const main = async (progName, rawArgs, powers) => { // TODO: add other interesting info here }); - const reset = coerceBooleanOption(argv.reset, true); - const setupConfig = { reset, testnetOrigin }; - logPerfEvent('setup-test-start', setupConfig); - await setupTest({ stdout: out, stderr: err, config: setupConfig }); - logPerfEvent('setup-test-finish'); + { + const { releaseInterrupt } = makeInterrupterKit(); + + const reset = coerceBooleanOption(argv.reset, true); + const setupConfig = { reset, testnetOrigin }; + logPerfEvent('setup-test-start', setupConfig); + await aggregateTryFinally( + // Do not short-circuit on interrupt, let the spawned setup process terminate + async () => + setupTest({ stdout: out, stderr: err, config: setupConfig }), + + // This will throw if there was any interrupt, and prevent further execution + async () => releaseInterrupt(), + ); + logPerfEvent('setup-test-finish'); + } const stages = argv.stages != null From 
5c4084ae00e7b33833e477e078a784c7a52c22ad Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 2 Jul 2021 09:02:23 +0000 Subject: [PATCH 16/34] Add option to run loadgen without monitor node --- runner/lib/main.js | 32 +++++++++---- runner/lib/test-testnet.js | 95 ++++++++++++++++++++------------------ 2 files changed, 73 insertions(+), 54 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index bf63660..2b41a3a 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -612,8 +612,14 @@ const main = async (progName, rawArgs, powers) => { * @param {boolean} param0.chainOnly * @param {number} param0.duration * @param {unknown} param0.loadgenConfig + * @param {boolean} param0.withMonitor */ - const runStage = async ({ chainOnly, duration, loadgenConfig }) => { + const runStage = async ({ + chainOnly, + duration, + loadgenConfig, + withMonitor, + }) => { /** @type {import("stream").Writable} */ let out; /** @type {import("stream").Writable} */ @@ -760,12 +766,19 @@ const main = async (progName, rawArgs, powers) => { }; await aggregateTryFinally( - async () => - spawnChain( - chainOnly - ? stageReady - : async () => spawnClient(async () => spawnLoadgen(stageReady)), - ), + async () => { + const mainTask = chainOnly + ? stageReady + : async () => spawnClient(async () => spawnLoadgen(stageReady)); + + if (withMonitor) { + return spawnChain(mainTask); + } else if (!chainOnly) { + return mainTask(); + } else { + throw new Error('Nothing to do'); + } + }, async () => aggregateTryFinally( async () => { @@ -808,11 +821,12 @@ const main = async (progName, rawArgs, powers) => { // TODO: add other interesting info here }); + const withMonitor = coerceBooleanOption(argv.monitor, true); { const { releaseInterrupt } = makeInterrupterKit(); const reset = coerceBooleanOption(argv.reset, true); - const setupConfig = { reset, testnetOrigin }; + const setupConfig = { reset, withMonitor, testnetOrigin }; logPerfEvent('setup-test-start', setupConfig); await aggregateTryFinally( // Do not short-circuit on interrupt, let the spawned setup process terminate @@ -866,6 +880,7 @@ const main = async (progName, rawArgs, powers) => { withLoadgen != null ? 
!withLoadgen // use boolean loadgen option value as default chainOnly : loadgenConfig === sharedLoadgenConfig && // user provided stage loadgen config implies chain + withMonitor && // If monitor is disabled, chainOnly has no meaning (currentStage === 0 || currentStage === stages - 1), ); @@ -881,6 +896,7 @@ const main = async (progName, rawArgs, powers) => { chainOnly, duration, loadgenConfig, + withMonitor, }); } }, diff --git a/runner/lib/test-testnet.js b/runner/lib/test-testnet.js index 3a6c1c9..9a1f6b2 100644 --- a/runner/lib/test-testnet.js +++ b/runner/lib/test-testnet.js @@ -84,7 +84,7 @@ export const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => { let testnetOrigin = 'https://testnet.agoric.net'; - /** @param {import("./test-operations.js").OperationBaseOption & {config?: {reset?: boolean, chainOnly?: boolean, testnetOrigin?: string}}} options */ + /** @param {import("./test-operations.js").OperationBaseOption & {config?: {reset?: boolean, chainOnly?: boolean, withMonitor?: boolean, testnetOrigin?: string}}} options */ const setupTest = async ({ stdout, stderr, @@ -92,6 +92,7 @@ export const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => { config: { reset = true, chainOnly, + withMonitor = true, testnetOrigin: testnetOriginOption, } = {}, }) => { @@ -103,56 +104,58 @@ export const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => { testnetOrigin = testnetOriginOption; } - if (reset) { - console.log('Resetting chain node'); - await childProcessDone( - pipedSpawn('rm', ['-rf', chainStateDir], { stdio }), - ); - } - - const chainDirStat = await fs - .stat(chainStateDir) - .catch((err) => (err.code === 'ENOENT' ? null : Promise.reject(err))); - - if (!chainDirStat) { - console.log('Fetching network config and genesis'); - const { - chainName, - peers, - seeds, - } = /** @type {{chainName: string, peers: string[], seeds: string[]}} */ (await fetchAsJSON( - `${testnetOrigin}/network-config`, - )); - const genesis = await fetchAsJSON(`${testnetOrigin}/genesis.json`); + if (withMonitor !== false) { + if (reset) { + console.log('Resetting chain node'); + await childProcessDone( + pipedSpawn('rm', ['-rf', chainStateDir], { stdio }), + ); + } - await childProcessDone( - pipedSpawn( - 'ag-chain-cosmos', - ['init', '--chain-id', chainName, `loadgen-monitor-${Date.now()}`], - { stdio }, - ), - ); + const chainDirStat = await fs + .stat(chainStateDir) + .catch((err) => (err.code === 'ENOENT' ? 
null : Promise.reject(err))); + + if (!chainDirStat) { + console.log('Fetching network config and genesis'); + const { + chainName, + peers, + seeds, + } = /** @type {{chainName: string, peers: string[], seeds: string[]}} */ (await fetchAsJSON( + `${testnetOrigin}/network-config`, + )); + const genesis = await fetchAsJSON(`${testnetOrigin}/genesis.json`); + + await childProcessDone( + pipedSpawn( + 'ag-chain-cosmos', + ['init', '--chain-id', chainName, `loadgen-monitor-${Date.now()}`], + { stdio }, + ), + ); - fs.writeFile( - joinPath(chainStateDir, 'config', 'genesis.json'), - JSON.stringify(genesis), - ); + fs.writeFile( + joinPath(chainStateDir, 'config', 'genesis.json'), + JSON.stringify(genesis), + ); - await childProcessDone( - pipedSpawn('ag-chain-cosmos', ['unsafe-reset-all'], { stdio }), - ); + await childProcessDone( + pipedSpawn('ag-chain-cosmos', ['unsafe-reset-all'], { stdio }), + ); - const configPath = joinPath(chainStateDir, 'config', 'config.toml'); + const configPath = joinPath(chainStateDir, 'config', 'config.toml'); - console.log('Patching config'); - const config = await TOML.parse.async( - await fs.readFile(configPath, 'utf-8'), - ); - const configP2p = /** @type {TOML.JsonMap} */ (config.p2p); - configP2p.persistent_peers = peers.join(','); - configP2p.seeds = seeds.join(','); - delete config.log_level; - await fs.writeFile(configPath, TOML.stringify(config)); + console.log('Patching config'); + const config = await TOML.parse.async( + await fs.readFile(configPath, 'utf-8'), + ); + const configP2p = /** @type {TOML.JsonMap} */ (config.p2p); + configP2p.persistent_peers = peers.join(','); + configP2p.seeds = seeds.join(','); + delete config.log_level; + await fs.writeFile(configPath, TOML.stringify(config)); + } } if (reset) { From c826a5c953ed569085977579933efb4ddd40c81c Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 2 Jul 2021 17:41:05 +0000 Subject: [PATCH 17/34] Allow overriding testnet origin to local files --- runner/lib/main.js | 3 ++- runner/lib/test-helpers.js | 14 ++++++++++++++ runner/lib/test-shared-loadgen.js | 2 +- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index 2b41a3a..0c76a92 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -253,7 +253,8 @@ const main = async (progName, rawArgs, powers) => { case 'testnet': case 'stage': makeTestOperations = makeTestnetTestOperations; - testnetOrigin = `https://${argv.profile}.agoric.net`; + testnetOrigin = + argv.testnetOrigin || `https://${argv.profile}.agoric.net`; break; default: throw new Error(`Unexpected profile option: ${argv.profile}`); diff --git a/runner/lib/test-helpers.js b/runner/lib/test-helpers.js index c8a9426..4ddc6f8 100644 --- a/runner/lib/test-helpers.js +++ b/runner/lib/test-helpers.js @@ -3,6 +3,7 @@ import chalk from 'chalk'; // TODO: pass an "httpRequest" as power instead of importing import http from 'http'; import https from 'https'; +import fs from 'fs'; import { sleep } from './helpers/async.js'; import { makeOutputter } from './helpers/outputter.js'; @@ -22,6 +23,19 @@ export const httpRequest = (urlOrString, options = {}) => { const url = typeof urlOrString === 'string' ? 
new URL(urlOrString) : urlOrString; + if (url.protocol === 'file:') { + const stream = fs.createReadStream(url.pathname); + // Ugly cast hack to make res look like what the consumer cares about + const res = /** @type {http.IncomingMessage} */ (harden( + /** @type {unknown} */ ({ + [Symbol.asyncIterator]: () => stream[Symbol.asyncIterator](), + statusCode: 200, + }), + )); + resolve(res); + return; + } + if (!(url.protocol in protocolModules)) { throw new Error(`Invalid protocol ${url.protocol}`); } diff --git a/runner/lib/test-shared-loadgen.js b/runner/lib/test-shared-loadgen.js index 1e7d76b..5548cc5 100644 --- a/runner/lib/test-shared-loadgen.js +++ b/runner/lib/test-shared-loadgen.js @@ -82,7 +82,7 @@ export const makeLoadgenOperation = ({ pipedSpawn }) => { console.log('Load gen app running'); const ready = tasksReady.then(async () => { - console.log('Making request to start faucet'); + console.log('Making request to loadgen'); const body = Buffer.from(JSON.stringify(config), 'utf8'); const res = await httpRequest('http://127.0.0.1:3352/config', { From fc04336c96a9ce913633d425a3dc897aeb002f0b Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 16 Jul 2021 20:07:25 +0000 Subject: [PATCH 18/34] Longer timeouts for local chain start operations --- runner/lib/test-local-chain.js | 4 ++-- runner/lib/test-shared-loadgen.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index 1e55169..e61bc29 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -82,7 +82,7 @@ export const makeTestOperations = ({ }; /** @param {import("./test-operations.js").OperationBaseOption} options */ - const runChain = async ({ stdout, stderr, timeout = 30 }) => { + const runChain = async ({ stdout, stderr, timeout = 120 }) => { const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr); console.log('Starting chain'); @@ -178,7 +178,7 @@ export const makeTestOperations = ({ }; /** @param {import("./test-operations.js").OperationBaseOption} options */ - const runClient = async ({ stdout, stderr, timeout = 20 }) => { + const runClient = async ({ stdout, stderr, timeout = 60 }) => { const { console, stdio } = getConsoleAndStdio('client', stdout, stderr); console.log('Starting client'); diff --git a/runner/lib/test-shared-loadgen.js b/runner/lib/test-shared-loadgen.js index 5548cc5..3f54b22 100644 --- a/runner/lib/test-shared-loadgen.js +++ b/runner/lib/test-shared-loadgen.js @@ -19,7 +19,7 @@ const loadgenReadyRE = /server running/; * */ export const makeLoadgenOperation = ({ pipedSpawn }) => { - return harden(async ({ stdout, stderr, timeout = 10, config = {} }) => { + return harden(async ({ stdout, stderr, timeout = 30, config = {} }) => { const { console, stdio } = getConsoleAndStdio('loadgen', stdout, stderr); console.log('Starting loadgen'); From b64f9f7b4cc742912b507cfef54ebb47d78c898b Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 16 Jul 2021 20:10:47 +0000 Subject: [PATCH 19/34] fix(runner): Prevent hang on interupt at chain start Close slog fifo stream when chain process has started but swingset hasn't yet --- runner/lib/test-local-chain.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/runner/lib/test-local-chain.js b/runner/lib/test-local-chain.js index e61bc29..7322f9d 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/test-local-chain.js @@ -155,6 +155,9 @@ export const makeTestOperations = ({ const stop = () => { stopped = true; 
process.kill(processInfo.pid); + if (slogFifo.pending) { + slogFifo.close(); + } }; return harden({ From 889df836e67a11c4e60bf19875fd1065892fc558 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 20 Jul 2021 22:43:34 +0000 Subject: [PATCH 20/34] Add support for new bootstrap block slog event --- runner/lib/main.js | 39 +++++++++++++++++++++++++++++---------- 1 file changed, 29 insertions(+), 10 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index 0c76a92..34ab56a 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -44,19 +44,27 @@ const knownVatsNamesWithoutProcess = ['comms', 'vattp']; /** * @typedef { | - * 'create-vat' | - * 'vat-startup-finish' | - * 'replay-transcript-start' | + * 'cosmic-swingset-bootstrap-block-start' | + * 'cosmic-swingset-bootstrap-block-finish' | * 'cosmic-swingset-end-block-start' | * 'cosmic-swingset-end-block-finish' | * 'cosmic-swingset-begin-block' - * } SupportedSlogEventTypes + * } SlogCosmicSwingsetEventTypes + */ + +/** + * @typedef { | + * 'create-vat' | + * 'vat-startup-finish' | + * 'replay-transcript-start' | + * SlogCosmicSwingsetEventTypes + * } SlogSupportedEventTypes */ /** * @typedef {{ * time: number, - * type: SupportedSlogEventTypes + * type: SlogSupportedEventTypes * }} SlogEventBase */ @@ -85,18 +93,19 @@ const knownVatsNamesWithoutProcess = ['comms', 'vattp']; /** * @typedef {{ * time: number, - * type: 'cosmic-swingset-end-block-start' | - * 'cosmic-swingset-end-block-finish' | - * 'cosmic-swingset-begin-block', - * vatID: string + * type: SlogCosmicSwingsetEventTypes, + * blockHeight?: number, + * blockTime: number * } & Record} SlogCosmicSwingsetEvent */ -/** @type {SupportedSlogEventTypes[]} */ +/** @type {SlogSupportedEventTypes[]} */ const supportedSlogEventTypes = [ 'create-vat', 'vat-startup-finish', 'replay-transcript-start', + 'cosmic-swingset-bootstrap-block-start', + 'cosmic-swingset-bootstrap-block-finish', 'cosmic-swingset-end-block-start', 'cosmic-swingset-end-block-finish', 'cosmic-swingset-begin-block', @@ -554,8 +563,18 @@ const main = async (progName, rawArgs, powers) => { } break; } + case 'cosmic-swingset-bootstrap-block-start': { + logPerfEvent('chain-first-init-start'); + break; + } + case 'cosmic-swingset-bootstrap-block-finish': { + logPerfEvent('chain-first-init-finish'); + break; + } case 'cosmic-swingset-end-block-start': { if (event.blockHeight === 0) { + // Before https://github.com/Agoric/agoric-sdk/pull/3491 + // bootstrap didn't have it's own slog entry logPerfEvent('chain-first-init-start'); } slogLinesInBlock = 0; From 61fc513c9e96876f25905bd93967af6f20312db3 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 22 Jul 2021 02:05:09 +0000 Subject: [PATCH 21/34] Add tini to Dockerfile to reap zombie xsnap --- Dockerfile | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 1e1fde8..44b1b67 100644 --- a/Dockerfile +++ b/Dockerfile @@ -135,6 +135,10 @@ COPY library-scripts/go-debian.sh /tmp/library-scripts/ RUN bash /tmp/library-scripts/go-debian.sh "none" "/usr/local/go" "${GOPATH}" "node" "false" \ && apt-get clean -y && rm -rf /tmp/library-scripts +# Add Tini +ENV TINI_VERSION v0.19.0 +ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini +RUN chmod +x /tini ############################## FROM base @@ -154,4 +158,4 @@ RUN mkdir -p $SDK_SRC $OUTPUT_DIR && chown $USER_UID:$USER_GID $SDK_SRC $OUTPUT_ USER $USER_UID:$USER_GID -ENTRYPOINT ["/app/start.sh"] \ No newline at end of file +ENTRYPOINT 
["/tini", "--", "/app/start.sh"] \ No newline at end of file From e4934fac03737f7da28bb497717796d785af444a Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 22 Jul 2021 10:13:38 +0000 Subject: [PATCH 22/34] Avoid saving storage after final chain only restart --- runner/lib/main.js | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index 34ab56a..0066d5a 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -633,12 +633,14 @@ const main = async (progName, rawArgs, powers) => { * @param {number} param0.duration * @param {unknown} param0.loadgenConfig * @param {boolean} param0.withMonitor + * @param {boolean} param0.saveStorage */ const runStage = async ({ chainOnly, duration, loadgenConfig, withMonitor, + saveStorage, }) => { /** @type {import("stream").Writable} */ let out; @@ -802,7 +804,7 @@ const main = async (progName, rawArgs, powers) => { async () => aggregateTryFinally( async () => { - if (chainStorageLocation != null) { + if (saveStorage && chainStorageLocation != null) { stageConsole.log('Saving chain storage'); await childProcessDone( spawn('tar', [ @@ -904,6 +906,11 @@ const main = async (progName, rawArgs, powers) => { (currentStage === 0 || currentStage === stages - 1), ); + const saveStorage = coerceBooleanOption( + stageConfig.saveStorage, + !chainOnly || currentStage === 0, + ); + const duration = (stageConfig.duration != null ? Number(stageConfig.duration) @@ -917,6 +924,7 @@ const main = async (progName, rawArgs, powers) => { duration, loadgenConfig, withMonitor, + saveStorage, }); } }, From 98f2c922dcf594686a4a4cbe28a805d8b1f70c18 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 22 Jul 2021 10:54:51 +0000 Subject: [PATCH 23/34] Cleanly stop nested tasks when a parent task fails --- runner/lib/helpers/async.d.ts | 12 +++++ runner/lib/helpers/async.js | 46 +++++++++++++++++ runner/lib/main.js | 93 ++++++++++++++++++++++++----------- 3 files changed, 123 insertions(+), 28 deletions(-) diff --git a/runner/lib/helpers/async.d.ts b/runner/lib/helpers/async.d.ts index 1a1e4f8..6aeac0b 100644 --- a/runner/lib/helpers/async.d.ts +++ b/runner/lib/helpers/async.d.ts @@ -136,3 +136,15 @@ export declare function PromiseAllOrErrors( export declare function PromiseAllOrErrors( values: readonly (T | PromiseLike)[], ): Promise; + +export interface NextStep { + (stop: Promise): Promise; +} + +export interface Task { + (nextStep: NextStep): Promise; +} + +export declare function sequential(...tasks: readonly Task[]): Task; + +export declare function parallel(...tasks: readonly Task[]): Task; diff --git a/runner/lib/helpers/async.js b/runner/lib/helpers/async.js index 890e187..fbfb23a 100644 --- a/runner/lib/helpers/async.js +++ b/runner/lib/helpers/async.js @@ -1,5 +1,7 @@ /* global setTimeout */ +import { makePromiseKit } from '@agoric/promise-kit'; + /** @type {import("./async.js").sleep} */ export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); @@ -99,3 +101,47 @@ export const tryTimeout = async (timeoutMs, trier, onError) => { .then((finalError) => Promise.reject(finalError)), ); }; + +/** @typedef {import("./async.js").Task} Task */ +/** + * @template T + * @typedef {import('@agoric/promise-kit').PromiseRecord} PromiseRecord + */ + +/** + * @param {Task[]} tasks + * @returns {Task} + */ +export const sequential = (...tasks) => { + return tasks.reduceRight((accumulatedTask, prevTask) => async (nextStep) => { + await prevTask(async (stopPrev) => { + await accumulatedTask(async 
(stopAcc) => { + await nextStep(Promise.race([stopAcc, stopPrev])); + }); + }); + }); +}; + +/** + * @param {Task[]} tasks + * @returns {Task} + */ +export const parallel = (...tasks) => async (nextStep) => { + /** @type {PromiseRecord<{stop: Promise}>[]} */ + const kits = tasks.map(() => makePromiseKit()); + /** @type {PromiseRecord} */ + const nextStepDone = makePromiseKit(); + Promise.all(kits.map((kit) => kit.promise)).then((wrappedStops) => { + nextStepDone.resolve( + nextStep(Promise.race(wrappedStops.map(({ stop }) => stop))), + ); + }); + await Promise.all( + tasks.map((task, i) => + task((stop) => { + kits[i].resolve({ stop }); + return nextStepDone.promise; + }), + ), + ); +}; diff --git a/runner/lib/main.js b/runner/lib/main.js index 0066d5a..5a38b96 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -19,6 +19,7 @@ import { PromiseAllOrErrors, warnOnRejection, aggregateTryFinally, + sequential, } from './helpers/async.js'; import { childProcessDone } from './helpers/child-process.js'; import { makeFsHelper } from './helpers/fs.js'; @@ -28,6 +29,8 @@ import { makeOutputter } from './helpers/outputter.js'; import { makeTestOperations as makeLocalTestOperations } from './test-local-chain.js'; import { makeTestOperations as makeTestnetTestOperations } from './test-testnet.js'; +/** @typedef {import('./helpers/async.js').Task} Task */ + const pipeline = promisify(pipelineCallback); const finished = promisify(finishedCallback); @@ -183,7 +186,7 @@ const makeInterrupterKit = () => { let orInterruptCalled = false; - const orInterrupt = async (job = Promise.resolve()) => { + const orInterrupt = async (job = new Promise(() => {})) => { orInterruptCalled = true; return Promise.race([signal.promise, job]); }; @@ -659,13 +662,19 @@ const main = async (progName, rawArgs, powers) => { logPerfEvent('stage-start'); const stageStart = performance.now(); - /** @param {() => Promise} nextStep */ + /** @type {Task} */ const spawnChain = async (nextStep) => { stageConsole.log('Running chain', { chainOnly, duration, loadgenConfig }); logPerfEvent('run-chain-start'); const runChainResult = await runChain({ stdout: out, stderr: err }); logPerfEvent('run-chain-finish'); + let chainExited = false; + runChainResult.done.finally(() => { + chainExited = true; + logPerfEvent('chain-stopped'); + }); + currentStageElapsedOffsetNs = (runChainResult.processInfo.startTimestamp - cpuTimeOffset) * 1e6; chainStorageLocation = runChainResult.storageLocation; @@ -688,21 +697,22 @@ const main = async (progName, rawArgs, powers) => { await orInterrupt(chainFirstEmptyBlock); - await nextStep(); + await nextStep(runChainResult.done); }, async () => { - stageConsole.log('Stopping chain'); + if (!chainExited) { + stageConsole.log('Stopping chain'); - runChainResult.stop(); - await runChainResult.done; - logPerfEvent('chain-stopped'); + runChainResult.stop(); + await runChainResult.done; + } await monitorChainDone; }, ); }; - /** @param {() => Promise} nextStep */ + /** @type {Task} */ const spawnClient = async (nextStep) => { stageConsole.log('Running client'); logPerfEvent('run-client-start'); @@ -710,6 +720,12 @@ const main = async (progName, rawArgs, powers) => { const runClientResult = await runClient({ stdout: out, stderr: err }); logPerfEvent('run-client-finish'); + let clientExited = false; + runClientResult.done.finally(() => { + clientExited = true; + logPerfEvent('client-stopped'); + }); + await aggregateTryFinally( async () => { await orInterrupt(runClientResult.ready); @@ -718,19 +734,20 @@ const main = 
async (progName, rawArgs, powers) => { Math.round((performance.now() - runClientStart) * 1000) / 1e6, }); - await nextStep(); + await nextStep(runClientResult.done); }, async () => { - stageConsole.log('Stopping client'); + if (!clientExited) { + stageConsole.log('Stopping client'); - runClientResult.stop(); - await runClientResult.done; - logPerfEvent('client-stopped'); + runClientResult.stop(); + await runClientResult.done; + } }, ); }; - /** @param {() => Promise} nextStep */ + /** @type {Task} */ const spawnLoadgen = async (nextStep) => { stageConsole.log('Running load gen'); logPerfEvent('run-loadgen-start'); @@ -741,24 +758,32 @@ const main = async (progName, rawArgs, powers) => { }); logPerfEvent('run-loadgen-finish'); + let loadgenExited = false; + runLoadgenResult.done.finally(() => { + loadgenExited = true; + logPerfEvent('loadgen-stopped'); + }); + await aggregateTryFinally( async () => { await orInterrupt(runLoadgenResult.ready); logPerfEvent('loadgen-ready'); - await nextStep(); + await nextStep(runLoadgenResult.done); }, async () => { - stageConsole.log('Stopping loadgen'); + if (!loadgenExited) { + stageConsole.log('Stopping loadgen'); - runLoadgenResult.stop(); - await runLoadgenResult.done; - logPerfEvent('loadgen-stopped'); + runLoadgenResult.stop(); + await runLoadgenResult.done; + } }, ); }; - const stageReady = async () => { + /** @type {Task} */ + const stageReady = async (nextStep) => { /** @type {Promise} */ let sleeping; if (duration < 0) { @@ -783,23 +808,35 @@ const main = async (progName, rawArgs, powers) => { } } logPerfEvent('stage-ready'); - await orInterrupt(sleeping); + await nextStep(sleeping); logPerfEvent('stage-shutdown'); }; await aggregateTryFinally( async () => { - const mainTask = chainOnly - ? stageReady - : async () => spawnClient(async () => spawnLoadgen(stageReady)); + /** @type {Task} */ + const rootTask = async (nextStep) => { + await nextStep(orInterrupt()); + }; + + /** @type {Task[]} */ + const tasks = [rootTask]; if (withMonitor) { - return spawnChain(mainTask); - } else if (!chainOnly) { - return mainTask(); - } else { + tasks.push(spawnChain); + } + + if (!chainOnly) { + tasks.push(spawnClient, spawnLoadgen); + } + + if (tasks.length === 1) { throw new Error('Nothing to do'); + } else { + tasks.push(stageReady); } + + await sequential(...tasks)((stop) => stop); }, async () => aggregateTryFinally( From 38a273a40b852f26212b565a35779def4fab447e Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 22 Jul 2021 18:13:23 +0000 Subject: [PATCH 24/34] Prevent unhandled rejections when nested task is stopped by parent trigger --- runner/lib/main.js | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/runner/lib/main.js b/runner/lib/main.js index 5a38b96..6743ff1 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -670,7 +670,7 @@ const main = async (progName, rawArgs, powers) => { logPerfEvent('run-chain-finish'); let chainExited = false; - runChainResult.done.finally(() => { + const done = runChainResult.done.finally(() => { chainExited = true; logPerfEvent('chain-stopped'); }); @@ -697,14 +697,14 @@ const main = async (progName, rawArgs, powers) => { await orInterrupt(chainFirstEmptyBlock); - await nextStep(runChainResult.done); + await nextStep(done); }, async () => { if (!chainExited) { stageConsole.log('Stopping chain'); runChainResult.stop(); - await runChainResult.done; + await done; } await monitorChainDone; @@ -721,7 +721,7 @@ const main = async (progName, rawArgs, powers) => { 
logPerfEvent('run-client-finish'); let clientExited = false; - runClientResult.done.finally(() => { + const done = runClientResult.done.finally(() => { clientExited = true; logPerfEvent('client-stopped'); }); @@ -734,14 +734,14 @@ const main = async (progName, rawArgs, powers) => { Math.round((performance.now() - runClientStart) * 1000) / 1e6, }); - await nextStep(runClientResult.done); + await nextStep(done); }, async () => { if (!clientExited) { stageConsole.log('Stopping client'); runClientResult.stop(); - await runClientResult.done; + await done; } }, ); @@ -759,7 +759,7 @@ const main = async (progName, rawArgs, powers) => { logPerfEvent('run-loadgen-finish'); let loadgenExited = false; - runLoadgenResult.done.finally(() => { + const done = runLoadgenResult.done.finally(() => { loadgenExited = true; logPerfEvent('loadgen-stopped'); }); @@ -769,14 +769,14 @@ const main = async (progName, rawArgs, powers) => { await orInterrupt(runLoadgenResult.ready); logPerfEvent('loadgen-ready'); - await nextStep(runLoadgenResult.done); + await nextStep(done); }, async () => { if (!loadgenExited) { stageConsole.log('Stopping loadgen'); runLoadgenResult.stop(); - await runLoadgenResult.done; + await done; } }, ); @@ -816,7 +816,9 @@ const main = async (progName, rawArgs, powers) => { async () => { /** @type {Task} */ const rootTask = async (nextStep) => { - await nextStep(orInterrupt()); + const done = orInterrupt(); + done.catch(() => {}); + await nextStep(done); }; /** @type {Task[]} */ From 85348449f1332a312ea9e9f52acf790dac3ded96 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 23 Jul 2021 00:40:39 +0000 Subject: [PATCH 25/34] Add daily perf runner script --- .dockerignore | 3 ++- .gitignore | 4 +++- Makefile | 5 +++++ results/.keep | 0 runner/lib/main.js | 2 +- scripts/run-daily-perf.sh | 37 +++++++++++++++++++++++++++++++++++++ 6 files changed, 48 insertions(+), 3 deletions(-) create mode 100644 results/.keep create mode 100755 scripts/run-daily-perf.sh diff --git a/.dockerignore b/.dockerignore index b3023cb..5bcd58f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -5,4 +5,5 @@ _agstate/yarn-links/ .idea/ ui/.cache/ ui/dist/ -./Dockerfile +/Dockerfile +/results diff --git a/.gitignore b/.gitignore index cb93085..096afc3 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,6 @@ installationConstants.js _agstate/yarn-links/ .idea/ ui/.cache/ -ui/dist/ \ No newline at end of file +ui/dist/ +/results/* +!/results/.keep diff --git a/Makefile b/Makefile index 84044b5..acbd4a5 100644 --- a/Makefile +++ b/Makefile @@ -30,6 +30,11 @@ run-chain: run-client: $(MAKE) -C $(CSDIR) scenario2-run-client +build-docker: + docker build -t loadgen-runner . 
+ +daily-perf: + cd results && ../scripts/run-daily-perf.sh run-loadgen: yarn loadgen diff --git a/results/.keep b/results/.keep new file mode 100644 index 0000000..e69de29 diff --git a/runner/lib/main.js b/runner/lib/main.js index 6743ff1..ace994f 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -247,7 +247,7 @@ const main = async (progName, rawArgs, powers) => { let { console } = makeConsole(); - const outputDir = String(argv.outputDir || `run-results-${Date.now()}`); + const outputDir = String(argv.outputDir || `results/run-${Date.now()}`); console.log(`Outputting to ${resolvePath(outputDir)}`); await fs.mkdir(outputDir, { recursive: true }); diff --git a/scripts/run-daily-perf.sh b/scripts/run-daily-perf.sh new file mode 100755 index 0000000..8d264e0 --- /dev/null +++ b/scripts/run-daily-perf.sh @@ -0,0 +1,37 @@ +#!/bin/sh +set -x + +# Runs a full 24h loadgen on the latest HEAD and save the output in the current directory +# Requires a docker image named `loadgen-runner` + +running=0 +SDK_REPO="${SDK_REPO:-https://github.com/Agoric/agoric-sdk.git}" +DOCKER_ID= + +stop() { [ $running -eq 0 ] && exit 0 || running=0; } +stop_container() { [ -z "${DOCKER_ID}" ] || docker stop ${DOCKER_ID}; } + +trap '' HUP +trap 'stop; stop_container' INT TERM + +while true +do + while true + do + SDK_REVISION=$(git ls-remote ${SDK_REPO} HEAD | awk '{ print substr($1,1,12) }') + OUTPUT_DIR="daily-perf-${SDK_REVISION}" + [ ! -d "${OUTPUT_DIR}" ] && break + sleep 60 + done + echo "processing ${SDK_REVISION}" + mkdir "${OUTPUT_DIR}" + running=1 + DOCKER_ID=$(docker create -v "$(pwd)/${OUTPUT_DIR}:/out" -e SDK_REVISION=${SDK_REVISION} --name "${OUTPUT_DIR}" loadgen-runner --no-reset) || exit $? + docker start ${DOCKER_ID} + docker wait ${DOCKER_ID} >"${OUTPUT_DIR}/exit_code" + docker logs ${DOCKER_ID} >"${OUTPUT_DIR}/docker.log" 2>&1 + [ -d "/var/lib/docker" ] && sudo cat /var/lib/docker/containers/${DOCKER_ID}/${DOCKER_ID}-json.log >"${OUTPUT_DIR}/docker.json.log" + docker rm ${DOCKER_ID} + DOCKER_ID= + stop +done From c600717db1f24d6ed23217b58b8055949ae73a21 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Sat, 24 Jul 2021 03:40:53 +0000 Subject: [PATCH 26/34] Fix daily perf script interruptibility The perf script would ignore directed signals while sleeping or waiting for docker, launch those in the background instead and wait on them. 
Send a term signal to docker instead of stopping to prevent abrupt kill Streamline signal handling logic --- scripts/run-daily-perf.sh | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/scripts/run-daily-perf.sh b/scripts/run-daily-perf.sh index 8d264e0..bd74033 100755 --- a/scripts/run-daily-perf.sh +++ b/scripts/run-daily-perf.sh @@ -4,34 +4,38 @@ set -x # Runs a full 24h loadgen on the latest HEAD and save the output in the current directory # Requires a docker image named `loadgen-runner` -running=0 SDK_REPO="${SDK_REPO:-https://github.com/Agoric/agoric-sdk.git}" +running=1 DOCKER_ID= +SLEEP_PID= -stop() { [ $running -eq 0 ] && exit 0 || running=0; } -stop_container() { [ -z "${DOCKER_ID}" ] || docker stop ${DOCKER_ID}; } +stop_sleep() { [ -z "$SLEEP_PID" ] && return; kill -TERM $SLEEP_PID; exit 0; } +stop_container() { [ -z "${DOCKER_ID}" ] && return; docker kill --signal=SIGTERM ${DOCKER_ID}; } trap '' HUP -trap 'stop; stop_container' INT TERM +trap 'running=0; stop_sleep; stop_container' INT TERM -while true +while [ $running -eq 1 ] do while true do SDK_REVISION=$(git ls-remote ${SDK_REPO} HEAD | awk '{ print substr($1,1,12) }') OUTPUT_DIR="daily-perf-${SDK_REVISION}" [ ! -d "${OUTPUT_DIR}" ] && break - sleep 60 + sleep 60 & + SLEEP_PID=$! + wait $SLEEP_PID + SLEEP_PID= done echo "processing ${SDK_REVISION}" mkdir "${OUTPUT_DIR}" - running=1 DOCKER_ID=$(docker create -v "$(pwd)/${OUTPUT_DIR}:/out" -e SDK_REVISION=${SDK_REVISION} --name "${OUTPUT_DIR}" loadgen-runner --no-reset) || exit $? docker start ${DOCKER_ID} - docker wait ${DOCKER_ID} >"${OUTPUT_DIR}/exit_code" + docker wait ${DOCKER_ID} >"${OUTPUT_DIR}/exit_code" & + DOCKER_WAIT_PID=$! + while kill -0 $DOCKER_WAIT_PID 2>/dev/null; do wait $DOCKER_WAIT_PID; done docker logs ${DOCKER_ID} >"${OUTPUT_DIR}/docker.log" 2>&1 - [ -d "/var/lib/docker" ] && sudo cat /var/lib/docker/containers/${DOCKER_ID}/${DOCKER_ID}-json.log >"${OUTPUT_DIR}/docker.json.log" + [ -d "/var/lib/docker" ] && sudo -n cat /var/lib/docker/containers/${DOCKER_ID}/${DOCKER_ID}-json.log >"${OUTPUT_DIR}/docker.json.log" docker rm ${DOCKER_ID} DOCKER_ID= - stop done From c0f1db6644c54212a215fe54ccd49f74dd7b02dc Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Sat, 24 Jul 2021 04:06:36 +0000 Subject: [PATCH 27/34] Add daily perf service --- scripts/loadgen-daily-perf.service | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 scripts/loadgen-daily-perf.service diff --git a/scripts/loadgen-daily-perf.service b/scripts/loadgen-daily-perf.service new file mode 100644 index 0000000..fa713e5 --- /dev/null +++ b/scripts/loadgen-daily-perf.service @@ -0,0 +1,17 @@ +[Unit] +Description=Agoric SDK loadgen daily perf runner +DefaultDependencies=no +After=docker.service + +[Service] +Type=exec +User=benchmark +Group=benchmark +WorkingDirectory=/home/benchmark/workspace/daily-perf/ +ExecStart=/home/benchmark/workspace/daily-perf/run.sh +TimeoutStartSec=0 +TimeoutStopSec=3600 +KillMode=mixed + +[Install] +WantedBy=default.target From aa87b59787a397d0b6b469ca3ee3f3906e730c07 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman <86499+mhofman@users.noreply.github.com> Date: Fri, 23 Jul 2021 21:24:48 -0700 Subject: [PATCH 28/34] Drop Node 12 support --- .github/workflows/lint-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index 105462b..d80edaf 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml 
@@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [12.x, 14.x] + node-version: [14.x] steps: - name: Checkout dapp uses: actions/checkout@v2 From dce8e8f53d2ba979934c96e807664991042e6133 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Sat, 24 Jul 2021 05:23:47 +0000 Subject: [PATCH 29/34] Fix repo linting --- loadgen/agent-create-vault.js | 2 +- loadgen/allValues.js | 2 +- loadgen/loop.js | 4 ++-- package.json | 4 +++- runner/package.json | 2 ++ yarn.lock | 5 +++++ 6 files changed, 14 insertions(+), 5 deletions(-) diff --git a/loadgen/agent-create-vault.js b/loadgen/agent-create-vault.js index e961dd1..9c038d8 100644 --- a/loadgen/agent-create-vault.js +++ b/loadgen/agent-create-vault.js @@ -42,7 +42,7 @@ export default async function startAgent([key, home]) { // we only withdraw half the value of the collateral, giving us 200% // collateralization const collaterals = await E(treasuryPublicFacet).getCollaterals(); - const cdata = collaterals.find(c => c.brand === bldBrand); + const cdata = collaterals.find((c) => c.brand === bldBrand); const priceRate = cdata.marketPrice; const half = makeRatio(BigInt(50), runBrand); const wantedRun = multiplyBy(multiplyBy(bldToLock, priceRate), half); diff --git a/loadgen/allValues.js b/loadgen/allValues.js index 41745ef..89d01c8 100644 --- a/loadgen/allValues.js +++ b/loadgen/allValues.js @@ -1,4 +1,4 @@ const zip = (xs, ys) => xs.map((x, i) => [x, ys[i]]); const { keys, values, fromEntries } = Object; -export const allValues = async obj => +export const allValues = async (obj) => fromEntries(zip(keys(obj), await Promise.all(values(obj)))); diff --git a/loadgen/loop.js b/loadgen/loop.js index 11ddb1d..fd01312 100644 --- a/loadgen/loop.js +++ b/loadgen/loop.js @@ -55,7 +55,7 @@ function maybeStartOneCycle(name, limit) { logdata({ type: 'finish', task: name, seq, success: true }); s.succeeded += 1; }, - err => { + (err) => { console.log(`[${name}] failed:`, err); logdata({ type: 'finish', task: name, seq, success: false }); s.failed += 1; @@ -128,7 +128,7 @@ async function startServer() { if (req.method === 'PUT') { let body = ''; req.setEncoding('utf8'); - req.on('data', chunk => { + req.on('data', (chunk) => { body += chunk; }); req.on('end', () => { diff --git a/package.json b/package.json index c2e74c2..7a36da0 100644 --- a/package.json +++ b/package.json @@ -24,10 +24,12 @@ }, "scripts": { "preinstall": "node -e \"process.env.AGORIC_INSTALL && process.exit(0); console.warn('please use: agoric install . 
For details, see https://agoric.com/documentation/'); process.exit(1)\"", - "lint": "yarn workspaces run lint-fix", + "lint": "yarn workspaces run lint-check", + "lint-fix": "yarn workspaces run lint-fix", "lint-check": "yarn workspaces run lint-check", "test": "yarn workspaces run test", "build": "yarn workspaces run build", + "runner": "runner/bin/loadgen-runner", "loadgen": "agoric deploy loadgen/loop.js" }, "dependencies": { diff --git a/runner/package.json b/runner/package.json index 07bf294..08346cb 100644 --- a/runner/package.json +++ b/runner/package.json @@ -14,6 +14,7 @@ "lint:js": "eslint '**/*.{js,ts}'", "lint:prettier": "prettier --check '**/*.{js,ts}'", "lint:types": "tsc -p jsconfig.json", + "lint-check": "yarn lint", "lint-fix": "yarn lint-fix:js && yarn lint-fix:prettier", "lint-fix:js": "eslint --fix '**/*.{js,ts}'", "lint-fix:prettier": "prettier --write '**/*.{js,ts}'" }, "devDependencies": { "@agoric/eslint-config": "^0.3.6", "@types/readline-transform": "^1.0.0", + "@types/yargs-parser": "^20.2.1", "ava": "^3.13.0", "eslint": "^7.11.0", "prettier": "^2.1.2" diff --git a/yarn.lock b/yarn.lock index 2508c8e..a361e94 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1286,6 +1286,11 @@ resolved "https://registry.yarnpkg.com/@types/tmp/-/tmp-0.2.0.tgz#e3f52b4d7397eaa9193592ef3fdd44dc0af4298c" integrity sha512-flgpHJjntpBAdJD43ShRosQvNC0ME97DCfGvZEDlAThQmnerRXrLbX6YgzRBQCZTthET9eAWFAMaYP0m0Y4HzQ== +"@types/yargs-parser@^20.2.1": + version "20.2.1" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-20.2.1.tgz#3b9ce2489919d9e4fea439b76916abc34b2df129" + integrity sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw== + "@typescript-eslint/parser@^4.18.0": version "4.25.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.25.0.tgz#6b2cb6285aa3d55bfb263c650739091b0f19aceb" From b6b268b97516566d42220b3036dc8427331432b2 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 12 Aug 2021 23:29:42 +0000 Subject: [PATCH 30/34] Update readme --- README.md | 100 +++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 76 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index 73ed9be..7c6fec0 100644 --- a/README.md +++ b/README.md @@ -2,66 +2,116 @@ ## Runner -### All-in-one Docker +The loadgen runner automates running any number of load generation cycles on a local or testnet chain, monitoring the local chain node and vats processes. Depending on the use case, it can be run using a local Agoric SDK repo, or it can check out and set up any given revision, with multiple layers of helpers to automate the execution: -First build the image: +- `loadgen-runner` executable: core tool automating loadgen cycles against an installed agoric SDK (available on PATH) +- `start.sh` script: helper to automate checking out any agoric-sdk revision, compiling and installing it in a temporary location, and running the load generator with it. Can also be used with an existing agoric-sdk repo. +- docker image: A Linux Debian environment set up with all dependencies to allow compiling the agoric-sdk. The entrypoint executes the start script, and has mount points for the output directory and optionally an SDK repo. +- `run-daily-perf.sh` script: service entrypoint to continuously execute a `loadgen-runner` docker image against the latest revision with the default options.
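+
+As a quick sketch of how these layers fit together (mirroring the `build-docker` and `daily-perf` Makefile targets; assumes Docker is installed and loops until interrupted):
+
+```sh
+# Build the runner image, then let the daily-perf loop drive it from the results directory
+docker build -t loadgen-runner .
+cd results && ../scripts/run-daily-perf.sh
+```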
+ +### `loadgen-runner` executable + +At the core, the loadgen-runner can be configured to run multiple stages of load generation, either on a local solo chain, or against an existing chain, automatically spawning an ag-solo client and deploying the loadgen tool. It captures the slog file of the local chain node, the state storage of the chain at the end of each stage, and process and disk usage information. + +#### Command + +Assuming the agoric-sdk and cosmic-swingset are built and installed, and the agoric cli is available in `PATH`: ```sh -docker build . -t loadgen-runner +mkdir -p $HOME/loadgen-output +./runner/bin/loadgen-runner --output-dir=$HOME/loadgen-output ``` -#### Mount points +#### Options -- `/out`: directory containing output artifacts -- `/src`: directory containing `agoric-sdk` repo. Automatically checked out if not a git repo (empty) +The runner uses `yargs-parser` to parse the string command line arguments and dynamically build a complex `argv` object from them. It automatically converts unary arguments into booleans (with support for the `no-` negation prefix), performs number conversion, creates nested objects from dot (`.`) notation, and converts kebab-case option names to camelCase. + +Currently the following options are available: + +- `--output-dir`: the directory in which to put the results from the loadgen cycles (`perf.jsonl`, chain node slogs, chain node storage). Defaults to `results/run-{posixtime}` in the working directory. +- `--profile`: (experimental) the chain target, either `local` (default), `testnet` or `stage`. +- `--no-monitor`: disables running a chain monitor node (for non `local` profiles). +- `--monitor-interval`: a number in minutes for the interval at which to capture process stats for the chain. +- `--no-reset`: a boolean option to control whether the local chain state directory should be checked out clean before starting. +- `--stages`: the total number of stages to run. Defaults to 6. +- `--stage.loadgen.*`: the object to use as default loadgen config for the stages. Created from multiple arguments and passed as-is to the loadgen tool. +- `--stage.duration`: the time in minutes to use as default duration for each loadgen stage (non chain-only, see below). Defaults to 360 minutes (6 hours). +- `--stage.n.*`: Override config for a given stage, where 0 <= n < `--stages`. +- `--stage.n.loadgen.*`: the object to use as loadgen config for the given stage, created from multiple arguments and passed as-is to the loadgen tool. Specifying it implies the stage is not chain-only. +- `--stage.n.chain-only`: boolean flag specifying if the stage should only run the chain node and not start a client or loadgen. Defaults to `true` for the first and last stages. Defaults to `false` for other stages, or if `--stage.n.loadgen.*` is specified. +- `--stage.n.save-storage`: boolean indicating if the storage of the chain node should be saved at the end of the stage. Defaults to `true` for non chain-only stages (where the loadgen runs), as well as for stage 0 (to capture local bootstrap). +- `--stage.n.duration`: the time in minutes for the stage duration. Defaults to the shared duration above for non chain-only stages, or 0 (immediate stop after start) otherwise. Use a negative value to run until interrupted. + +### `start.sh` script + +The start script automates checking out and setting up any revision of the Agoric SDK before launching the loadgen-runner.
It does so without interfering with an existing sdk installation by default, but can also be pointed to run the setup steps on an existing checked out repository. + +All command line arguments are passed through to `loadgen-runner`. #### Environment -- `SDK_REVISION`: The agoric-sdk git revision to checkout for the test +- `OUTPUT_DIR`: directory containing output artifacts. Creates temporary folder derived from revision if not set (`/tmp/agoric-sdk-out-{SDK_REVISION}`) +- `SDK_SRC`: directory containing `agoric-sdk` repo. Creates temporary folder if not set (`/tmp/agoric-sdk-src-{SDK_REVISION}`) +- `SDK_REVISION`: The agoric-sdk git revision to checkout for the test, if no existing repo found. Remote head if not set #### Examples ```sh -OUTPUT_DIR=$HOME/loadgen-output -mkdir -p $OUTPUT_DIR -docker run --rm -v $OUTPUT_DIR:/out -e SDK_REVISION=fa7ff5e55e loadgen-runner +SDK_REVISION=fa7ff5e55e OUTPUT_DIR=$HOME/loadgen-output ./start.sh ``` ```sh -OUTPUT_DIR=$HOME/loadgen-output -mkdir -p $OUTPUT_DIR -docker run --rm -v $OUTPUT_DIR:/out -v ../agoric-sdk:/src loadgen-runner +SDK_SRC=../agoric-sdk ./start.sh --stage.duration=10 ``` -### All-in-one Linux Shell +### Docker image + +The Docker image provides a Linux Debian environment setup with all dependencies to allow compiling the agoric-sdk. The entrypoint executes the start script, and has mount points for output directory and optionally an SDK repo. + +#### Mount points + +- `/out`: directory containing output artifacts +- `/src`: directory containing `agoric-sdk` repo. Automatically checked out if not a git repo (empty) #### Environment -- `OUTPUT_DIR`: directory containing output artifacts. Creates temporary folder if not set -- `SDK_DIR`: directory containing `agoric-sdk` repo. Creates temporary folder if not set -- `SDK_REVISION`: The agoric-sdk git revision to checkout for the test, if no existing repo found. Remote head if not set +- `SDK_REVISION`: The agoric-sdk git revision to checkout for the test #### Examples +First build the image: + ```sh -SDK_REVISION=fa7ff5e55e OUTPUT_DIR=$HOME/loadgen-output ./start.sh +docker build . -t loadgen-runner ``` +To perform a loadgen cycle on a given revision: + ```sh -SDK_DIR=../agoric-sdk ./start.sh +OUTPUT_DIR=$HOME/loadgen-output +mkdir -p $OUTPUT_DIR +docker run --rm -v $OUTPUT_DIR:/out -e SDK_REVISION=fa7ff5e55e loadgen-runner --no-reset ``` -### Direct linux shell - -Assuming the agoric-sdk and cosmic-swingset are built and installed, and the agoric cli is available in PATH. +To use an existing agoric-sdk copy ```sh -mkdir -p $HOME/loadgen-output -./runner/bin/loadgen-runner $HOME/loadgen-output +OUTPUT_DIR=$HOME/loadgen-output +mkdir -p $OUTPUT_DIR +docker run --rm -v $OUTPUT_DIR:/out -v ../agoric-sdk:/src loadgen-runner --no-reset --stage.duration=10 ``` +### `run-daily-perf.sh` script + +The script is used as a service entrypoint to continuously execute a `loadgen-runner` docker image against the latest SDK revision with the default options. It creates output folders in the current working directory based on the latest revision. The script waits for a new revision to be available if results already exist. + ## Manual +The loadgen is implemented as a dapp deploy script which runs forever, and opens an HTTP server on a local port to receive config updates. 
+ +### Example + In one terminal: ```sh @@ -109,6 +159,8 @@ To disable all generators: curl -X PUT --data '{}' http://127.0.0.1:3352/config ``` +### Loadgen types + The load generators defined so far: - `faucet`: initialize by creating a `dapp-fungible-faucet` -style mint, then each cycle requests an invitation and completes it, adding 1000 Tokens to Bob's Purse. Takes 4 round-trips to complete. From 63257ca799de3e1fcd9768ce652fc680cd7fdef6 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 13 Aug 2021 00:24:57 +0000 Subject: [PATCH 31/34] Update to NESM for compatibility with latest SDKs Fix eslint-config dependency --- loadgen/package.json | 4 +- runner/bin/loadgen-runner | 3 +- runner/lib/entrypoint.js | 1 + runner/package.json | 6 +- yarn.lock | 436 ++++++++++++++++++++++++-------------- 5 files changed, 287 insertions(+), 163 deletions(-) mode change 100755 => 120000 runner/bin/loadgen-runner mode change 100644 => 100755 runner/lib/entrypoint.js diff --git a/loadgen/package.json b/loadgen/package.json index a59123a..c1d042b 100644 --- a/loadgen/package.json +++ b/loadgen/package.json @@ -28,10 +28,10 @@ "@agoric/ertp": "*", "@agoric/eventual-send": "*", "@agoric/install-ses": "*", - "@agoric/marshal": "^0.4.17", + "@agoric/marshal": "*", "@agoric/notifier": "*", "@agoric/store": "*", - "@agoric/ui-components": "^0.2.15", + "@agoric/ui-components": "*", "@agoric/zoe": "*", "esm": "^3.2.5" }, diff --git a/runner/bin/loadgen-runner b/runner/bin/loadgen-runner deleted file mode 100755 index 8ff0709..0000000 --- a/runner/bin/loadgen-runner +++ /dev/null @@ -1,2 +0,0 @@ -#!/usr/bin/env node -require('esm')(module)('../lib/entrypoint'); diff --git a/runner/bin/loadgen-runner b/runner/bin/loadgen-runner new file mode 120000 index 0000000..24ad568 --- /dev/null +++ b/runner/bin/loadgen-runner @@ -0,0 +1 @@ +../lib/entrypoint.js \ No newline at end of file diff --git a/runner/lib/entrypoint.js b/runner/lib/entrypoint.js old mode 100644 new mode 100755 index f4fa876..eecbd1c --- a/runner/lib/entrypoint.js +++ b/runner/lib/entrypoint.js @@ -1,3 +1,4 @@ +#!/usr/bin/env node /* global process */ // @ts-nocheck diff --git a/runner/package.json b/runner/package.json index 08346cb..1255b7b 100644 --- a/runner/package.json +++ b/runner/package.json @@ -6,6 +6,7 @@ "parsers": { "js": "mjs" }, + "type": "module", "bin": "bin/loadgen-runner", "scripts": { "build": "exit 0", @@ -20,7 +21,7 @@ "lint-fix:prettier": "prettier --write '**/*.{js,ts}'" }, "devDependencies": { - "@agoric/eslint-config": "^0.3.6", + "@endo/eslint-config": "^0.3.9", "@types/readline-transform": "^1.0.0", "@types/yargs-parser": "^20.2.1", "ava": "^3.13.0", @@ -36,7 +37,6 @@ "anylogger": "^0.21.0", "chalk": "^2.4.2", "deterministic-json": "^1.0.5", - "esm": "^3.2.25", "inquirer": "^6.3.1", "readline-transform": "^1.0.0", "yargs-parser": "^20.2.2" @@ -46,7 +46,7 @@ "license": "Apache-2.0", "eslintConfig": { "extends": [ - "@agoric" + "@endo" ], "rules": { "prettier/prettier": "off" diff --git a/yarn.lock b/yarn.lock index a361e94..dbb8410 100644 --- a/yarn.lock +++ b/yarn.lock @@ -14,10 +14,10 @@ dependencies: ses "^0.12.7" -"@agoric/assert@^0.3.5": - version "0.3.5" - resolved "https://registry.yarnpkg.com/@agoric/assert/-/assert-0.3.5.tgz#c7fcf6874e81f2d008acb83eeaafbb5edf8020bd" - integrity sha512-4nrozRJsE2Kn5YSWEalfbs4as5CtSSOWez9iBwqhDaJcRS+ndJHqoYkWNsNHGDstqPl/5X5GcjfhbjdLZQz5ow== +"@agoric/assert@^0.3.7": + version "0.3.7" + resolved 
"https://registry.yarnpkg.com/@agoric/assert/-/assert-0.3.7.tgz#ccee9e4f1eb6412fe977fa8b887b6224ef901dfa" + integrity sha512-vzNdcFeO/1SMazEGXrdKjutUVP1RVBoDv2L8DvMoVay7NRiV93A0OKvn3f1Xe6HPGRyTQJz3XBmgLKlR1FJYBg== dependencies: ses "^0.13.4" @@ -56,10 +56,10 @@ ses "^0.12.6" source-map "^0.7.3" -"@agoric/bundle-source@^1.4.3": - version "1.4.3" - resolved "https://registry.yarnpkg.com/@agoric/bundle-source/-/bundle-source-1.4.3.tgz#e0a937c219228a5a9ac67bd5148cdcc9f89bcbe2" - integrity sha512-9jEumlkAogE44IEkVTKWjqlCQiHEjRzXmE4gCg5PjRitzcsaxuIgKXDGoFqLulebWmwRIwl/VmQD+yO5Tja9rQ== +"@agoric/bundle-source@^1.4.5": + version "1.4.5" + resolved "https://registry.yarnpkg.com/@agoric/bundle-source/-/bundle-source-1.4.5.tgz#6f18fd1ceb3a649850ed6897fba9e55a39352b1e" + integrity sha512-NkPV4ap8QpU0KPaVpGlzF+vI5jmF2aeY706mcZ4P4zpXO9tvGJQPu5jY5cGDr3EjD+TAv1nwfpztJUQRiUYVzg== dependencies: "@agoric/babel-standalone" "^7.14.3" "@babel/generator" "^7.14.2" @@ -70,22 +70,11 @@ "@rollup/plugin-commonjs" "^19.0.0" "@rollup/plugin-node-resolve" "^13.0.0" acorn "^8.2.4" - esm agoric-labs/esm#Agoric-built + c8 "^7.7.2" rollup "^2.47.0" ses "^0.13.4" source-map "^0.7.3" -"@agoric/captp@^1.7.19": - version "1.7.19" - resolved "https://registry.yarnpkg.com/@agoric/captp/-/captp-1.7.19.tgz#6d9c610046a066365e3b7c65ea83524c8613f67a" - integrity sha512-zqNzrZGTefWngt2ZFzHpgBxBNOOi/qlyREGZ7SKHPSzfFzEm9jql7013S3onY9BZjaZ+qBeX2eXbT2rDAm+ddw== - dependencies: - "@agoric/eventual-send" "^0.13.21" - "@agoric/marshal" "^0.4.18" - "@agoric/nat" "^4.1.0" - "@agoric/promise-kit" "^0.2.19" - esm agoric-labs/esm#Agoric-built - "@agoric/captp@^1.7.6": version "1.7.6" resolved "https://registry.yarnpkg.com/@agoric/captp/-/captp-1.7.6.tgz#06b4f105781ced85951ba8d4957b15a7ec4bd94b" @@ -97,6 +86,17 @@ "@agoric/promise-kit" "^0.2.6" esm "^3.2.5" +"@agoric/captp@^1.8.0": + version "1.8.0" + resolved "https://registry.yarnpkg.com/@agoric/captp/-/captp-1.8.0.tgz#75f0bbdfba807f86925ca380c73e31b0ce9466e3" + integrity sha512-ZwxfKg1C3ENnGWBUskZhHdpw7NEwmVR7yQLrbybkWFDLRarOOZc2PLNUMxbiPyNt+u0ngoWGKa39kVU1YqvIow== + dependencies: + "@agoric/assert" "^0.3.7" + "@agoric/eventual-send" "^0.13.23" + "@agoric/marshal" "^0.4.20" + "@agoric/nat" "^4.1.0" + "@agoric/promise-kit" "^0.2.21" + "@agoric/compartment-mapper@^0.2.3", "@agoric/compartment-mapper@^0.2.4": version "0.2.4" resolved "https://registry.yarnpkg.com/@agoric/compartment-mapper/-/compartment-mapper-0.2.4.tgz#db78b3d3c34db2204aba6eaa29155a3e0a371e73" @@ -209,41 +209,36 @@ "@agoric/same-structure" "^0.1.6" "@agoric/store" "^0.4.7" -"@agoric/ertp@^0.11.9": - version "0.11.9" - resolved "https://registry.yarnpkg.com/@agoric/ertp/-/ertp-0.11.9.tgz#794d0a6e8f901d6d2739d0797b84f40fc6657e3d" - integrity sha512-LEjuRFZGmUmrWlvZckWU+rGtY35py0zVIZiCDlLNoXt7/bI13IkaN5Zeed6Altw9Re2hrS/NwQVKqRSZw7dBEw== +"@agoric/ertp@^0.11.11": + version "0.11.11" + resolved "https://registry.yarnpkg.com/@agoric/ertp/-/ertp-0.11.11.tgz#39b3812615b275d6ffd79f5305c27e831dcb4b37" + integrity sha512-kUi2Slr0orBgR2X6DCk8c3wlVlD1ynV2vXIRJJQ4PDi9FU514+j2lp/Lg8Xt8eiYfV26xmXPYiPENbe+OaphVg== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/eventual-send" "^0.13.21" - "@agoric/marshal" "^0.4.18" + "@agoric/assert" "^0.3.7" + "@agoric/eventual-send" "^0.13.23" + "@agoric/marshal" "^0.4.20" "@agoric/nat" "^4.1.0" - "@agoric/notifier" "^0.3.21" - "@agoric/promise-kit" "^0.2.19" - "@agoric/same-structure" "^0.1.19" - "@agoric/store" "^0.4.21" - -"@agoric/eslint-config@^0.3.6": - version "0.3.6" - resolved 
"https://registry.yarnpkg.com/@agoric/eslint-config/-/eslint-config-0.3.6.tgz#3c7bcbb1ed1c4584ff4ec530fa252db742a3ef9f" - integrity sha512-MftGwMGKzJiCZcrpGuyL4PppvJLigqOhT1nwOAhkHjVCdHLp5jzu+SDB8tMKYkOz/zaXHOjRU/xld+vzeCS6Ew== + "@agoric/notifier" "^0.3.23" + "@agoric/promise-kit" "^0.2.21" + "@agoric/same-structure" "^0.1.21" + "@agoric/store" "^0.4.23" "@agoric/eventual-send@*", "@agoric/eventual-send@^0.13.6", "@agoric/eventual-send@^0.13.7": version "0.13.7" resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.7.tgz#6414ffa8b025a34c7074b03a63eb739698ad6c12" integrity sha512-2R94fuM0PXMvZPOotKoYc7dM2PaR2AJ9ZQqnsj7glec32HC/rrm1tZDqBqdpx81oRmdQbsmceMTTlUtcrHoPEg== -"@agoric/eventual-send@^0.13.21": - version "0.13.21" - resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.21.tgz#d715e4353c2c3680714610682d01181398833466" - integrity sha512-z8ARMFuEj8/EO8wZG7LZ91D0GZfh5Y78mLSHnSKp3zs5ncA11Yakj4cLYSSZLl6KnRRKMRMhP4XLJmC+CnZmHQ== +"@agoric/eventual-send@^0.13.23": + version "0.13.23" + resolved "https://registry.yarnpkg.com/@agoric/eventual-send/-/eventual-send-0.13.23.tgz#93d5a535e59af8ed43dfeb2398ddb470398bb5d2" + integrity sha512-+Ct1+rPrthfTULIZVsxSFJ6sbu0iWVytpmKhUQUELCnKyVANrDrH1OJkEREYuYjPCE1s5wMHW+GCsa3oqZh2WQ== -"@agoric/import-bundle@^0.2.21": - version "0.2.21" - resolved "https://registry.yarnpkg.com/@agoric/import-bundle/-/import-bundle-0.2.21.tgz#50ad82f94fe138a22b119bf67f4ac4a7a6d0f3b8" - integrity sha512-MKWiODn4ghIiD63ra+DQaVjr391Ivgui4KOlnbnaoG2MMwTFOs5y3YW/sNAqfDRGgIeCe3GYwe/mVgO7x/LLnA== +"@agoric/import-bundle@^0.2.23": + version "0.2.23" + resolved "https://registry.yarnpkg.com/@agoric/import-bundle/-/import-bundle-0.2.23.tgz#d95f8206a443736cf7337cc74993b918f13ba1fa" + integrity sha512-MVutpscCNe3pLobBWQQ8NwKF7DTZK46/XCELkxQn6EX5/JInGNIIphf5K9mDm4PcyXQa5U+gZXmHhRIKoIV+og== dependencies: - "@agoric/assert" "^0.3.5" + "@agoric/assert" "^0.3.7" "@endo/base64" "^0.2.4" "@endo/compartment-mapper" "^0.4.1" @@ -278,12 +273,12 @@ "@agoric/eventual-send" "^0.13.6" ses "^0.12.7" -"@agoric/install-ses@^0.5.19": - version "0.5.19" - resolved "https://registry.yarnpkg.com/@agoric/install-ses/-/install-ses-0.5.19.tgz#721b01a4f7d5eac3dac561bf61d0190419a2d464" - integrity sha512-5cCRJWG/pwfvTYRxh5EODbpq8yCdC731GlTkhokXBewCSxfiKenFKU+zqsnyqDFOrDRirM/yA85vER+cwhu1LA== +"@agoric/install-ses@^0.5.21": + version "0.5.21" + resolved "https://registry.yarnpkg.com/@agoric/install-ses/-/install-ses-0.5.21.tgz#19cc528a0e3cece25c23c1756dbc030e255eca55" + integrity sha512-gAJMvAWenJGVM7LBkOe2F4YruVaGOAQTLN9proYjiVmlWAaZbMr8hNxw7WPUZJpBvLOKkmcGqf56z6A7L9OagQ== dependencies: - "@agoric/eventual-send" "^0.13.21" + "@agoric/eventual-send" "^0.13.23" ses "^0.13.4" "@agoric/make-hardener@^0.1.2": @@ -291,15 +286,15 @@ resolved "https://registry.yarnpkg.com/@agoric/make-hardener/-/make-hardener-0.1.3.tgz#807b0072bef95d935c3370d406d9dfeb719f69ee" integrity sha512-rc9M2ErE/Zu822OLCnAltr957ZVTsBvVZ7KA2unqDpjo3q7PqZF2hWFB1xXD2Qkfwt5exQ3BjFbkj+NUaTg4gA== -"@agoric/marshal@^0.4.17", "@agoric/marshal@^0.4.18": - version "0.4.18" - resolved "https://registry.yarnpkg.com/@agoric/marshal/-/marshal-0.4.18.tgz#23033b7b74bc24e2b4fb83311823b768ba5ced3a" - integrity sha512-vtcWBoguo2sYJrjfH6XmnqH9+7rNBCTzkGjccenoMlBW80RLyi7u2xJs6Y30XNW3E9wysgaLYuUeSIQ9B+YYzg== +"@agoric/marshal@*", "@agoric/marshal@^0.4.20": + version "0.4.20" + resolved "https://registry.yarnpkg.com/@agoric/marshal/-/marshal-0.4.20.tgz#51832e69819c1a9876d5f03f2b98213d8197ba09" + integrity 
sha512-YLLhkIqP7FMNoazwhouSSkZXxkMGlmaW+xIFx0FcR87djsHkI7uhDXCno0Bi//ChAxDqqXiLa2aL7GSKUgsqQw== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/eventual-send" "^0.13.21" + "@agoric/assert" "^0.3.7" + "@agoric/eventual-send" "^0.13.23" "@agoric/nat" "^4.1.0" - "@agoric/promise-kit" "^0.2.19" + "@agoric/promise-kit" "^0.2.21" "@agoric/marshal@^0.4.3", "@agoric/marshal@^0.4.4": version "0.4.4" @@ -331,15 +326,15 @@ "@agoric/marshal" "^0.4.4" "@agoric/promise-kit" "^0.2.6" -"@agoric/notifier@^0.3.21": - version "0.3.21" - resolved "https://registry.yarnpkg.com/@agoric/notifier/-/notifier-0.3.21.tgz#b16466a6d4702a329a926ac7e0236045be9de2e3" - integrity sha512-Q6/Nglt/7cdoAPG5LlgB9R2/35ugv+8fqYTVV4dz5OsxPWTLnAGjgcgtN7bWGSTWWaEuXS4k6EuBPU9moAWkVQ== +"@agoric/notifier@^0.3.23": + version "0.3.23" + resolved "https://registry.yarnpkg.com/@agoric/notifier/-/notifier-0.3.23.tgz#a6c67bf404cb58cd2be089ae950f9486d782acd4" + integrity sha512-aNGOXTt3hTbKUk8UqPicijX1O5huUst7IefMJuETgbJHsfU5alCuhayISEXScougbTyIQwpfUW3Cri3SsPdVSw== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/eventual-send" "^0.13.21" - "@agoric/marshal" "^0.4.18" - "@agoric/promise-kit" "^0.2.19" + "@agoric/assert" "^0.3.7" + "@agoric/eventual-send" "^0.13.23" + "@agoric/marshal" "^0.4.20" + "@agoric/promise-kit" "^0.2.21" "@agoric/pegasus@^0.2.0": version "0.2.0" @@ -368,12 +363,12 @@ dependencies: "@agoric/eventual-send" "^0.13.6" -"@agoric/promise-kit@^0.2.19": - version "0.2.19" - resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.19.tgz#64207de0a020680ad95575f6a10ebcb322b9f006" - integrity sha512-86E4pw0uVCiJ5bhDR8V7xGO8SSeqcdWGJPS5ilk8UJMMJxrKvAIKx32ZgXcx1Lw2kCHbWbc3/FVCDcGMvB67rA== +"@agoric/promise-kit@^0.2.21": + version "0.2.21" + resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.21.tgz#66aa16a020479c610608f169ae08eea9d018d62a" + integrity sha512-3v7FxmkH5qBkMjQ5Js99VJfKbi06ez5HaRTo/iX47x7i+rzyVB6f9wwVXnlPXWnbFyRxaOuiz1R4hqd6CHEDTA== dependencies: - "@agoric/eventual-send" "^0.13.21" + "@agoric/eventual-send" "^0.13.23" "@agoric/registrar@^0.2.7": version "0.2.7" @@ -383,13 +378,13 @@ "@agoric/assert" "^0.2.6" "@agoric/sparse-ints" "^0.1.6" -"@agoric/same-structure@^0.1.19": - version "0.1.19" - resolved "https://registry.yarnpkg.com/@agoric/same-structure/-/same-structure-0.1.19.tgz#e5f469f00399c8eebf563fb80bbd7c5bd0d42224" - integrity sha512-D6YVDBOcW16ZQvpX/kS8e82lJicRybQj0wpSzKI7S0pc8npwoZbIgG9v1/yi9Lm/NnQB5PKjlRQxHUOznpHsEA== +"@agoric/same-structure@^0.1.21": + version "0.1.21" + resolved "https://registry.yarnpkg.com/@agoric/same-structure/-/same-structure-0.1.21.tgz#647afa5de3ac3c728afe27659e773d4d529079fd" + integrity sha512-vv038JuNpr918WOlvnKO6rOkWdA9rJvF+tqyWBi0BIim9TwGYCbMoWHtGTy3z14Rw9pRV/CtHKXvrdTZKuGx8g== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/marshal" "^0.4.18" + "@agoric/assert" "^0.3.7" + "@agoric/marshal" "^0.4.20" "@agoric/same-structure@^0.1.6": version "0.1.6" @@ -436,13 +431,13 @@ "@agoric/assert" "^0.2.6" "@agoric/marshal" "^0.4.4" -"@agoric/store@^0.4.21": - version "0.4.21" - resolved "https://registry.yarnpkg.com/@agoric/store/-/store-0.4.21.tgz#6f7086eb19173d8967eef06782fc79ab92345bcb" - integrity sha512-U3tzAvyhASauDJ4vC88yZF0u/QuekIBJklbNejW6OlOf8KiEOuhdkJQcxAxPtSFcoOQ9OXadGdspjkY3XRRCCg== +"@agoric/store@^0.4.23": + version "0.4.23" + resolved "https://registry.yarnpkg.com/@agoric/store/-/store-0.4.23.tgz#2db7c85b1ba636a1d0277bd0207b0b522042e37c" + integrity 
sha512-CY9yr1yLnXFEJ8ukrYaPYxuN3asDuuV2vYbPaYCdbzXs7vv9mFkOj/c5c8xz/v3RfWYNRSOulH78bfh5uVN+yA== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/marshal" "^0.4.18" + "@agoric/assert" "^0.3.7" + "@agoric/marshal" "^0.4.20" "@agoric/swing-store-lmdb@^0.4.6": version "0.4.6" @@ -451,12 +446,12 @@ dependencies: node-lmdb "^0.9.4" -"@agoric/swing-store-lmdb@^0.5.5": - version "0.5.5" - resolved "https://registry.yarnpkg.com/@agoric/swing-store-lmdb/-/swing-store-lmdb-0.5.5.tgz#8f5bd3286d70d87c5caf4a605e24774c4a6d0e29" - integrity sha512-PBobEkwQ/xCAo7kU18n5qngy7hztukeickQAdpvqRdYOglwjxEdgmiF3TiIG2RBOD3QB+YHLLcP3zA+fLayllg== +"@agoric/swing-store-lmdb@^0.5.7": + version "0.5.7" + resolved "https://registry.yarnpkg.com/@agoric/swing-store-lmdb/-/swing-store-lmdb-0.5.7.tgz#82f7d8294ddaf89d6cdd4186dd62b97deafc56d8" + integrity sha512-YHFCKRigfAN/hYNQwIZ2Ln33b5pywkLuc6BsvErQY39mAoNxxLupwJTurYzd+CFuIsCYRFQIy1rhgfrFP/8rZw== dependencies: - "@agoric/assert" "^0.3.5" + "@agoric/assert" "^0.3.7" better-sqlite3 "^7.4.1" node-lmdb "^0.9.4" @@ -467,12 +462,12 @@ dependencies: n-readlines "^1.0.0" -"@agoric/swing-store-simple@^0.4.5": - version "0.4.5" - resolved "https://registry.yarnpkg.com/@agoric/swing-store-simple/-/swing-store-simple-0.4.5.tgz#5404a70f6a5e739e8adec3a97c553eeadf6e6524" - integrity sha512-WJI+QRGFBGbqHPuCtHd1BjvoQtnbuFOEla05fIaFMg/NlZT/Wc7nwyh2VTpTc5SATxuc2CFto5wue3pHbFAsAA== +"@agoric/swing-store-simple@^0.4.7": + version "0.4.7" + resolved "https://registry.yarnpkg.com/@agoric/swing-store-simple/-/swing-store-simple-0.4.7.tgz#c4cf6067181ef640883a201f54acadd92769a12e" + integrity sha512-pG+6zungnAWLuWpbjBWJmddQ6UTVeNfnL5MpS7OGOxBzAqX1z5oiPR9ZIDCHiToDlUVZyAKxgjpZQHC560AuYA== dependencies: - "@agoric/assert" "^0.3.5" + "@agoric/assert" "^0.3.7" n-readlines "^1.0.0" "@agoric/swingset-vat@^0.16.0": @@ -511,29 +506,26 @@ tmp "^0.2.1" yargs "^14.2.0" -"@agoric/swingset-vat@^0.18.5": - version "0.18.5" - resolved "https://registry.yarnpkg.com/@agoric/swingset-vat/-/swingset-vat-0.18.5.tgz#e5c3ad4e6fce9f0c23f6967abc26842eadd5ccde" - integrity sha512-3yIbschuJFlOKwOb+sGZ1OAd364CIoH7/EsxmOxNGCDEqIl/PW1HBxAN9vXa0uoI0g3oQDPvQmvzzRqBaX86Hg== +"@agoric/swingset-vat@^0.19.0": + version "0.19.0" + resolved "https://registry.yarnpkg.com/@agoric/swingset-vat/-/swingset-vat-0.19.0.tgz#7b4873d2c93316db3bb07d3756def463607ecf90" + integrity sha512-dgfP0Q/rmPVbsF+5/H8GuX02qn3Pv8ypqssf93hNcoDv2YE3jHEO5TDrPPWX6C9o8vAHlyj/XFaIAdpM+9VGnw== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/babel-parser" "^7.6.4" + "@agoric/assert" "^0.3.7" "@agoric/babel-standalone" "^7.14.3" - "@agoric/bundle-source" "^1.4.3" - "@agoric/captp" "^1.7.19" - "@agoric/eventual-send" "^0.13.21" - "@agoric/import-bundle" "^0.2.21" - "@agoric/install-ses" "^0.5.19" - "@agoric/marshal" "^0.4.18" + "@agoric/bundle-source" "^1.4.5" + "@agoric/captp" "^1.8.0" + "@agoric/eventual-send" "^0.13.23" + "@agoric/import-bundle" "^0.2.23" + "@agoric/install-ses" "^0.5.21" + "@agoric/marshal" "^0.4.20" "@agoric/nat" "^4.1.0" - "@agoric/notifier" "^0.3.21" - "@agoric/promise-kit" "^0.2.19" - "@agoric/store" "^0.4.21" - "@agoric/swing-store-lmdb" "^0.5.5" - "@agoric/swing-store-simple" "^0.4.5" - "@agoric/tame-metering" "^2.0.5" - "@agoric/transform-metering" "^1.4.18" - "@agoric/xsnap" "^0.6.8" + "@agoric/notifier" "^0.3.23" + "@agoric/promise-kit" "^0.2.21" + "@agoric/store" "^0.4.23" + "@agoric/swing-store-lmdb" "^0.5.7" + "@agoric/swing-store-simple" "^0.4.7" + "@agoric/xsnap" "^0.6.10" "@endo/base64" "^0.2.4" "@types/tmp" "^0.2.0" 
anylogger "^0.21.0" @@ -551,23 +543,23 @@ dependencies: esm "^3.2.5" -"@agoric/tame-metering@^2.0.5": - version "2.0.5" - resolved "https://registry.yarnpkg.com/@agoric/tame-metering/-/tame-metering-2.0.5.tgz#e608388a86a33fa5a6214ad7f6381fc20e5bc682" - integrity sha512-EOzB9oAEIcfg4bo2CzhysOLrYGbLdmywcEaFIpkYUjm2ogB4TAXNZL/FydV2n3ZYBopj7TpTjMGCXhlYJIENkQ== +"@agoric/tame-metering@^2.0.7": + version "2.0.7" + resolved "https://registry.yarnpkg.com/@agoric/tame-metering/-/tame-metering-2.0.7.tgz#140b548a33c100bc917ec88812fcc91aed5b3936" + integrity sha512-ryx2YVHAOqfhlVydSyZYptp2g5fWO03qnkaJtLHiRBnYb32Vu2kJy0NPVMsF+rkvjlKfcWugV6TjHY/BQJ3LfQ== "@agoric/transform-eventual-send@^1.4.6": version "1.4.6" resolved "https://registry.yarnpkg.com/@agoric/transform-eventual-send/-/transform-eventual-send-1.4.6.tgz#04f2dca657997d58f5222e67c1f2dce79cccd808" integrity sha512-XICAgqUeitTJusQ2ULUlNh+EVWMDMULTAn+JZV7j0NgUQysqlHB1TaB6pNPJnIkJSLkOFWNomVNtNXZ1WAhzUQ== -"@agoric/transform-metering@^1.4.18": - version "1.4.18" - resolved "https://registry.yarnpkg.com/@agoric/transform-metering/-/transform-metering-1.4.18.tgz#ca380c7d808a40c1215e0570670cea1feb59cbbf" - integrity sha512-NBjY4nib5mJEcxh54cBLjoQxn/nI0iSwriokFm6vNid3zEMa6NiKWey+fYZ+c+DgInngHD+tzCzISJerCID6MA== +"@agoric/transform-metering@^1.4.20": + version "1.4.20" + resolved "https://registry.yarnpkg.com/@agoric/transform-metering/-/transform-metering-1.4.20.tgz#0a9eb24053c560e804daf4deef3344632bd3bbdd" + integrity sha512-RfQhp8n9o3tvLUIozMCrI8HsViklInpIIh3sjcgLEWVVhuv0+sSNnBkN8F9u7dDE3ViWOpvWXQFq45sCWK2/UA== dependencies: "@agoric/nat" "^4.1.0" - "@agoric/tame-metering" "^2.0.5" + "@agoric/tame-metering" "^2.0.7" "@babel/generator" "^7.14.2" "@babel/parser" "^7.14.2" "@babel/traverse" "^7.14.2" @@ -607,17 +599,17 @@ "@agoric/swingset-vat" "^0.16.0" "@agoric/zoe" "^0.15.0" -"@agoric/ui-components@^0.2.15": - version "0.2.16" - resolved "https://registry.yarnpkg.com/@agoric/ui-components/-/ui-components-0.2.16.tgz#4aba4d3d9f5c9787ccbd3a9e7fab0c123c0ded58" - integrity sha512-R+5b0GOeGcrZYYRqX34thDSfnm45xh4k4QceaWrq5jzhPeoUtp95rIX4SA4Mi6P8VyYSOEbKzzoZ2gIjx+Q2lQ== +"@agoric/ui-components@*": + version "0.2.18" + resolved "https://registry.yarnpkg.com/@agoric/ui-components/-/ui-components-0.2.18.tgz#5bca681cedf2ccc034dad088603a8dde6e758037" + integrity sha512-CNPWZ9RYybp/8JIaCYhRwAZc7heMxul2UeniM3pnHpvZ+oYHfsy+5fDtO18JLDyzS8rJ9//pCnlM8b9r+GXs9g== dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/ertp" "^0.11.9" - "@agoric/eventual-send" "^0.13.21" - "@agoric/install-ses" "^0.5.19" + "@agoric/assert" "^0.3.7" + "@agoric/ertp" "^0.11.11" + "@agoric/eventual-send" "^0.13.23" + "@agoric/install-ses" "^0.5.21" "@agoric/nat" "^4.1.0" - "@agoric/zoe" "^0.17.4" + "@agoric/zoe" "^0.17.6" clsx "^1.1.1" "@agoric/xsnap@^0.5.1": @@ -633,16 +625,16 @@ glob "^7.1.6" ses "^0.12.7" -"@agoric/xsnap@^0.6.8": - version "0.6.8" - resolved "https://registry.yarnpkg.com/@agoric/xsnap/-/xsnap-0.6.8.tgz#b3e2a04287ded8e47101be86c850338570ffeace" - integrity sha512-Mvdr85Zsgvek88lQQgJBlma42I0I5et2aODGuiYMQHAwQQM+vAlLothEy5nqihY/exIXhiq17qabKrUkovOFfQ== +"@agoric/xsnap@^0.6.10": + version "0.6.10" + resolved "https://registry.yarnpkg.com/@agoric/xsnap/-/xsnap-0.6.10.tgz#acff6b343dd2d0db28f94662449fa458e89eb76a" + integrity sha512-0dJqOzqaQkCcnK3B6nZ4s71Fuz7PtNShAgwsqAt4078sUtbyi0Pq6m0pw5XGJrGpopMvZmPIrXG4fhsn4kz34w== dependencies: - "@agoric/assert" "^0.3.5" + "@agoric/assert" "^0.3.7" "@agoric/babel-standalone" "^7.14.3" - "@agoric/bundle-source" "^1.4.3" - 
"@agoric/eventual-send" "^0.13.21" - "@agoric/install-ses" "^0.5.19" + "@agoric/bundle-source" "^1.4.5" + "@agoric/eventual-send" "^0.13.23" + "@agoric/install-ses" "^0.5.21" esm agoric-labs/esm#Agoric-built glob "^7.1.6" ses "^0.13.4" @@ -666,24 +658,24 @@ "@agoric/swingset-vat" "^0.16.0" "@agoric/transform-metering" "^1.4.6" -"@agoric/zoe@^0.17.4": - version "0.17.4" - resolved "https://registry.yarnpkg.com/@agoric/zoe/-/zoe-0.17.4.tgz#3f577bce6005a05849ef4ce596692625067d5938" - integrity sha512-GLIT5+qTSniHKUOX93x9PcbSVALiEJoXpiuBeUv1sB8Zvb6ziq03JfYsyIKy1lVNnYjm1w7LnSd8dAESUE9ZQg== - dependencies: - "@agoric/assert" "^0.3.5" - "@agoric/bundle-source" "^1.4.3" - "@agoric/ertp" "^0.11.9" - "@agoric/eventual-send" "^0.13.21" - "@agoric/import-bundle" "^0.2.21" - "@agoric/marshal" "^0.4.18" +"@agoric/zoe@^0.17.6": + version "0.17.6" + resolved "https://registry.yarnpkg.com/@agoric/zoe/-/zoe-0.17.6.tgz#45ce7002854ff48dee1a1566efedb75eace63379" + integrity sha512-pPVjObYh+A2l8Ih2MofLFyDY3eOZPFa6eSWfXE0EM7m/QufL58AcWL1wVXJcKXyMCaPD2UydXnQk/kmMvtLdvA== + dependencies: + "@agoric/assert" "^0.3.7" + "@agoric/bundle-source" "^1.4.5" + "@agoric/ertp" "^0.11.11" + "@agoric/eventual-send" "^0.13.23" + "@agoric/import-bundle" "^0.2.23" + "@agoric/marshal" "^0.4.20" "@agoric/nat" "^4.1.0" - "@agoric/notifier" "^0.3.21" - "@agoric/promise-kit" "^0.2.19" - "@agoric/same-structure" "^0.1.19" - "@agoric/store" "^0.4.21" - "@agoric/swingset-vat" "^0.18.5" - "@agoric/transform-metering" "^1.4.18" + "@agoric/notifier" "^0.3.23" + "@agoric/promise-kit" "^0.2.21" + "@agoric/same-structure" "^0.1.21" + "@agoric/store" "^0.4.23" + "@agoric/swingset-vat" "^0.19.0" + "@agoric/transform-metering" "^1.4.20" "@babel/code-frame@7.12.11": version "7.12.11" @@ -996,6 +988,11 @@ "@babel/helper-validator-identifier" "^7.14.5" to-fast-properties "^2.0.0" +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + "@concordance/react@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@concordance/react/-/react-2.0.0.tgz#aef913f27474c53731f4fd79cc2f54897de90fde" @@ -1089,6 +1086,11 @@ resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + "@nodelib/fs.scandir@2.1.4": version "2.1.4" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz#d4b3549a5db5de2683e0c1071ab4f140904bbf69" @@ -1245,6 +1247,11 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== +"@types/istanbul-lib-coverage@^2.0.1": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz#4ba8ddb720221f432e443bd5f9117fd22cfd4762" + integrity sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw== + 
"@types/json5@^0.0.29": version "0.0.29" resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" @@ -1828,6 +1835,24 @@ bytes@3.1.0: resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== +c8@^7.7.2: + version "7.8.0" + resolved "https://registry.yarnpkg.com/c8/-/c8-7.8.0.tgz#8fcfe848587d9d5796f22e9b0546a387a66d1b3b" + integrity sha512-x2Bx+IIEd608B1LmjiNQ/kizRPkCWo5XzuV57J9afPjAHSnYXALwbCSOkQ7cSaNXBNblfqcvdycj+klmL+j6yA== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@istanbuljs/schema" "^0.1.2" + find-up "^5.0.0" + foreground-child "^2.0.0" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-report "^3.0.0" + istanbul-reports "^3.0.2" + rimraf "^3.0.0" + test-exclude "^6.0.0" + v8-to-istanbul "^8.0.0" + yargs "^16.2.0" + yargs-parser "^20.2.7" + cacheable-request@^6.0.0: version "6.1.0" resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" @@ -2144,6 +2169,13 @@ content-type@~1.0.4: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== +convert-source-map@^1.6.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" @@ -2176,7 +2208,7 @@ core-util-is@1.0.2, core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= -cross-spawn@^7.0.2: +cross-spawn@^7.0.0, cross-spawn@^7.0.2: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -2961,6 +2993,14 @@ find-up@^4.0.0: locate-path "^5.0.0" path-exists "^4.0.0" +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + flat-cache@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" @@ -2974,6 +3014,14 @@ flatted@^3.1.0: resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469" integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== +foreground-child@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-2.0.0.tgz#71b32800c9f15aa8f2f83f4a6bd9bff35d861a53" + integrity sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA== + dependencies: + cross-spawn 
"^7.0.0" + signal-exit "^3.0.2" + forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" @@ -3228,6 +3276,11 @@ hosted-git-info@^2.1.4: resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + http-cache-semantics@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390" @@ -3586,6 +3639,28 @@ isstream@~0.1.2: resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= +istanbul-lib-coverage@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec" + integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg== + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-reports@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b" + integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + js-string-escape@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/js-string-escape/-/js-string-escape-1.0.1.tgz#e2625badbc0d67c7533e9edc1068c587ae4137ef" @@ -3786,6 +3861,13 @@ locate-path@^5.0.0: dependencies: p-locate "^4.1.0" +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + lodash.clonedeep@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" @@ -4298,6 +4380,13 @@ p-limit@^2.0.0, p-limit@^2.2.0: dependencies: p-try "^2.0.0" +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + p-locate@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" @@ -4319,6 +4408,13 @@ p-locate@^4.1.0: dependencies: p-limit "^2.2.0" +p-locate@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + p-map@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" @@ -5331,6 +5427,15 @@ temp@^0.9.1: mkdirp "^0.5.1" rimraf "~2.6.2" +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" @@ -5557,6 +5662,15 @@ v8-compile-cache@^2.0.3: resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== +v8-to-istanbul@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.0.0.tgz#4229f2a99e367f3f018fa1d5c2b8ec684667c69c" + integrity sha512-LkmXi8UUNxnCC+JlH7/fsfsKr5AU110l+SYGJimWNkWhxbN5EyeOtm1MJ0hhvqMMOhGwBj1Fp70Yv9i+hX0QAg== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + validate-npm-package-license@^3.0.1: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" @@ -5704,6 +5818,11 @@ yargs-parser@^20.2.2: resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== +yargs-parser@^20.2.7: + version "20.2.9" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + yargs@^14.2.0: version "14.2.3" resolved "https://registry.yarnpkg.com/yargs/-/yargs-14.2.3.tgz#1a1c3edced1afb2a2fea33604bc6d1d8d688a414" @@ -5733,3 +5852,8 @@ yargs@^16.2.0: string-width "^4.2.0" y18n "^5.0.5" yargs-parser "^20.2.2" + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== From 94f967814a83a4a70e4826191b0bcc103359861b Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 13 Aug 2021 01:31:50 +0000 Subject: [PATCH 32/34] Check for children from scratch until we find a match Do not fail fast, but avoid exec races --- runner/lib/test-helpers.js | 62 ++++++++++++++------------------------ 1 file changed, 22 insertions(+), 40 deletions(-) diff --git a/runner/lib/test-helpers.js b/runner/lib/test-helpers.js index 4ddc6f8..3529caa 100644 --- a/runner/lib/test-helpers.js +++ b/runner/lib/test-helpers.js @@ -54,33 +54,6 @@ export const httpRequest = (urlOrString, options = {}) => { }); }; -/** - * - * @param 
{import('./helpers/process-info.js').ProcessInfo} info - * @param {number} [retries] - * @returns {Promise} - */ -export const untilArgv = async (info, retries = 50) => { - const argv = await info.getArgv(); - return ( - argv || - (retries > 0 ? (await sleep(100), untilArgv(info, retries - 1)) : null) - ); -}; - -/** - * - * @param {import('./helpers/process-info.js').ProcessInfo} info - * @param {number} [retries] - * @returns {Promise} - */ -export const untilChildren = async (info, retries = 50) => { - const children = await info.getChildren(); - return children.length || retries === 0 - ? children - : (await sleep(100), untilChildren(info, retries - 1)); -}; - /** @typedef {(argv: string[]) => boolean} ArgvMatcher */ /** @@ -103,12 +76,18 @@ export const wrapArgvMatcherIgnoreEnvShebang = (argvMatcher) => (argv) => /** * @param {import('./helpers/process-info.js').ProcessInfo} launcherInfo * @param {ArgvMatcher} argvMatcher + * @param {number} [retries] + * @returns {Promise} */ -export const getChildMatchingArgv = async (launcherInfo, argvMatcher) => { +export const getChildMatchingArgv = async ( + launcherInfo, + argvMatcher, + retries = 50, +) => { const childrenWithArgv = await Promise.all( - (await untilChildren(launcherInfo)).map(async (info) => ({ + (await launcherInfo.getChildren()).map(async (info) => ({ info, - argv: await untilArgv(info), + argv: await info.getArgv(), })), ); @@ -116,17 +95,20 @@ export const getChildMatchingArgv = async (launcherInfo, argvMatcher) => { if (result) { return result.info; + } else if (retries > 0) { + await sleep(100); + return getChildMatchingArgv(launcherInfo, argvMatcher, retries - 1); + } else { + console.error( + `getChildMatchingArgv: ${ + childrenWithArgv.length + } child process, none of ["${childrenWithArgv + .map(({ argv }) => (argv || ['no argv']).join(' ')) + .join('", "')}"] match expected arguments`, + ); + + throw new Error("Couldn't find child process"); } - - console.error( - `getChildMatchingArgv: ${ - childrenWithArgv.length - } child process, none of ["${childrenWithArgv - .map(({ argv }) => (argv || ['no argv']).join(' ')) - .join('", "')}"] match expected arguments`, - ); - - throw new Error("Couldn't find child process"); }; /** From 00367f980d5b80f8af4f15cd62402ce4e5882dfb Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 13 Aug 2021 16:13:02 +0000 Subject: [PATCH 33/34] Rename tasks to avoid `test` --- runner/lib/main.js | 20 +++++----- .../lib/{test-helpers.js => tasks/helpers.js} | 8 ++-- .../local-chain.js} | 40 ++++++++++--------- .../shared-loadgen.js} | 16 ++++---- .../lib/{test-testnet.js => tasks/testnet.js} | 38 ++++++++++-------- runner/lib/tasks/types.d.ts | 29 ++++++++++++++ runner/lib/test-operations.d.ts | 29 -------------- 7 files changed, 94 insertions(+), 86 deletions(-) rename runner/lib/{test-helpers.js => tasks/helpers.js} (93%) rename runner/lib/{test-local-chain.js => tasks/local-chain.js} (84%) rename runner/lib/{test-shared-loadgen.js => tasks/shared-loadgen.js} (84%) rename runner/lib/{test-testnet.js => tasks/testnet.js} (90%) create mode 100644 runner/lib/tasks/types.d.ts delete mode 100644 runner/lib/test-operations.d.ts diff --git a/runner/lib/main.js b/runner/lib/main.js index ace994f..b2a6679 100644 --- a/runner/lib/main.js +++ b/runner/lib/main.js @@ -26,8 +26,8 @@ import { makeFsHelper } from './helpers/fs.js'; import { makeProcfsHelper } from './helpers/procsfs.js'; import { makeOutputter } from './helpers/outputter.js'; -import { makeTestOperations as makeLocalTestOperations } 
from './test-local-chain.js'; -import { makeTestOperations as makeTestnetTestOperations } from './test-testnet.js'; +import { makeTasks as makeLocalChainTasks } from './tasks/local-chain.js'; +import { makeTasks as makeTestnetTasks } from './tasks/testnet.js'; /** @typedef {import('./helpers/async.js').Task} Task */ @@ -251,7 +251,7 @@ const main = async (progName, rawArgs, powers) => { console.log(`Outputting to ${resolvePath(outputDir)}`); await fs.mkdir(outputDir, { recursive: true }); - let makeTestOperations; + let makeTasks; /** @type {string} */ let testnetOrigin; @@ -259,12 +259,12 @@ const main = async (progName, rawArgs, powers) => { case null: case undefined: case 'local': - makeTestOperations = makeLocalTestOperations; + makeTasks = makeLocalChainTasks; testnetOrigin = ''; break; case 'testnet': case 'stage': - makeTestOperations = makeTestnetTestOperations; + makeTasks = makeTestnetTasks; testnetOrigin = argv.testnetOrigin || `https://${argv.profile}.agoric.net`; break; @@ -272,7 +272,7 @@ const main = async (progName, rawArgs, powers) => { throw new Error(`Unexpected profile option: ${argv.profile}`); } - const { setupTest, runChain, runClient, runLoadgen } = makeTestOperations({ + const { setupTasks, runChain, runClient, runLoadgen } = makeTasks({ spawn, fs, findDirByPrefix: findByPrefix, @@ -315,7 +315,7 @@ const main = async (progName, rawArgs, powers) => { }; /** - * @param {import("./test-operations.js").RunChainInfo} chainInfo + * @param {import("./tasks/types.js").RunChainInfo} chainInfo * @param {Object} param1 * @param {() => void} param1.resolveFirstEmptyBlock * @param {import("stream").Writable} param1.out @@ -888,16 +888,16 @@ const main = async (progName, rawArgs, powers) => { const reset = coerceBooleanOption(argv.reset, true); const setupConfig = { reset, withMonitor, testnetOrigin }; - logPerfEvent('setup-test-start', setupConfig); + logPerfEvent('setup-tasks-start', setupConfig); await aggregateTryFinally( // Do not short-circuit on interrupt, let the spawned setup process terminate async () => - setupTest({ stdout: out, stderr: err, config: setupConfig }), + setupTasks({ stdout: out, stderr: err, config: setupConfig }), // This will throw if there was any interrupt, and prevent further execution async () => releaseInterrupt(), ); - logPerfEvent('setup-test-finish'); + logPerfEvent('setup-tasks-finish'); } const stages = diff --git a/runner/lib/test-helpers.js b/runner/lib/tasks/helpers.js similarity index 93% rename from runner/lib/test-helpers.js rename to runner/lib/tasks/helpers.js index 3529caa..0f1a288 100644 --- a/runner/lib/test-helpers.js +++ b/runner/lib/tasks/helpers.js @@ -5,8 +5,8 @@ import http from 'http'; import https from 'https'; import fs from 'fs'; -import { sleep } from './helpers/async.js'; -import { makeOutputter } from './helpers/outputter.js'; +import { sleep } from '../helpers/async.js'; +import { makeOutputter } from '../helpers/outputter.js'; const protocolModules = { 'http:': http, @@ -74,10 +74,10 @@ export const wrapArgvMatcherIgnoreEnvShebang = (argvMatcher) => (argv) => argvMatcher(argv) || (/env$/.test(argv[0]) && argvMatcher(argv.slice(1))); /** - * @param {import('./helpers/process-info.js').ProcessInfo} launcherInfo + * @param {import('../helpers/process-info.js').ProcessInfo} launcherInfo * @param {ArgvMatcher} argvMatcher * @param {number} [retries] - * @returns {Promise} + * @returns {Promise} */ export const getChildMatchingArgv = async ( launcherInfo, diff --git a/runner/lib/test-local-chain.js 
b/runner/lib/tasks/local-chain.js similarity index 84% rename from runner/lib/test-local-chain.js rename to runner/lib/tasks/local-chain.js index 7322f9d..c643e23 100644 --- a/runner/lib/test-local-chain.js +++ b/runner/lib/tasks/local-chain.js @@ -7,17 +7,17 @@ import { pipeline as pipelineCallback } from 'stream'; import { childProcessDone, makeSpawnWithPrintAndPipeOutput, -} from './helpers/child-process.js'; -import LineStreamTransform from './helpers/line-stream-transform.js'; -import { PromiseAllOrErrors, tryTimeout } from './helpers/async.js'; -import { whenStreamSteps } from './helpers/stream-steps.js'; +} from '../helpers/child-process.js'; +import LineStreamTransform from '../helpers/line-stream-transform.js'; +import { PromiseAllOrErrors, tryTimeout } from '../helpers/async.js'; +import { whenStreamSteps } from '../helpers/stream-steps.js'; import { getArgvMatcher, getChildMatchingArgv, wrapArgvMatcherIgnoreEnvShebang, getConsoleAndStdio, -} from './test-helpers.js'; -import { makeLoadgenOperation } from './test-shared-loadgen.js'; +} from './helpers.js'; +import { makeLoadgenTask } from './shared-loadgen.js'; const pipeline = promisify(pipelineCallback); @@ -43,13 +43,13 @@ const clientArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( * * @param {Object} powers * @param {import("child_process").spawn} powers.spawn Node.js spawn - * @param {import("./helpers/fs.js").MakeFIFO} powers.makeFIFO Make a FIFO file readable stream - * @param {import("./helpers/fs.js").FindByPrefix} powers.findDirByPrefix - * @param {import("./helpers/procsfs.js").GetProcessInfo} powers.getProcessInfo - * @returns {import("./test-operations.js").TestOperations} + * @param {import("../helpers/fs.js").MakeFIFO} powers.makeFIFO Make a FIFO file readable stream + * @param {import("../helpers/fs.js").FindByPrefix} powers.findDirByPrefix + * @param {import("../helpers/procsfs.js").GetProcessInfo} powers.getProcessInfo + * @returns {import("./types.js").OrchestratorTasks} * */ -export const makeTestOperations = ({ +export const makeTasks = ({ spawn, findDirByPrefix, makeFIFO, @@ -60,9 +60,13 @@ export const makeTestOperations = ({ end: false, }); - /** @param {import("./test-operations.js").OperationBaseOption & {config?: {reset?: boolean}}} options */ - const setupTest = async ({ stdout, stderr, config: { reset } = {} }) => { - const { console, stdio } = getConsoleAndStdio('setup-test', stdout, stderr); + /** @param {import("./types.js").TaskBaseOptions & {config?: {reset?: boolean}}} options */ + const setupTasks = async ({ stdout, stderr, config: { reset } = {} }) => { + const { console, stdio } = getConsoleAndStdio( + 'setup-tasks', + stdout, + stderr, + ); console.log('Starting'); @@ -81,7 +85,7 @@ export const makeTestOperations = ({ console.log('Done'); }; - /** @param {import("./test-operations.js").OperationBaseOption} options */ + /** @param {import("./types.js").TaskBaseOptions} options */ const runChain = async ({ stdout, stderr, timeout = 120 }) => { const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr); @@ -180,7 +184,7 @@ export const makeTestOperations = ({ ); }; - /** @param {import("./test-operations.js").OperationBaseOption} options */ + /** @param {import("./types.js").TaskBaseOptions} options */ const runClient = async ({ stdout, stderr, timeout = 60 }) => { const { console, stdio } = getConsoleAndStdio('client', stdout, stderr); @@ -241,9 +245,9 @@ export const makeTestOperations = ({ }; return harden({ - setupTest, + setupTasks, runChain, runClient, - runLoadgen: 
makeLoadgenOperation({ pipedSpawn }), + runLoadgen: makeLoadgenTask({ pipedSpawn }), }); }; diff --git a/runner/lib/test-shared-loadgen.js b/runner/lib/tasks/shared-loadgen.js similarity index 84% rename from runner/lib/test-shared-loadgen.js rename to runner/lib/tasks/shared-loadgen.js index 3f54b22..bced442 100644 --- a/runner/lib/test-shared-loadgen.js +++ b/runner/lib/tasks/shared-loadgen.js @@ -2,11 +2,11 @@ import { PassThrough } from 'stream'; -import { childProcessDone } from './helpers/child-process.js'; -import LineStreamTransform from './helpers/line-stream-transform.js'; -import { PromiseAllOrErrors, tryTimeout } from './helpers/async.js'; -import { whenStreamSteps } from './helpers/stream-steps.js'; -import { httpRequest, getConsoleAndStdio } from './test-helpers.js'; +import { childProcessDone } from '../helpers/child-process.js'; +import LineStreamTransform from '../helpers/line-stream-transform.js'; +import { PromiseAllOrErrors, tryTimeout } from '../helpers/async.js'; +import { whenStreamSteps } from '../helpers/stream-steps.js'; +import { httpRequest, getConsoleAndStdio } from './helpers.js'; const loadgenStartRE = /deploy.*loadgen\/loop\.js/; const loadgenReadyRE = /server running/; @@ -14,11 +14,11 @@ const loadgenReadyRE = /server running/; /** * * @param {Object} powers - * @param {import("./helpers/child-process.js").PipedSpawn} powers.pipedSpawn Spawn with piped output - * @returns {import("./test-operations.js").TestOperations['runLoadgen']} + * @param {import("../helpers/child-process.js").PipedSpawn} powers.pipedSpawn Spawn with piped output + * @returns {import("./types.js").OrchestratorTasks['runLoadgen']} * */ -export const makeLoadgenOperation = ({ pipedSpawn }) => { +export const makeLoadgenTask = ({ pipedSpawn }) => { return harden(async ({ stdout, stderr, timeout = 30, config = {} }) => { const { console, stdio } = getConsoleAndStdio('loadgen', stdout, stderr); diff --git a/runner/lib/test-testnet.js b/runner/lib/tasks/testnet.js similarity index 90% rename from runner/lib/test-testnet.js rename to runner/lib/tasks/testnet.js index 9a1f6b2..d81b63f 100644 --- a/runner/lib/test-testnet.js +++ b/runner/lib/tasks/testnet.js @@ -10,23 +10,23 @@ import TOML from '@iarna/toml'; import { childProcessDone, makeSpawnWithPrintAndPipeOutput, -} from './helpers/child-process.js'; -import LineStreamTransform from './helpers/line-stream-transform.js'; +} from '../helpers/child-process.js'; +import LineStreamTransform from '../helpers/line-stream-transform.js'; import { PromiseAllOrErrors, tryTimeout, sleep, aggregateTryFinally, -} from './helpers/async.js'; -import { whenStreamSteps } from './helpers/stream-steps.js'; +} from '../helpers/async.js'; +import { whenStreamSteps } from '../helpers/stream-steps.js'; import { getArgvMatcher, getChildMatchingArgv, wrapArgvMatcherIgnoreEnvShebang, getConsoleAndStdio, httpRequest, -} from './test-helpers.js'; -import { makeLoadgenOperation } from './test-shared-loadgen.js'; +} from './helpers.js'; +import { makeLoadgenTask } from './shared-loadgen.js'; const pipeline = promisify(pipelineCallback); @@ -66,12 +66,12 @@ const clientArgvMatcher = wrapArgvMatcherIgnoreEnvShebang( * @param {Object} powers * @param {import("child_process").spawn} powers.spawn Node.js spawn * @param {import("fs/promises")} powers.fs Node.js promisified fs object - * @param {import("./helpers/fs.js").MakeFIFO} powers.makeFIFO Make a FIFO file readable stream - * @param {import("./helpers/procsfs.js").GetProcessInfo} powers.getProcessInfo - * @returns 
{import("./test-operations.js").TestOperations}
+ * @param {import("../helpers/fs.js").MakeFIFO} powers.makeFIFO Make a FIFO file readable stream
+ * @param {import("../helpers/procsfs.js").GetProcessInfo} powers.getProcessInfo
+ * @returns {import("./types.js").OrchestratorTasks}
  *
  */
-export const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => {
+export const makeTasks = ({ spawn, fs, makeFIFO, getProcessInfo }) => {
   const pipedSpawn = makeSpawnWithPrintAndPipeOutput({
     spawn,
     end: false,
@@ -84,8 +84,8 @@ export const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => {
 
   let testnetOrigin = 'https://testnet.agoric.net';
 
-  /** @param {import("./test-operations.js").OperationBaseOption & {config?: {reset?: boolean, chainOnly?: boolean, withMonitor?: boolean, testnetOrigin?: string}}} options */
-  const setupTest = async ({
+  /** @param {import("./types.js").TaskBaseOptions & {config?: {reset?: boolean, chainOnly?: boolean, withMonitor?: boolean, testnetOrigin?: string}}} options */
+  const setupTasks = async ({
     stdout,
     stderr,
     timeout = 120,
     config: {
       reset = true,
       chainOnly,
       withMonitor = true,
       testnetOrigin: testnetOriginOption,
     } = {},
   }) => {
-    const { console, stdio } = getConsoleAndStdio('setup-test', stdout, stderr);
+    const { console, stdio } = getConsoleAndStdio(
+      'setup-tasks',
+      stdout,
+      stderr,
+    );
 
     console.log('Starting');
 
@@ -229,7 +233,7 @@ export const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => {
     console.log('Done');
   };
 
-  /** @param {import("./test-operations.js").OperationBaseOption} options */
+  /** @param {import("./types.js").TaskBaseOptions} options */
   const runChain = async ({ stdout, stderr, timeout = 30 }) => {
     const { console, stdio } = getConsoleAndStdio('chain', stdout, stderr);
 
@@ -355,7 +359,7 @@ export const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => {
     );
   };
 
-  /** @param {import("./test-operations.js").OperationBaseOption} options */
+  /** @param {import("./types.js").TaskBaseOptions} options */
   const runClient = async ({ stdout, stderr, timeout = 20 }) => {
     const { console, stdio } = getConsoleAndStdio('client', stdout, stderr);
 
@@ -420,9 +424,9 @@ export const makeTestOperations = ({ spawn, fs, makeFIFO, getProcessInfo }) => {
   };
 
   return harden({
-    setupTest,
+    setupTasks,
     runChain,
     runClient,
-    runLoadgen: makeLoadgenOperation({ pipedSpawn }),
+    runLoadgen: makeLoadgenTask({ pipedSpawn }),
   });
 };
diff --git a/runner/lib/tasks/types.d.ts b/runner/lib/tasks/types.d.ts
new file mode 100644
index 0000000..74e166e
--- /dev/null
+++ b/runner/lib/tasks/types.d.ts
@@ -0,0 +1,29 @@
+/* eslint-disable no-unused-vars,no-redeclare */
+
+export type TaskResult = {
+  readonly stop: () => void;
+  readonly done: Promise<void>;
+  readonly ready: Promise<void>;
+};
+
+export type RunChainInfo = {
+  readonly slogLines: AsyncIterable<string>;
+  readonly processInfo: import('../helpers/process-info.js').ProcessInfo;
+  readonly storageLocation: string;
+};
+
+export type RunChainResult = TaskResult & RunChainInfo;
+
+export interface TaskBaseOptions {
+  readonly stdout: import('stream').Writable;
+  readonly stderr: import('stream').Writable;
+  readonly timeout?: number;
+  readonly config?: unknown;
+}
+
+export interface OrchestratorTasks {
+  setupTasks(options: TaskBaseOptions): Promise<void>;
+  runChain(options: TaskBaseOptions): Promise<RunChainResult>;
+  runClient(options: TaskBaseOptions): Promise<TaskResult>;
+  runLoadgen(options: TaskBaseOptions): Promise<TaskResult>;
+}
diff --git 
a/runner/lib/test-operations.d.ts b/runner/lib/test-operations.d.ts deleted file mode 100644 index c5645bc..0000000 --- a/runner/lib/test-operations.d.ts +++ /dev/null @@ -1,29 +0,0 @@ -/* eslint-disable no-unused-vars,no-redeclare */ - -export type RunResult = { - readonly stop: () => void; - readonly done: Promise; - readonly ready: Promise; -}; - -export type RunChainInfo = { - readonly slogLines: AsyncIterable; - readonly processInfo: import('./helpers/process-info.js').ProcessInfo; - readonly storageLocation: string; -}; - -export type RunChainResult = RunResult & RunChainInfo; - -interface OperationBaseOption { - readonly stdout: import('stream').Writable; - readonly stderr: import('stream').Writable; - readonly timeout?: number; - readonly config?: unknown; -} - -export interface TestOperations { - setupTest(options: OperationBaseOption): Promise; - runChain(options: OperationBaseOption): Promise; - runClient(options: OperationBaseOption): Promise; - runLoadgen(options: OperationBaseOption): Promise; -} From e79f4f0349123f01003704a3aee02612755d3fe2 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 13 Aug 2021 18:14:57 +0000 Subject: [PATCH 34/34] Fork install-ses and promise-kit dependencies into runner --- runner/lib/entrypoint.js | 2 +- runner/lib/helpers/async.js | 4 +- runner/lib/helpers/procsfs.js | 2 - runner/lib/helpers/stream-steps.js | 4 +- runner/lib/main.js | 4 +- runner/lib/sdk/install-ses.js | 149 ++++++++++++++++++ runner/lib/sdk/promise-kit.js | 82 ++++++++++ .../ses-types.d.ts} | 0 runner/package.json | 5 +- yarn.lock | 19 ++- 10 files changed, 251 insertions(+), 20 deletions(-) create mode 100644 runner/lib/sdk/install-ses.js create mode 100644 runner/lib/sdk/promise-kit.js rename runner/lib/{ses-types-patch.d.ts => sdk/ses-types.d.ts} (100%) diff --git a/runner/lib/entrypoint.js b/runner/lib/entrypoint.js index eecbd1c..c609a72 100755 --- a/runner/lib/entrypoint.js +++ b/runner/lib/entrypoint.js @@ -2,7 +2,7 @@ /* global process */ // @ts-nocheck -import '@agoric/install-ses'; +import './sdk/install-ses.js'; import path from 'path'; import { spawn } from 'child_process'; diff --git a/runner/lib/helpers/async.js b/runner/lib/helpers/async.js index fbfb23a..823e4b3 100644 --- a/runner/lib/helpers/async.js +++ b/runner/lib/helpers/async.js @@ -1,6 +1,6 @@ /* global setTimeout */ -import { makePromiseKit } from '@agoric/promise-kit'; +import { makePromiseKit } from '../sdk/promise-kit.js'; /** @type {import("./async.js").sleep} */ export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); @@ -105,7 +105,7 @@ export const tryTimeout = async (timeoutMs, trier, onError) => { /** @typedef {import("./async.js").Task} Task */ /** * @template T - * @typedef {import('@agoric/promise-kit').PromiseRecord} PromiseRecord + * @typedef {import('../sdk/promise-kit.js').PromiseRecord} PromiseRecord */ /** diff --git a/runner/lib/helpers/procsfs.js b/runner/lib/helpers/procsfs.js index d977896..982a90e 100644 --- a/runner/lib/helpers/procsfs.js +++ b/runner/lib/helpers/procsfs.js @@ -9,8 +9,6 @@ import { performance } from 'perf_hooks'; -import assert from 'assert'; - import { childProcessDone } from './child-process.js'; const statusLineFormat = /^([^:]+):[\s]+(.+)$/; diff --git a/runner/lib/helpers/stream-steps.js b/runner/lib/helpers/stream-steps.js index 4ff23b3..b443d7a 100644 --- a/runner/lib/helpers/stream-steps.js +++ b/runner/lib/helpers/stream-steps.js @@ -1,7 +1,7 @@ import { promisify } from 'util'; import { finished as finishedCallback } from 
'stream';
 
-import { makePromiseKit } from '@agoric/promise-kit';
+import { makePromiseKit } from '../sdk/promise-kit.js';
 
 import LineStreamTransform from './line-stream-transform.js';
 
@@ -31,7 +31,7 @@ export const whenStreamSteps = (stream, steps, { waitEnd = true } = {}) => {
     if (stepsAndKits.length) {
       const match = stepsAndKits[0].step.matcher.exec(line);
       if (match) {
-        const stepAndKit = /** @type {{step: StepConfig, kit: import('@agoric/promise-kit').PromiseRecord<string>}} */ (stepsAndKits.shift());
+        const stepAndKit = /** @type {{step: StepConfig, kit: import('../sdk/promise-kit.js').PromiseRecord<string>}} */ (stepsAndKits.shift());
         const {
           step: { resultIndex = 1 },
           kit: { resolve },
diff --git a/runner/lib/main.js b/runner/lib/main.js
index b2a6679..40cc34a 100644
--- a/runner/lib/main.js
+++ b/runner/lib/main.js
@@ -12,7 +12,7 @@ import {
 import yargsParser from 'yargs-parser';
 import chalk from 'chalk';
 
-import { makePromiseKit } from '@agoric/promise-kit';
+import { makePromiseKit } from './sdk/promise-kit.js';
 
 import {
   sleep,
@@ -678,7 +678,7 @@ const main = async (progName, rawArgs, powers) => {
     currentStageElapsedOffsetNs =
       (runChainResult.processInfo.startTimestamp - cpuTimeOffset) * 1e6;
     chainStorageLocation = runChainResult.storageLocation;
-    /** @type {import("@agoric/promise-kit").PromiseRecord<void>} */
+    /** @type {import("./sdk/promise-kit.js").PromiseRecord<void>} */
     const {
       promise: chainFirstEmptyBlock,
       resolve: resolveFirstEmptyBlock,
diff --git a/runner/lib/sdk/install-ses.js b/runner/lib/sdk/install-ses.js
new file mode 100644
index 0000000..32c159e
--- /dev/null
+++ b/runner/lib/sdk/install-ses.js
@@ -0,0 +1,149 @@
+/* global LOCKDOWN_OPTIONS process */
+// 'lockdown' appears on the global as a side-effect of importing 'ses'
+import 'ses';
+
+// Install our HandledPromise global.
+// import '@agoric/eventual-send/shim.js';
+
+// For testing under Ava, and also sometimes for testing and debugging in
+// general, when safety is not needed, you perhaps want to use
+// packages/SwingSet/tools/install-ses-debug.js instead of this one.
+// If you're using a prepare-test-env-ava.js, it is probably already doing that
+// for you.
+
+// The `@agoric/install-ses` package exists so the "main" of production code can
+// start with the following import or its equivalent.
+// ```js
+// import '@agoric/install-ses';
+// ```
+// But production code must also be tested. Normal ocap discipline of passing
+// explicit arguments into the `lockdown`
+// call would require an awkward structuring of start modules, since
+// the `install-ses` module calls `lockdown` during its initialization,
+// before any explicit code in the start module gets to run. Even if other code
+// does get to run first, the `lockdown` call in this module happens during
+// module initialization, before it can legitimately receive parameters by
+// explicit parameter passing.
+//
+// Instead, for now, `install-ses` violates normal ocap discipline by feature
+// testing global state for a passed "parameter". This is something that a
+// module can but normally should not do, during initialization or otherwise.
+// Initialization is often awkward.
+//
+// The `install-ses` module tests, first,
+// for a JavaScript global named `LOCKDOWN_OPTIONS`, and second, for an
+// environment
+// variable named `LOCKDOWN_OPTIONS`. If either is present, its value should be
+// a JSON encoding of the options bag to pass to the `lockdown` call. If so,
+// then `install-ses` calls `lockdown` with those options. If there is no such
+// feature, `install-ses` calls `lockdown` with appropriate settings for
+// production use.
+
+let optionsString;
+if (typeof LOCKDOWN_OPTIONS === 'string') {
+  optionsString = LOCKDOWN_OPTIONS;
+  console.log(
+    `'@agoric/install-ses' sniffed and found a 'LOCKDOWN_OPTIONS' global variable\n`,
+  );
+} else if (
+  typeof process === 'object' &&
+  typeof process.env.LOCKDOWN_OPTIONS === 'string'
+) {
+  optionsString = process.env.LOCKDOWN_OPTIONS;
+  console.log(
+    `'@agoric/install-ses' sniffed and found a 'LOCKDOWN_OPTIONS' environment variable\n`,
+  );
+}
+
+if (typeof optionsString === 'string') {
+  let options;
+  try {
+    options = JSON.parse(optionsString);
+  } catch (err) {
+    console.error('Environment variable LOCKDOWN_OPTIONS must be JSON', err);
+    throw err;
+  }
+  if (typeof options !== 'object' || Array.isArray(options)) {
+    const err = new TypeError(
+      'Environment variable LOCKDOWN_OPTIONS must be a JSON object',
+    );
+    console.error('', err, options);
+    throw err;
+  }
+  lockdown(options);
+} else {
+  lockdown({
+    // The default `{errorTaming: 'safe'}` setting, if possible, redacts the
+    // stack trace info from the error instances, so that it is not available
+    // merely by saying `errorInstance.stack`. However, some tools
+    // will look for the stack there and become much less useful if it is
+    // missing. In production, the settings in this file need to preserve
+    // security, so the 'unsafe' setting below MUST always be commented out
+    // except during private development.
+    //
+    // NOTE TO REVIEWERS: If you see the following line *not* commented out,
+    // this may be a development accident that MUST be fixed before merging.
+    //
+    // errorTaming: 'unsafe',
+    //
+    //
+    // The default `{stackFiltering: 'concise'}` setting usually makes for a
+    // better debugging experience, by severely reducing the noisy distractions
+    // of the normal verbose stack traces. Which is why we comment
+    // out the `'verbose'` setting below. However, some
+    // tools look for the full filename that it expects in order
+    // to fetch the source text for diagnostics,
+    //
+    // Another reason for not commenting it out: The cause
+    // of the bug may be anywhere, so the `'noise'` thrown out by the default
+    // `'concise'` setting may also contain the signal you need. To see it,
+    // uncomment out the following line. But please do not commit it in that
+    // state.
+    //
+    // NOTE TO REVIEWERS: If you see the following line *not* commented out,
+    // this may be a development accident that MUST be fixed before merging.
+    //
+    // stackFiltering: 'verbose',
+    //
+    //
+    // The default `{overrideTaming: 'moderate'}` setting does not hurt the
+    // debugging experience much. But it will introduce noise into, for example,
+    // the vscode debugger's object inspector. During debug and test, if you can
+    // avoid legacy code that needs the `'moderate'` setting, then the `'min'`
+    // setting reduces debugging noise yet further, by turning fewer inherited
+    // properties into accessors.
+    //
+    // NOTE TO REVIEWERS: If you see the following line *not* commented out,
+    // this may be a development accident that MUST be fixed before merging.
+    //
+    // overrideTaming: 'min',
+    //
+    //
+    // The default `{consoleTaming: 'safe'}` setting usually makes for a
+    // better debugging experience, by wrapping the original `console` with
+    // the SES replacement `console` that provides more information about
+    // errors, especially those thrown by the `assert` system. However,
+    // in case the SES `console` is getting in the way, we provide the
+    // `'unsafe'` option for leaving the original `console` in place.
+    //
+    // NOTE TO REVIEWERS: If you see the following line *not* commented out,
+    // this may be a development accident that MUST be fixed before merging.
+    //
+    // consoleTaming: 'unsafe',
+  });
+}
+
+// We are now in the "Start Compartment". Our global has all the same
+// powerful things it had before, but the primordials have changed to make
+// them safe to use in the arguments of API calls we make into more limited
+// compartments
+
+// 'Compartment' and 'harden' (and `StaticModuleRecord`) are now present in
+// our global scope.
+
+// Even on non-v8, we tame the start compartment's Error constructor so
+// this assignment is not rejected, even if it does nothing.
+Error.stackTraceLimit = Infinity;
+
+harden(TextEncoder);
+harden(TextDecoder);
diff --git a/runner/lib/sdk/promise-kit.js b/runner/lib/sdk/promise-kit.js
new file mode 100644
index 0000000..e09129c
--- /dev/null
+++ b/runner/lib/sdk/promise-kit.js
@@ -0,0 +1,82 @@
+/* global globalThis */
+// @ts-check
+
+// eslint-disable-next-line spaced-comment
+/// <reference types="ses"/>
+
+/** @type {import('@agoric/eventual-send').HandledPromiseConstructor | PromiseConstructor} */
+const BestPipelinablePromise = globalThis.HandledPromise || Promise;
+
+/**
+ * @template T
+ * @typedef {Object} PromiseRecord A reified Promise
+ * @property {(value: ERef<T>) => void} resolve
+ * @property {(reason: any) => void} reject
+ * @property {Promise<T>} promise
+ */
+
+/**
+ * @template T
+ * @typedef {T | PromiseLike<T>} ERef
+ * A reference of some kind to an object of type T. It may be a direct
+ * reference to a local T. It may be a local presence for a remote T. It may
+ * be a promise for a local or remote T. Or it may even be a thenable
+ * (a promise-like non-promise with a "then" method) for a T.
+ */
+
+/**
+ * Needed to prevent type errors where functions are detected to be undefined.
+ */
+const NOOP_INITIALIZER = harden(() => {});
+
+/**
+ * makePromiseKit() builds a Promise object, and returns a record
+ * containing the promise itself, as well as separate facets for resolving
+ * and rejecting it.
+ *
+ * @template T
+ * @returns {PromiseRecord<T>}
+ */
+export function makePromiseKit() {
+  /** @type {(value: ERef<T>) => void} */
+  let res = NOOP_INITIALIZER;
+  /** @type {(reason: any) => void} */
+  let rej = NOOP_INITIALIZER;
+
+  /** @type {Promise<T> & {domain?: unknown}} */
+  const p = new BestPipelinablePromise((resolve, reject) => {
+    res = resolve;
+    rej = reject;
+  });
+  // Node.js adds the `domain` property which is not a standard
+  // property on Promise. Because we do not know it to be ocap-safe,
+  // we remove it.
+  if ('domain' in p) {
+    // deleting p.domain may break functionality. To retain current
+    // functionality at the expense of safety, set unsafe to true.
+    const unsafe = false;
+    if (unsafe) {
+      const originalDomain = p.domain;
+      Object.defineProperty(p, 'domain', {
+        get() {
+          return originalDomain;
+        },
+      });
+    } else {
+      delete p.domain;
+    }
+  }
+  return harden({ promise: p, resolve: res, reject: rej });
+}
+harden(makePromiseKit);
+
+/**
+ * Determine if the argument is a Promise. 
+ * + * @param {any} maybePromise The value to examine + * @returns {maybePromise is Promise} Whether it is a promise + */ +export function isPromise(maybePromise) { + return Promise.resolve(maybePromise) === maybePromise; +} +harden(isPromise); diff --git a/runner/lib/ses-types-patch.d.ts b/runner/lib/sdk/ses-types.d.ts similarity index 100% rename from runner/lib/ses-types-patch.d.ts rename to runner/lib/sdk/ses-types.d.ts diff --git a/runner/package.json b/runner/package.json index 1255b7b..76a5e1d 100644 --- a/runner/package.json +++ b/runner/package.json @@ -29,16 +29,13 @@ "prettier": "^2.1.2" }, "dependencies": { - "@agoric/assert": "*", - "@agoric/install-ses": "*", - "@agoric/promise-kit": "*", "@iarna/toml": "^2.2.3", - "agoric": "*", "anylogger": "^0.21.0", "chalk": "^2.4.2", "deterministic-json": "^1.0.5", "inquirer": "^6.3.1", "readline-transform": "^1.0.0", + "ses": "^0.14.0", "yargs-parser": "^20.2.2" }, "keywords": [], diff --git a/yarn.lock b/yarn.lock index dbb8410..946bdbd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -356,13 +356,6 @@ "@agoric/swingset-vat" "^0.16.0" "@agoric/zoe" "^0.15.0" -"@agoric/promise-kit@*", "@agoric/promise-kit@^0.2.6": - version "0.2.6" - resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.6.tgz#e11ac8fbeb00c144941fbf03f53203d55a5ec8ff" - integrity sha512-6RCPgt5RpRq6/a9NgH/7xAR2GaHTiUn7K+PJuW1vBMPMhn56qxx2Mw5Ieol4+DZ6TIcC4WOBGtCwL6YTAF5DKg== - dependencies: - "@agoric/eventual-send" "^0.13.6" - "@agoric/promise-kit@^0.2.21": version "0.2.21" resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.21.tgz#66aa16a020479c610608f169ae08eea9d018d62a" @@ -370,6 +363,13 @@ dependencies: "@agoric/eventual-send" "^0.13.23" +"@agoric/promise-kit@^0.2.6": + version "0.2.6" + resolved "https://registry.yarnpkg.com/@agoric/promise-kit/-/promise-kit-0.2.6.tgz#e11ac8fbeb00c144941fbf03f53203d55a5ec8ff" + integrity sha512-6RCPgt5RpRq6/a9NgH/7xAR2GaHTiUn7K+PJuW1vBMPMhn56qxx2Mw5Ieol4+DZ6TIcC4WOBGtCwL6YTAF5DKg== + dependencies: + "@agoric/eventual-send" "^0.13.6" + "@agoric/registrar@^0.2.7": version "0.2.7" resolved "https://registry.yarnpkg.com/@agoric/registrar/-/registrar-0.2.7.tgz#2e15a6f566fd5054b3b5a3006e666f1b0831f32c" @@ -5071,6 +5071,11 @@ ses@^0.13.4: resolved "https://registry.yarnpkg.com/ses/-/ses-0.13.4.tgz#d547202ccb180c4a161e52007a9edff51d5fa9fd" integrity sha512-7pgHZF4i6tuMAhD6GeNgA8fxfgzeQ5queyRFnKfKPM6nvCEKd8JP3ityQ04rVspTzkzu9R0b2I4nb05ukblQDA== +ses@^0.14.0: + version "0.14.0" + resolved "https://registry.yarnpkg.com/ses/-/ses-0.14.0.tgz#d71a5ccac8e9054155c7e63a52325e924edd3b41" + integrity sha512-LQPb6IS19K12Osvndmu7lHoJVuYyxPPBAUSZlcVeOmHF4LMzjT8vIVoMYLyk4LHvmt6wBbrJph9JkIiD1+femQ== + set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"