From 3e5df0a69e12b7ec47b166bca3e8ba85591e13b7 Mon Sep 17 00:00:00 2001 From: RnkSngh Date: Tue, 30 Apr 2024 10:14:35 -0400 Subject: [PATCH] add npm package --- .github/workflows/lint.yml | 6 +- .gitignore | 1 + README.md | 123 ++++++++++--- hardhat.config.cjs | 6 + package.json | 82 ++++++++- specs/contracts.spec.yaml | 77 ++++++++ specs/evm.accounts.yaml | 3 + specs/upgrade.spec.yaml | 16 ++ src/deploy.ts | 171 ++++++++++++++++++ src/evm/account.ts | 135 ++++++++++++++ src/evm/chain.ts | 25 +++ src/evm/index.ts | 3 + src/evm/schemas/contract.ts | 117 ++++++++++++ src/evm/schemas/tx.ts | 28 +++ src/index.ts | 21 +++ src/scripts/deploy-script.ts | 53 ++++++ src/scripts/upgrade-script.ts | 47 +++++ src/tx.ts | 81 +++++++++ src/utils/cli.ts | 327 ++++++++++++++++++++++++++++++++++ src/utils/constants.ts | 10 ++ src/utils/index.ts | 3 + src/utils/io.ts | 253 ++++++++++++++++++++++++++ src/utils/registry.ts | 174 ++++++++++++++++++ tsconfig.json | 112 ++++++++++++ tsup.config.ts | 23 +++ 25 files changed, 1868 insertions(+), 29 deletions(-) create mode 100644 hardhat.config.cjs create mode 100644 specs/contracts.spec.yaml create mode 100644 specs/evm.accounts.yaml create mode 100644 specs/upgrade.spec.yaml create mode 100644 src/deploy.ts create mode 100644 src/evm/account.ts create mode 100644 src/evm/chain.ts create mode 100644 src/evm/index.ts create mode 100644 src/evm/schemas/contract.ts create mode 100644 src/evm/schemas/tx.ts create mode 100644 src/index.ts create mode 100644 src/scripts/deploy-script.ts create mode 100644 src/scripts/upgrade-script.ts create mode 100644 src/tx.ts create mode 100644 src/utils/cli.ts create mode 100644 src/utils/constants.ts create mode 100644 src/utils/index.ts create mode 100644 src/utils/io.ts create mode 100644 src/utils/registry.ts create mode 100644 tsconfig.json create mode 100644 tsup.config.ts diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c1fc5097..10ed23c3 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -13,17 +13,17 @@ jobs: runs-on: ubuntu-latest steps: + - name: Install Foundry + uses: foundry-rs/foundry-toolchain@v1 - uses: actions/checkout@v3 with: submodules: recursive - uses: actions/setup-node@v3 with: - node-version: '16' + node-version: "16" - uses: bahmutov/npm-install@v1 with: install-command: npm install - run: npm run lint - - name: Install Foundry - uses: foundry-rs/foundry-toolchain@v1 - name: Check formatting run: forge fmt --check diff --git a/.gitignore b/.gitignore index d47d7a74..f88da103 100644 --- a/.gitignore +++ b/.gitignore @@ -36,3 +36,4 @@ artifacts report/ lcov.info +src/evm/contracts diff --git a/README.md b/README.md index 61e8b785..3242d63a 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # vIBC Core Smart Contracts -This project includes the core smart contracts for the vIBC protocol, and a few demo contracts that simulate testing and serve as a template for integrating dapp devs. +This project includes the core smart contracts for the vIBC protocol, a few demo contracts that simulate testing and serve as a template for integrating dapp devs, and an npm package to aid with deploying and sending transactions to deployed contracts. ![](./diagrams/vibcContractsOverview.jpg) @@ -35,35 +35,118 @@ The optimisticProofVerifier verifies proofs for the optimistic light client. 
## UniversalChannelHandler

The UniversalChannelHandler is a middleware contract that can be used to save dapps from having to go through the 4-step channel handshake to send or receive Ibc packets.

-## Quick Start with Forge/Foundry
+## Building Contracts and Testing
+This repository uses Foundry for testing and development of the smart contracts.

-### Install Forge
+## Deploying Contracts
+All deployments can be done either through the command line, or from JavaScript code by importing this package's modules.
+After each deployment, deployment artifacts are saved as json files, structured similarly to how [hardhat deploy stores its deploy files](https://github.com/wighawag/hardhat-deploy).

-```sh
-curl -L https://foundry.paradigm.xyz | bash
-```
-
-This will install Foundryup, then simply follow the instructions on-screen, which will make the `foundryup` command available in your CLI.
+Before deploying, the accounts used to deploy and any constructor arguments must be configured. This configuration can either be read from a yaml file or set through environment variables (see the sections below on how to configure each deployment).

-Running `foundryup` by itself will install the latest (nightly) precompiled binaries: `forge`, `cast`, `anvil`, and `chisel`. See `foundryup --help` for more options, like installing from a specific version or commit.
+The constructor arguments for each deployment are listed in the contract spec file. Arguments support a `{{name}}` template syntax, which is resolved against previously deployed contract addresses and chain variables; literal values can also be specified.
+The spec file is read in order, so its entries must be ordered such that dependencies come first and the contracts that depend on them come later.

-Or go to https://book.getfoundry.sh/getting-started/installation for more installation options.
+### Deploying via Command Line
+This npm package exposes two commands: one to deploy new contracts (which automatically creates persisted deployment files), and one to send transactions to contracts from persisted artifact files. The following steps are needed to deploy contracts via the command line:

-### Build contracts
+1. Ensure that your deployer account and constructor arguments are configured. This can either be done by adding contract spec yaml files under `specs/` at the root of where this npm module is installed (requires adding a `specs/evm.accounts.yaml` file and either a `specs/contracts.spec.yaml` or `specs/upgrade.spec.yaml`), or by setting the KEY_POLYMER, RPC_URL, DEPLOYMENT_CHAIN_ID, and CHAIN_NAME environment variables. For examples of contract and account spec files, see the `/specs` folder in this repo.
+2. Run either `npx deploy-vibc-core-smart-contracts` to deploy contracts from the contract spec, or `npx upgrade-vibc-core-smart-contracts` to send an upgrade transaction.

-```sh
-forge build
-```
+### Deploying via imports
+Deployments can also be done programmatically by importing and calling the `deployToChain` and `sendTxToChain` methods.
-### Run Tests +#### Deploying new contracts via imports -```sh -forge test ``` +import { + AccountRegistry, + Chain, + ContractRegistryLoader, + deployToChain, + parseObjFromFile, +} from "@open-ibc/vibc-core-smart-contracts"; + +import { getMainLogger } from "@open-ibc/vibc-core-smart-contracts/utils/cli"; +import { DEFAULT_RPC_URL } from "../utils/constants"; + +// Or can parse it form the env +const accountConfig = { + name: "local", + registry: [ + { + name: "KEY_POLYMER", + privateKey: process.env.KEY_POLYMER + }, + ], +}; + +const accounts = AccountRegistry.loadMultiple([accountConfig]); +const contracts = ContractRegistryLoader.loadSingle( + parseObjFromFile("specs/contracts.spec.yaml") +); + +const chain: Chain = { + rpc: process.env.RPC_URL , + chainId: process.env.DEPLOYMENT_CHAIN_ID, + chainName: process.env.CHAIN_NAME, + vmType: "evm", + description: "local chain", +}; + +deployToChain( + chain, + accounts.mustGet(chain.chainName), + contracts.subset(), + getMainLogger(), + false +); +``` + +similar to the command line deploy, this will create a deployment artifact file in the `deployments/` folder. -### Clean environment +#### Upgrading existing contracts via imports +Proxy upgrades to existing contracts can be done through the `sendTxToChain` method : -```sh -forge clean +``` +#!/usr/bin/env node +import { + AccountRegistry, + Chain, + parseObjFromFile, +} from "@open-ibc/vibc-core-smart-contracts"; +import { loadTxRegistry } from "@open-ibc/vibc-core-smart-contracts/evm/schemas/tx"; +import { sendTxToChain } from "@open-ibc/vibc-core-smart-contracts"; + +import { getOutputLogger } from "@open-ibc/vibc-core-smart-contracts/utils/cli"; +// Or can parse it form the env +const accountConfig = { + name: "local", + registry: [ + { + name: "KEY_POLYMER", + privateKey: process.env.KEY_POLYMER, + }, + ], +}; + +const accounts = AccountRegistry.loadMultiple([accountConfig]); +const upgradeTxs = loadTxRegistry(parseObjFromFile("specs/upgrade.spec.yaml")); + +const chain: Chain = { + rpc: process.env.RPC_URL, + chainId: process.env.CHAIN_ID, + chainName: "local", + vmType: "evm", + description: "local chain", +}; + +sendTxToChain( + chain, + accounts.mustGet(chain.chainName), + upgradeTxs.subset(), + getOutputLogger(), + false +); ``` diff --git a/hardhat.config.cjs b/hardhat.config.cjs new file mode 100644 index 00000000..8ba99bc9 --- /dev/null +++ b/hardhat.config.cjs @@ -0,0 +1,6 @@ +require("@nomicfoundation/hardhat-toolbox"); + +/** @type import('hardhat/config').HardhatUserConfig */ +module.exports = { + solidity: "0.8.24", +}; diff --git a/package.json b/package.json index 3ccb6a53..93fcfba0 100644 --- a/package.json +++ b/package.json @@ -1,13 +1,83 @@ { - "name": "vibc-core-smart-contracts", - "version": "1.0.0", - "main": "index.js", - "repository": "https://github.com/open-ibc/vibc-core-smart-contracts", + "name": "@open-ibc/vibc-core-smart-contracts", + "version": "0.1.3", + "main": "dist/index.js", + "bin": { + "deploy-vibc-core-smart-contracts": "./dist/scripts/deploy-script.js", + "upgrade-vibc-core-smart-contracts": "./dist/scripts/upgrade-script.js" + }, "license": "MIT", "dependencies": { - "solhint": "^4.1.1" + "@commander-js/extra-typings": "^12.0.1", + "@typechain/ethers-v6": "^0.5.0", + "ethers": "^6.4.0", + "nunjucks": "^3.2.4", + "solhint": "^4.1.1", + "typechain": "^8.3.0", + "winston": "^3.13.0", + "yaml": "^2.4.1", + "zod": "^3.23.4", + "zx": "^8.0.2" + }, + "devDependencies": { + "@nomicfoundation/hardhat-chai-matchers": "^2.0.0", + 
"@nomicfoundation/hardhat-ethers": "^3.0.0", + "@nomicfoundation/hardhat-ignition": "^0.15.0", + "@nomicfoundation/hardhat-ignition-ethers": "^0.15.0", + "@nomicfoundation/hardhat-network-helpers": "^1.0.0", + "@nomicfoundation/hardhat-toolbox": "^5.0.0", + "@nomicfoundation/hardhat-verify": "^2.0.0", + "@typechain/hardhat": "^9.0.0", + "@types/nunjucks": "^3.2.6", + "@types/winston": "^2.4.4", + "chai": "^4.2.0", + "hardhat": "^2.14.0", + "hardhat-gas-reporter": "^1.0.8", + "solidity-coverage": "^0.8.0", + "tsup": "^8.0.2" }, "scripts": { - "lint": "solhint contracts/**/*.sol" + "lint": "solhint contracts/**/*.sol", + "test": "forge test", + "build": "npm run gen-types && tsup", + "build-contracts": "forge build", + "gen-types": "npm run build-contracts && typechain --target ethers-v6 --out-dir src/evm/contracts/ './out/?(OpProofVerifier|ProofVerifier|Ibc|Channel|Dispatcher|Mars|Earth|UniversalChannelHandler|DummyProofVerifier|DummyLightClient|ERC1967Proxy|OpLightClient).sol/*.json'", + "deploy-contracts": "npm run build && node dist/deploy.js", + "deploy-simple": "node dist/deploy.js", + "prepublish": "npm run build" + }, + "keywords": [ + "evm", + "cosmos", + "rollup", + "op-stack", + "interoperability", + "solidity" + ], + "author": "Polymer Labs", + "type": "module", + "exports": { + ".": { + "require": "./dist/index.js", + "import": "./dist/index.js", + "types": "./dist/index.d.ts" + }, + "./evm": { + "require": "./dist/evm/index.js", + "import": "./dist/evm/index.js", + "types": "./dist/evm/index.d.ts" + }, + "./evm/account": "./dist/evm/account.js", + "./evm/chain": "./dist/evm/chain.js", + "./evm/schemas/contract": "./dist/evm/schemas/contract.js", + "./evm/schemas/tx": "./dist/evm/schemas/tx.js", + "./utils": { + "require": "./dist/utils/index.js", + "import": "./dist/utils/index.js", + "types": "./dist/utils/index.d.ts" + }, + "./utils/cli": "./dist/utils/cli.js", + "./utils/io": "./dist/utils/io.js" } + } diff --git a/specs/contracts.spec.yaml b/specs/contracts.spec.yaml new file mode 100644 index 00000000..bfd1737d --- /dev/null +++ b/specs/contracts.spec.yaml @@ -0,0 +1,77 @@ +# spec for deploying contracts +# {{name}} is replaced with one of the following, whichever matches first +# - the deployed contract address whose name matches `name` (not factoryName) +# - variables of the running chain, e.g. {{chain.chainName}}, {{chain.chainId}} +# NOTE: order of the contracts matters, as some contracts depend on others +# contracts with no deps should be placed before those with deps +# deployer: must be a valid name in accountRegistry; default to 'default' if not specified + +- name: LightClient + description: 'DummyLightClient' + factoryName: 'DummyLightClient' + deployer: 'KEY_POLYMER' + +- name: Ibc + description: 'IBC library' + factoryName: 'Ibc' + deployer: 'KEY_POLYMER' + +- name: IbcUtils + description: 'IBC utils library' + factoryName: 'IbcUtils' + deployer: 'KEY_POLYMER' + +- name: Dispatcher + description: 'IBC Core contract' + factoryName: 'Dispatcher' + libraries: + - name: 'contracts/libs/Ibc.sol:Ibc' + address: '{{Ibc}}' + deployer: 'KEY_POLYMER' + +- name: DispatcherProxy + description: 'Dispatcher proxy contract' + factoryName: 'ERC1967Proxy' + deployArgs: + - '{{Dispatcher}}' + - '$INITARGS' + init: + signature: 'initialize(string,address)' + args: + - 'polyibc.{{chain.chainName}}.' 
+ - '{{LightClient}}' + deployer: 'KEY_POLYMER' + +- name: UC + description: 'Universal Chanel IBC-middleware contract' + factoryName: 'UniversalChannelHandler' + deployer: 'KEY_POLYMER' + libraries: + - name: 'contracts/libs/Ibc.sol:IbcUtils' + address: '{{IbcUtils}}' + +- name: UCProxy + description: 'Universal Chanel IBC-middleware proxy' + factoryName: 'ERC1967Proxy' + deployArgs: + - '{{UC}}' + - '$INITARGS' + init: + signature: 'initialize(address)' + args: + - '{{DispatcherProxy}}' + deployer: 'KEY_POLYMER' + +# dApp contracts for testing and as examples + +- name: Mars + description: 'Mars contract directly owns a IBC channel' + deployArgs: + - '{{DispatcherProxy}}' + deployer: 'KEY_POLYMER' + +- name: Earth + description: 'Earth contract uses shared universal channel' + deployArgs: + - '{{UCProxy}}' + deployer: 'KEY_POLYMER' diff --git a/specs/evm.accounts.yaml b/specs/evm.accounts.yaml new file mode 100644 index 00000000..d917dbc9 --- /dev/null +++ b/specs/evm.accounts.yaml @@ -0,0 +1,3 @@ +# These accounts are derived from a test mnemonic by Anvil/Hardhat and used for testing purposes only. +- name: 'KEY_POLYMER' + privateKey: '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80' \ No newline at end of file diff --git a/specs/upgrade.spec.yaml b/specs/upgrade.spec.yaml new file mode 100644 index 00000000..9f0c51a8 --- /dev/null +++ b/specs/upgrade.spec.yaml @@ -0,0 +1,16 @@ +# spec for deploying contracts +# {{name}} is replaced with one of the following, whichever matches first +# - the deployed contract address whose name matches `name` (not factoryName) +# - variables of the running chain, e.g. {{chain.chainName}}, {{chain.chainId}} +# NOTE: order of the contracts matters, as some contracts depend on others +# contracts with no deps should be placed before those with deps +# deployer: must be a valid name in accountRegistry; default to 'default' if not specified + +# call on a given factoryname +- name: DispatcherUpgradeII + description: 'Upgrade for dispatcher contract' + deployer: 'KEY_POLYMER' # can be set in the accounts.yaml + signature: "upgradeTo(address)" + factoryName: "Dispatcher" + args: + - '{{Dispatcher}}' \ No newline at end of file diff --git a/src/deploy.ts b/src/deploy.ts new file mode 100644 index 00000000..10b03416 --- /dev/null +++ b/src/deploy.ts @@ -0,0 +1,171 @@ +import { ethers } from "ethers"; +import { + StringToStringMap, + readDeploymentFilesIntoEnv, + renderArgs, + renderString, + writeDeployedContractToFile, +} from "./utils/io"; +import assert from "assert"; +import { AccountRegistry } from "./evm/account"; +import { + ContractRegistry, + ContractRegistryLoader, +} from "./evm/schemas/contract"; +import { Logger } from "./utils/cli"; +import { DEFAULT_DEPLOYER } from "./utils/constants"; +import { Chain } from "./evm/chain"; +import { contracts } from "."; + +/** + * Return deployment libraries, factory, factory constructor, + * and rendered arguments for a contract deployment + */ +const getDeployData = ( + factoryName: string, + deployArgs: any[] | undefined, + env: StringToStringMap, + libraries: any[] = [], + init: { args: any[]; signature: string } | undefined +) => { + // @ts-ignore + const contractFactoryConstructor = contracts[`${factoryName}__factory`]; + assert( + contractFactoryConstructor, + `cannot find contract factory constructor for contract: ${factoryName}` + ); + + const libs = libraries + ? 
libraries.map((arg: any) => { + return { [arg.name]: renderString(arg.address, env) }; + }) + : []; + + const factory = new contractFactoryConstructor(...libs); + if (!factory) { + throw new Error( + `cannot load contract factory for contract: ${factoryName} with factory name: ${factoryName}__factory` + ); + } + + // var encodedInitData = ""; + let initData = ""; + + if (init) { + const initArgs = init.args.map((arg: any) => { + return typeof arg === "string" ? renderString(arg, env) : arg; + }); + const iFace = new ethers.Interface([`function ${init.signature}`]); + initData = iFace.encodeFunctionData(init.signature, initArgs); + } + return { + args: renderArgs(deployArgs, initData, env), + libraries: libs, + factory, + contractFactoryConstructor, + }; +}; + +export async function deployToChain( + chain: Chain, + accountRegistry: AccountRegistry, + deploySpec: ContractRegistry, + logger: Logger, + dryRun = false, + forceDeployNewContracts = false // true if you want to use existing deployments when possible +) { + logger.info( + `deploying ${deploySpec.size} contract(s) to chain ${ + chain.chainName + } with contractNames: [${deploySpec.keys()}]` + ); + + if (!dryRun) { + const provider = ethers.getDefaultProvider(chain.rpc); + const newAccounts = accountRegistry.subset([]); + for (const [name, wallet] of accountRegistry.entries()) { + newAccounts.set(name, wallet.connect(provider)); + } + accountRegistry = newAccounts; + } + + // @ts-ignore + const env: StringToStringMap = { chain }; + if (!forceDeployNewContracts) { + // Only read from existing contract files if we want to deploy new ones + await readDeploymentFilesIntoEnv(env); + } + + // result is the final contract registry after deployment, modified in place + const result = ContractRegistryLoader.loadSingle( + JSON.parse(JSON.stringify(deploySpec.serialize())) + ); + + const eachContract = async ( + contract: ReturnType + ) => { + try { + const factoryName = contract.factoryName + ? contract.factoryName + : contract.name; + + const constructorData = getDeployData( + factoryName, + contract.deployArgs, + env, + contract.libraries, + contract.init + ); + + logger.info( + `[${chain.chainName}]: deploying ${contract.name} with args: [${ + constructorData.args + }] with libraries: ${JSON.stringify(constructorData.libraries)}` + ); + let deployedAddr = `new.${contract.name}.address`; + const deployer = accountRegistry.mustGet( + contract.deployer ? 
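+          // fall back to the 'default' account when the spec entry omits a deployer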
contract.deployer : DEFAULT_DEPLOYER + ); + + if (!dryRun) { + const deployed = await constructorData.factory + .connect(deployer) + .deploy(...constructorData.args); + await deployed.deploymentTransaction()?.wait(); + deployedAddr = await deployed.getAddress(); + } + // save deployed contract address for its dependencies + env[contract.name] = deployedAddr; + // update contract in registry as output result + contract.address = deployedAddr; + contract.deployer = deployer.address; + + const contractObject = { + factory: factoryName, + address: deployedAddr, + abi: constructorData.contractFactoryConstructor.abi, + bytecode: constructorData.factory.bytecode, + name: contract.name, + args: constructorData.args, + }; + writeDeployedContractToFile(chain, contractObject); + } catch (err) { + logger.error( + `[${chain.chainName}] deploy ${contract.name} failed: ${err}` + ); + throw err; + } + }; + for (const contract of result.values()) { + await eachContract(contract); + } + + logger.info( + `[${chain.chainName}]: finished deploying ${result.size} contracts` + ); + + return { + chainName: chain.chainName, + contracts: result, + }; +} diff --git a/src/evm/account.ts b/src/evm/account.ts new file mode 100644 index 00000000..aa9cce8d --- /dev/null +++ b/src/evm/account.ts @@ -0,0 +1,135 @@ +import { z } from "zod"; +import { ethers } from "ethers"; +import fs from "fs"; +import path from "path"; +import { Registry } from "../utils/registry"; +import { parseZodSchema } from "../utils/io"; + +const privateKey = z.object({ + name: z.string().min(1), + // privateKey should be a hex string prefixed with 0x + privateKey: z.string().min(1), +}); + +const mnemonic = z.object({ + name: z.string().min(1), + // a 12-word mnemonic; or more words per BIP-39 spec + mnemonic: z.string().min(1), + path: z.optional(z.string().min(1)), + index: z.optional(z.number().int().min(0)), +}); + +// geth compatible keystore +const keyStore = z.object({ + dir: z.string().min(1), + password: z.optional(z.string()), +}); + +export const EvmAccountsSchema = z.union([ + z.array(z.union([privateKey, mnemonic])), + keyStore, +]); + +// ethers wallet with encryption +export type Wallet = ethers.Wallet | ethers.HDNodeWallet; + +export class AccountRegistry extends Registry { + static load(config: any[], name: string): AccountRegistry { + return new AccountRegistry(loadEvmAccounts(config), config, name); + } + + static loadMultiple(registryItems: { name: string; registry: any }[]) { + const result = new Registry([] as AccountRegistry[], { + toObj: (t) => { + return { name: t.name, registry: t.serialize() }; + }, + }); + for (const item of registryItems) { + result.set(item.name, AccountRegistry.load(item.registry, item.name)); + } + return result; + } + + constructor(r: Registry, private config: any[], name: string) { + super([], { nameInParent: name }); + for (const [name, wallet] of r.entries()) { + this.set(name, wallet); + } + } + + // return the same config obj that was used to load the accounts, but filtered by current account names + public serialize() { + const wallets = this.toList(); + return this.config.map((item, index) => { + return { + name: item.name, + privateKey: wallets[index].privateKey, + address: wallets[index].address, + ...item, + }; + }); + } +} + +// load a Map of { [name: string]: Wallet } from EvmAccountsSchema object +export function loadEvmAccounts(config: any): Registry { + const accountsConfig = parseZodSchema( + "EvmAccountsSchema", + config, + EvmAccountsSchema.parse + ); + const walletMap = new 
Registry([]); + if (Array.isArray(accountsConfig)) { + for (const account of accountsConfig) { + walletMap.set(account.name, createWallet(account)); + } + } else if ("dir" in accountsConfig) { + // TODO: add tests for dir-based accounts + const files = fs.readdirSync(accountsConfig.dir); + for (const file of files) { + const filePath = path.join(accountsConfig.dir, file); + const json = fs.readFileSync(filePath, "utf8"); + const wallet = ethers.Wallet.fromEncryptedJsonSync( + json, + accountsConfig.password ?? "" + ); + walletMap.set(wallet.address, wallet); + } + } else { + throw new Error( + `invalid accounts config: ${JSON.stringify(accountsConfig)}` + ); + } + return walletMap; +} + +export function createWallet(opt: { + privateKey?: string; + mnemonic?: string; + path?: string; + index?: number; +}): Wallet { + if (opt.privateKey && typeof opt.privateKey === "string") { + return new ethers.Wallet(opt.privateKey); + } + if (opt.mnemonic && typeof opt.mnemonic === "string") { + let wallet = ethers.Wallet.fromPhrase(opt.mnemonic); + if (typeof opt.path === "string" && opt.path.length > 0) { + wallet = ethers.HDNodeWallet.fromPhrase( + opt.mnemonic, + undefined, + opt.path + ); + } + // if account.index is specified, derive the child wallet by index + if (Number.isInteger(opt.index)) { + wallet = wallet.deriveChild(opt.index!); + } + return wallet; + } + throw new Error( + `invalid wallet config, must provide at least one of {privateKey, mnemonic}, but got: ${JSON.stringify( + opt + )}` + ); +} diff --git a/src/evm/chain.ts b/src/evm/chain.ts new file mode 100644 index 00000000..d1370dac --- /dev/null +++ b/src/evm/chain.ts @@ -0,0 +1,25 @@ +import { parseZodSchema } from "../utils/io"; +import { Registry } from "../utils/registry"; +import { z } from "zod"; + +const ChainConfigSchema = z.object({ + chainName: z.string().min(1), + chainId: z.number().int().min(1), + vmType: z.enum(["evm", "polymer", "cosmos"]).optional().default("evm"), + description: z.optional(z.string()), + rpc: z.string().min(1), +}); + +export const chainRegistrySchema = z.array(ChainConfigSchema).min(1); +export type ChainRegistry = Registry>; +export type Chain = ChainRegistry["Element"]; + +// load chain registry from a config object +export function loadChainRegistry(config: z.input) { + const chainRegistry = parseZodSchema( + "ChainRegistry", + config, + chainRegistrySchema.parse + ); + return new Registry(chainRegistry, { nameFunc: (c) => c.chainName }); +} diff --git a/src/evm/index.ts b/src/evm/index.ts new file mode 100644 index 00000000..9b0c4fdc --- /dev/null +++ b/src/evm/index.ts @@ -0,0 +1,3 @@ +export * as contract from "./schemas/contract"; +export * as chain from "./chain"; +export * as account from "./account"; diff --git a/src/evm/schemas/contract.ts b/src/evm/schemas/contract.ts new file mode 100644 index 00000000..7045f072 --- /dev/null +++ b/src/evm/schemas/contract.ts @@ -0,0 +1,117 @@ +import { z } from "zod"; +import { Registry } from "../../utils/registry"; +import { parseZodSchema } from "../../utils/io"; + +// A contract may or may not be deployed (null address). 
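+// Each field mirrors a per-contract entry in specs/contracts.spec.yaml.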
+const ContractItemSchema = z.object({ + name: z.string().min(1), + description: z.optional(z.string()), + factoryName: z.optional(z.string()), + deployArgs: z.optional(z.array(z.any())), + libraries: z.optional( + z.array( + z.object({ + name: z.string().min(1), + address: z.string().min(1), + }) + ) + ), + // either a account name from account registry, or a private key or a mnemonic signer + deployer: z.string().nullish(), + address: z.string().nullish(), + init: z.optional( + z.object({ + signature: z.string().min(1), + args: z.array(z.string().min(1)), + }) + ), +}); + +const ContractItemList = z.array(ContractItemSchema); +const registryName = "contracts"; + +const MultiChainContractRegistrySchema = z.array( + z.object({ chainName: z.string().min(1), [registryName]: ContractItemList }) +); + +type ContractItem = z.infer; + +// export type ContractRegistry = Registry +export type MultiChainContractRegistry = Registry<{ + chainName: string; + [registryName]: ContractRegistry; +}>; + +export class ContractRegistry extends Registry {} + +export class ContractRegistryLoader { + static loadSingle(config: any): ContractRegistry { + return loadContractRegistry(config); + } + + static loadMultiple(config: any): MultiChainContractRegistry { + return loadMultiChainContractRegistry(config); + } + + static emptyMultiple(): MultiChainContractRegistry { + // @ts-ignore + return new Registry<{ chainName: string; contracts: ContractRegistry }>( + [], + { + // @ts-ignore + toObj: (c) => { + return { + chainName: c.chainName, + [registryName]: c.contracts.serialize(), + }; + }, + } + ); + } + + static newMultiple( + items: { chainName: string; contracts: ContractRegistry }[] + ): MultiChainContractRegistry { + // @ts-ignore + return new Registry(items, { + nameFunc: (c) => c.chainName, + toObj: (c) => { + return { + chainName: c.chainName, + [registryName]: c.contracts.serialize(), + }; + }, + }); + } + + static emptySingle(): ContractRegistry { + return new ContractRegistry([], { nameInParent: registryName }); + } +} + +function loadContractRegistry(config: any): ContractRegistry { + const parsed = parseZodSchema( + "ContractRegistry", + config, + ContractItemList.parse + ); + return new ContractRegistry(parsed, { + nameFunc: (c) => c.name, + nameInParent: registryName, + }); +} + +function loadMultiChainContractRegistry( + config: any +): MultiChainContractRegistry { + const parsed = parseZodSchema( + "MultiChainContractRegistry", + config, + MultiChainContractRegistrySchema.parse + ); + const contractRegistries = parsed.map((item) => ({ + chainName: item.chainName, + contracts: loadContractRegistry(item.contracts), + })); + return ContractRegistryLoader.newMultiple(contractRegistries); +} diff --git a/src/evm/schemas/tx.ts b/src/evm/schemas/tx.ts new file mode 100644 index 00000000..5212b263 --- /dev/null +++ b/src/evm/schemas/tx.ts @@ -0,0 +1,28 @@ +import { z } from "zod"; +import { Registry } from "../../utils/registry"; +import { parseZodSchema } from "../../utils/io"; + +const TxItemSchema = z.object({ + name: z.string().min(1), + description: z.optional(z.string()), + // either a account name from account registry, or a private key or a mnemonic signer + deployer: z.string().nullish(), + signature: z.string().min(1), + address: z.string().nullish(), + factoryName: z.optional(z.string()), + args: z.optional(z.array(z.any())), +}); + +type TxItem = z.infer; +const TxItemList = z.array(TxItemSchema); + +export class TxRegistry extends Registry {} + +export function loadTxRegistry(config: any): 
TxRegistry { + const parsed = parseZodSchema("TxRegistry", config, TxItemList.parse); + return new TxRegistry(parsed, { + nameFunc: (c) => c.name, + nameInParent: "transactions", + }); +} + diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 00000000..2cf31647 --- /dev/null +++ b/src/index.ts @@ -0,0 +1,21 @@ +import { AccountRegistry } from "./evm/account"; +import { Chain } from "./evm/chain"; +import { Registry } from "./utils/registry"; +import { ContractRegistryLoader } from "./evm/schemas/contract"; +import { parseObjFromFile } from "./utils/io"; +import * as contracts from "./evm/contracts"; +import { loadEvmAccounts } from "./evm/account"; +import { deployToChain } from "./deploy"; +import { sendTxToChain } from "./tx"; + +export { + deployToChain, + sendTxToChain, + Chain, + contracts, + Registry, + loadEvmAccounts, + parseObjFromFile, + AccountRegistry, + ContractRegistryLoader, +}; diff --git a/src/scripts/deploy-script.ts b/src/scripts/deploy-script.ts new file mode 100644 index 00000000..6152c542 --- /dev/null +++ b/src/scripts/deploy-script.ts @@ -0,0 +1,53 @@ +#!/usr/bin/env node +import { + AccountRegistry, + Chain, + ContractRegistryLoader, + deployToChain, + parseObjFromFile, +} from ".."; + +import { getMainLogger } from "../utils/cli"; +import { DEFAULT_RPC_URL } from "../utils/constants"; + +// can either parse +const accountConfigFromYaml = { + name: "local", + registry: parseObjFromFile("specs/evm.accounts.yaml"), +}; + +// Or can parse it form the env +const accountConfig = { + name: "local", + registry: [ + { + name: "KEY_POLYMER", + privateKey: process.env.KEY_POLYMER + ? process.env.KEY_POLYMER + : accountConfigFromYaml.registry[0].privateKey, + }, + ], +}; + +const accounts = AccountRegistry.loadMultiple([accountConfig]); +const contracts = ContractRegistryLoader.loadSingle( + parseObjFromFile("specs/contracts.spec.yaml") +); + +const chain: Chain = { + rpc: process.env.RPC_URL ? process.env.RPC_URL : DEFAULT_RPC_URL, + chainId: process.env.DEPLOYMENT_CHAIN_ID + ? parseInt(process.env.DEPLOYMENT_CHAIN_ID) + : 31337, + chainName: process.env.CHAIN_NAME ? process.env.CHAIN_NAME : "local", + vmType: "evm", + description: "local chain", +}; + +deployToChain( + chain, + accounts.mustGet(chain.chainName), + contracts.subset(), + getMainLogger(), + false +); diff --git a/src/scripts/upgrade-script.ts b/src/scripts/upgrade-script.ts new file mode 100644 index 00000000..6ff086a5 --- /dev/null +++ b/src/scripts/upgrade-script.ts @@ -0,0 +1,47 @@ +#!/usr/bin/env node +import { AccountRegistry, Chain, parseObjFromFile } from ".."; +import { loadTxRegistry } from "../evm/schemas/tx"; +import { sendTxToChain } from "../tx"; + +import { getOutputLogger } from "../utils/cli"; +import { DEFAULT_CHAIN_ID, DEFAULT_RPC_URL } from "../utils/constants"; + +// can either parse +const accountConfigFromYaml = { + name: "local", + registry: parseObjFromFile("specs/evm.accounts.yaml"), +}; + +// Or can parse it form the env +const accountConfig = { + name: "local", + registry: [ + { + name: "KEY_POLYMER", + privateKey: process.env.KEY_POLYMER + ? process.env.KEY_POLYMER + : accountConfigFromYaml.registry[0].privateKey, + }, + ], +}; + +const accounts = AccountRegistry.loadMultiple([accountConfig]); +const upgradeTxs = loadTxRegistry(parseObjFromFile("specs/upgrade.spec.yaml")); + +const chain: Chain = { + rpc: process.env.RPC_URL ? process.env.RPC_URL : DEFAULT_RPC_URL, + chainId: process.env.CHAIN_ID + ? 
parseInt(process.env.CHAIN_ID) + : DEFAULT_CHAIN_ID, + chainName: "local", + vmType: "evm", + description: "local chain", +}; + +sendTxToChain( + chain, + accounts.mustGet(chain.chainName), + upgradeTxs.subset(), + getOutputLogger(), + false +); diff --git a/src/tx.ts b/src/tx.ts new file mode 100644 index 00000000..0af489c1 --- /dev/null +++ b/src/tx.ts @@ -0,0 +1,81 @@ +import { ethers } from "ethers"; +import { AccountRegistry } from "./evm/account"; +import { Chain } from "./evm/chain"; +import { TxRegistry, loadTxRegistry } from "./evm/schemas/tx"; +import { Logger } from "./utils/cli"; +import { + readDeploymentFilesIntoEnv, + readFromDeploymentFile, + renderArgs, +} from "./utils/io"; +import { DEFAULT_DEPLOYER } from "./utils/constants"; + +/** + * Send a tx to an existing contract. Reads contract from the _existingContracts args. Can be used for upgrading proxy to new implementation contracts as well + */ +export async function sendTxToChain( + chain: Chain, // existing contract registry for this chain + accountRegistry: AccountRegistry, + transactionSpec: TxRegistry, + logger: Logger, + dryRun = false +) { + logger.debug( + `sending ${transactionSpec.size} transaction(s) to chain ${ + chain.chainName + } with contractNames: [${transactionSpec.keys()}]` + ); + + if (!dryRun) { + const provider = ethers.getDefaultProvider(chain.rpc); + const newAccounts = accountRegistry.subset([]); + for (const [name, wallet] of accountRegistry.entries()) { + newAccounts.set(name, wallet.connect(provider)); + } + accountRegistry = newAccounts; + } + + // result is the final contract registry after deployment, modified in place + const result = loadTxRegistry( + JSON.parse(JSON.stringify(transactionSpec.serialize())) + ); + + // @ts-ignore + const env: StringToStringMap = { chain }; + await readDeploymentFilesIntoEnv(env); + + const eachTx = async (tx: ReturnType) => { + try { + const factoryName = tx.factoryName ? tx.factoryName : tx.name; + const deployedContract: any = await readFromDeploymentFile( + factoryName, + chain + ); + const deployer = accountRegistry.mustGet( + tx.deployer ? 
tx.deployer : DEFAULT_DEPLOYER + ); + + const deployedContractAddress = renderArgs([tx.address], "", env)[0]; + + const ethersContract = new ethers.Contract( + deployedContractAddress, + deployedContract.abi, + deployer + ); + const args = renderArgs(tx.args, "", env); + logger.info( + `calling ${tx.signature} on ${tx.name} @:${deployedContractAddress} with args: \n [${args}]` + ); + if (!dryRun) { + await ethersContract.getFunction(tx.signature!)(...args); + } + } catch (err) { + logger.error(`[${chain.chainName}] sendTx ${tx.name} failed: ${err}`); + throw err; + } + }; + + for (const tx of result.values()) { + await eachTx(tx); + } +} diff --git a/src/utils/cli.ts b/src/utils/cli.ts new file mode 100644 index 00000000..16de5b4f --- /dev/null +++ b/src/utils/cli.ts @@ -0,0 +1,327 @@ +import { + Command, + Option, + InvalidArgumentError, +} from "@commander-js/extra-typings"; +import fs from "fs"; +import path from "path"; +import yaml from "yaml"; +import * as winston from "winston"; +import { Writable } from "stream"; + +export { Command, Option, InvalidArgumentError }; +export type CmdArgsType any> = Parameters< + ReturnType["handler"] +>[0]; + +/** + * Extract type of a Command's action function + */ +export type Action Command> = + Parameters["action"]>[0]; + +/** + * Extract params type of a Command's action function + */ +export type ActionParams Command> = + Parameters>; + +/** + * Arg/Opt: an array of strings, split by comma. Whitespaces are not ignored. + */ +export function CommaStrs(value: string) { + return value.split(","); +} + +/** + * Arg/Opt: an array of strings, split by comma or whitespace. Whitespaces are ignored. + */ +export function CommaOrWhitespaceStrs(value: string) { + return value.split(/[, ]+/); +} + +/** + * Arg/Opt: an integer. NaN causes error. 
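+ * e.g. Int("42") returns 42, while Int("abc") throws an InvalidArgumentError.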
+ */ +export function Int(value: string) { + const parsed = parseInt(value); + if (isNaN(parsed)) { + throw new InvalidArgumentError(`invalid integer: ${value}`); + } + return parsed; +} + +/** + * Arg/Opt: a port integer [0, 65535] + */ +export function Port(value: string) { + const parsed = Int(value); + if (parsed < 0 || parsed > 65535) { + throw new InvalidArgumentError( + `invalid port: ${value}, must be [0, 65535]` + ); + } + return parsed; +} + +/** + * Arg/Opt: a parsed yaml object + */ +export function YamlFile(value: string) { + const file = FilePath(value); + return yaml.parse(fs.readFileSync(file, "utf8")); +} + +/** + * Arg/Opt: an existing file path + */ +export function FilePath(value: string) { + if (!fs.existsSync(value)) { + throw new InvalidArgumentError(`file not found: ${value}`); + } + if (!fs.statSync(value).isFile()) { + throw new InvalidArgumentError(`not a file: ${value}`); + } + return value; +} + +/** + * Arg/Opt: an existing directory path + */ +export function DirPath(value: string) { + if (!fs.existsSync(value)) { + throw new InvalidArgumentError(`dir not found: ${value}`); + } + if (!fs.statSync(value).isDirectory()) { + throw new InvalidArgumentError(`not a dir: ${value}`); + } + return value; +} + +const OUTPUT_LOGGER_NAME = "output"; +const MAIN_LOGGER_NAME = "main"; + +export const DEFAULT_OUTPUT = "-.yaml"; +export const DEFAULT_LOGGER = "info:-"; +export const MEM_TRANSPORT_NAME = "__mem__"; + +export type Logger = winston.Logger; + +/** + * returns the output logger + * It returns a default logger (defined by DEFAULT_OUTPUT) if not set by user's cli option + * Must use 'info' level for output logger + */ +export function getOutputLogger(): Logger { + if (!winston.loggers.has(OUTPUT_LOGGER_NAME)) { + OutputTarget(DEFAULT_OUTPUT); + } + return winston.loggers.get(OUTPUT_LOGGER_NAME); +} + +/** + * returns the main logger + */ +export function getMainLogger(): Logger { + if (!winston.loggers.has(MAIN_LOGGER_NAME)) { + SetMainLogger(DEFAULT_LOGGER); + } + return winston.loggers.get(MAIN_LOGGER_NAME); +} + +/** + * Arg/Opt: an output target formatted as ., where base is a file path or "-" for stdout, and ext is a file + * extension that determines the output format. + */ +export function OutputTarget(value: string) { + // remove logger if already set + if (winston.loggers.has(OUTPUT_LOGGER_NAME)) { + winston.loggers.close(OUTPUT_LOGGER_NAME); + } + + const valildExtensions = ["json", "yaml", "yml"]; + const parsed = path.parse(value); + const base = path.join(parsed.dir, parsed.name); + const ext = parsed.ext.length > 0 ? 
parsed.ext.slice(1) : ""; + if (!ext || valildExtensions.indexOf(ext) < 0) { + throw new InvalidArgumentError( + `invalid output extension: ${ext}, must be one of [${valildExtensions.join( + "," + )}]` + ); + } + let outFmt: winston.Logform.Format; + // log the message only; no level or timestamp + switch (ext) { + case "json": + outFmt = winston.format.printf((info: any) => + JSON.stringify(info.message, null, 2) + ); + break; + case "yaml": + case "yml": + outFmt = winston.format.printf((info) => yaml.stringify(info.message)); + break; + default: + throw new InvalidArgumentError( + `invalid output extension: ${ext}, must be one of [${valildExtensions.join( + "," + )}]` + ); + } + let transport: winston.LoggerOptions["transports"]; + + switch (base) { + case "-": + transport = new winston.transports.Console({}); + break; + case MEM_TRANSPORT_NAME: + transport = new winston.transports.Stream({ stream: new MemStream() }); + break; + default: + transport = new winston.transports.File({ filename: value, options: {} }); + break; + } + winston.loggers.add(OUTPUT_LOGGER_NAME, { + transports: transport, + format: outFmt, + level: "info", + }); + return value; +} + +export function getOutputInMem() { + const logger = getOutputLogger(); + const transport: { _stream: MemStream } = logger.transports[0] as any; + if ( + !transport._stream || + typeof transport._stream.getContent() !== "string" + ) { + throw new Error("output logger is not in memory"); + } + return transport._stream.getContent(); +} + +class MemStream extends Writable { + _content: string = ""; + + _write( + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null | undefined) => void + ): void { + this._content += chunk; + if (callback) { + callback(); + } + } + + getContent() { + return this._content; + } +} + +// from 0 to 6 +const LogLevel = [ + "error", + "warn", + // default to info + "info", + "http", + "verbose", + "debug", + "silly", +]; + +/** + * parse a logger from a cli arg string, which may contain one or more logger transports, separated by comma + * Logger can be later accessed by calling `getMainLogger` or `winston.loggers.get(MAIN_LOGGER_NAME)` + * @param value a string of logger transports, separated by comma, eg. "info:-,error:err.log" + * by default, file transport will overwrite existing file; use '+' to append to existing file, eg. "info:-,debug:+debug.log" + * @returns same string as value + */ +export function SetMainLogger(value: string): string { + const transports = parseLoggerTransports(value); + // normally we wouldn't set logger multiple times; + // for testing, we clean up existing logger before setting a new one + if (winston.loggers.has(MAIN_LOGGER_NAME)) { + winston.loggers.close(MAIN_LOGGER_NAME); + } + winston.loggers.add(MAIN_LOGGER_NAME, { + transports, + format: winston.format.combine( + // winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss.SSS', options: { utc: true } }), // Use UTC time + winston.format.timestamp(), // UTC time by default + winston.format.printf((info) => { + return `${info.timestamp} ${info.level}: ${info.message}`; + }) + ), + }); + return value; +} +/** + * parse a logger from a arg string, which may contain one or more logger transports, separated by comma + * Logger can be later accessed by calling `getMainLogger` or `winston.loggers.get(MAIN_LOGGER_NAME)` + * @param value a string of logger transports, separated by comma, eg. 
"info:-,error:err.log" + * @returns a winston logger + */ +export function createLogger(value: string) { + return winston.createLogger({ + transports: parseLoggerTransports(value), + format: winston.format.combine( + // winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss.SSS', options: { utc: true } }), // Use UTC time + winston.format.timestamp(), // UTC time by default + winston.format.printf((info) => { + return `${info.timestamp} ${info.level}: ${info.message}`; + }) + ), + }); +} + +/** + * Create or use a logger from a string or use an existing Logger instance + * @param value a string of logger transports, separated by comma, eg. "info:-,error:err.log"; + * Or a logger instance created somewhere else. + * If undefined, use the `DEFAULT_LOGGER` string + * @returns a new or existing logger + */ +export function createOrUseLogger(value?: string | Logger) { + value = value || DEFAULT_LOGGER; + if (typeof value === "string") { + return createLogger(value); + } else if (value instanceof winston.Logger) { + return value; + } + throw new Error( + `invalid logger, should be a string or Logger instance, but got: ${value}` + ); +} + +function parseLoggerTransports(value: string) { + const transportStrs = value.split(/,\s*/); + const transports = transportStrs.map((str) => { + // trim whitespaces + const [level, filename] = str.split(/:\s*/); + if (!LogLevel.includes(level)) + throw new Error(`invalid log level: ${level}`); + if (!filename || filename === "-") { + return new winston.transports.Console({ level }); + } else { + // append to existing file + if (filename.startsWith("+")) { + return new winston.transports.File({ + filename: filename.slice(1), + level, + options: { flags: "a" }, + }); + } + // overwrite existing file + return new winston.transports.File({ + filename, + level, + options: { flags: "w" }, + }); + } + }); + return transports; +} diff --git a/src/utils/constants.ts b/src/utils/constants.ts new file mode 100644 index 00000000..522a3139 --- /dev/null +++ b/src/utils/constants.ts @@ -0,0 +1,10 @@ +export const DEFAULT_DEPLOYER = "default"; +export const DEFAULT_RPC_URL = "http://127.0.0.1:8545"; +export const DEFAULT_CHAIN_ID = 31337; +import path from "path"; +export const MODULE_PATH = "./node_modules/@open-ibc/vibc-core-smart-contracts"; +export const BASE_OUT_PATH = process.env.DEPLOYMENTS_PATH + ? 
process.env.DEPLOYMENTS_PATH + : MODULE_PATH; + +export const BASE_DEPLOYMENTS_PATH = path.join(BASE_OUT_PATH, "deployments"); diff --git a/src/utils/index.ts b/src/utils/index.ts new file mode 100644 index 00000000..debefc31 --- /dev/null +++ b/src/utils/index.ts @@ -0,0 +1,3 @@ +export * as cli from "./cli"; +export * as io from "./io"; +export * as registry from "./registry"; diff --git a/src/utils/io.ts b/src/utils/io.ts new file mode 100644 index 00000000..8e9fb2d9 --- /dev/null +++ b/src/utils/io.ts @@ -0,0 +1,253 @@ +import { fileURLToPath } from "url"; +import fs from "fs"; +import fsAsync from "fs/promises"; +import path from "path"; +import yaml from "yaml"; +import { z } from "zod"; +import nunjucks from "nunjucks"; +import assert from "assert"; +import { ProcessPromise } from "zx"; +import { Chain } from "../evm/chain"; +import { BASE_DEPLOYMENTS_PATH, MODULE_PATH } from "./constants"; + +export interface StringToStringMap { + [key: string]: string | null | undefined; +} + +export type DeployedContractObject = { + factory: string; + address: string; + abi: any; + bytecode: string; + args: any[]; + metadata?: string; + name: string; +}; + +// readYamlFile reads a yaml file and returns the parsed object. +export function readYamlFile(file: string): any { + return yaml.parse(fs.readFileSync(file, "utf-8")); +} + +export function contractNameToDeployFile( + contractName: string, + chainId: number +) { + return `${contractName}-${chainId}.json`; +} + +/** + * Load a json or yaml file and return the parsed object. + * @param file file path must have an extension of .json or .yaml|.yml + * @returns parsed object + * @throws error if file not found, file extension is not supported, or parsing error + */ +export function parseObjFromFile( + file: string, + options: BufferEncoding = "utf8" +): any { + if (!fs.existsSync(file)) { + throw new Error(`file not found: ${file}`); + } + const ext = path.parse(file).ext; + switch (ext) { + case ".json": + return JSON.parse(fs.readFileSync(file, options)); + case ".yaml": + case ".yml": + return yaml.parse(fs.readFileSync(file, options)); + default: + throw new Error( + `unsupported file extension: ${ext}, only {.json, .yaml|.yml} are supported` + ); + } +} + +// configure the renderer to throw an error if a template variable is not found +const renderEnv = nunjucks.configure({ throwOnUndefined: true }); + +/** + * Renders a template string using the provided environment variables. + * @param str - The string to render. `{{var1}}`/`{{obj1.name}}` will be replaced with the value of `env.var1`/`obj.name`. + * @param env - The environment variables (key/value pairs) to use for rendering. + * @returns The rendered string. + */ +export const renderString = (str: string, env: any) => { + try { + return renderEnv.renderString(str, env); + } catch (err: any) { + throw new Error( + `failed to render string: ${JSON.stringify(str)}, error: ${err}` + ); + } +}; + +/** + * Provides a way to parse a zod schema and throw a meaningful error message if parsing fails. + * @param className A meaningful name for the class being parsed. Used for error messages. + * @param config Config object to parse + * @param parseFunc Normally the `parse` method of a zod schema. + * @returns Parsed object that matches the zod schema. 
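+ * @example parseZodSchema("ChainRegistry", config, chainRegistrySchema.parse)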
+ */ +export function parseZodSchema( + className: string, + config: any, + parseFunc: (config: any) => T +) { + try { + return parseFunc(config); + } catch (e) { + const zErr: z.ZodError = e as any; + if (e instanceof z.ZodError) { + throw new Error( + `parsing ${className} failed. ${zErr.issues + .map((i) => i.path) + .join(", ")}: ${zErr.message}\nconfig obj:\n${JSON.stringify( + config, + null, + 2 + )}` + ); + } else { + throw e; + } + } +} + +/** + * create directories recursively if not exists, similar to `mkdir -p dirPath` + */ + +/** + * Remove a dir and its contents recursively, similar to `rm -rf dirPath` + * Then recreate the dir, similar to `mkdir -p dirPath` + * This effectively clears all existing content the dir if they exist; otherwise just create the dir. + */ +export function resetDir(dirPath: string) { + fs.rmSync(dirPath, { recursive: true, force: true }); + fs.mkdirSync(dirPath, { recursive: true }); +} + +export function setStdoutStderr( + proc: ProcessPromise, + wd: string, + stdoutName: string = "stdout", + stderrName: string = "stderr" +): ProcessPromise { + proc.pipe(fs.createWriteStream(path.resolve(wd, stdoutName))); + proc.stderr.pipe(fs.createWriteStream(path.resolve(wd, stderrName))); + proc.quiet(); + return proc; +} + +export function toEnvVarName(e: string) { + return e.replaceAll("-", "_").toUpperCase(); +} + +/** Reads a deployment metadata rom a foundry build file*/ +async function readMetadata(factoryName: string) { + const filePath = path.join( + MODULE_PATH, + "out", + `${factoryName}.sol`, + `${factoryName}.json` + ); + + try { + const data = await fsAsync.readFile(filePath, "utf8"); + return JSON.stringify(JSON.parse(data).metadata); + } catch (e) { + console.error(`error reading from file ${filePath}: \n`, e); + } +} + +const createFolderIfNeeded = async (folder: string) => { + fs.stat(folder, async (err, stats) => { + if (err) { + await fsAsync.mkdir(folder); // create the folder if it doesn't exist + } + }); +}; + +export async function writeDeployedContractToFile( + chain: Chain, + deployedContract: DeployedContractObject +) { + const deployFileName = contractNameToDeployFile( + deployedContract.name, + chain.chainId + ); + const fullPath = path.join(BASE_DEPLOYMENTS_PATH, deployFileName); + await createFolderIfNeeded(BASE_DEPLOYMENTS_PATH); + // get metadata from contract./ + const metadata = await readMetadata(deployedContract.factory); + deployedContract.metadata = metadata; + const outData = JSON.stringify(deployedContract); + + fs.writeFile(fullPath, outData, (err) => { + if (err) { + console.error(err); + return; + } + }); +} + +export async function readDeploymentFilesIntoEnv(env: any) { + await createFolderIfNeeded(BASE_DEPLOYMENTS_PATH); + let files: any[] = []; + try { + files = await fsAsync.readdir(BASE_DEPLOYMENTS_PATH); + } catch (e) { + console.log(`no files to read from`); + } + for (const file of files) { + if (file.endsWith(".json")) { + try { + const data = JSON.parse( + fs.readFileSync(path.join(BASE_DEPLOYMENTS_PATH, file), "utf8") + ); + env[data.name] = data.address; + } catch (e) { + console.error(`error reading file ${file}`, e); + } + } + } + return env; +} + +export async function readFromDeploymentFile( + deploymentName: string, + chain: Chain +) { + const filePath = path.join( + BASE_DEPLOYMENTS_PATH, + contractNameToDeployFile(deploymentName, chain.chainId) + ); + try { + const data = JSON.parse(fs.readFileSync(filePath, "utf8")); + return data; + } catch (e) { + console.error(`error reading file ${filePath}`, e); 
+ } +} + +/** + * + * @param args Render the args for the contract deployment through looking them up in environment + * @param init replace initArgs with the init string + * @param env to look up the args in + * @returns + */ +export const renderArgs = (args: any[] | undefined, init: string, env: any) => { + return args + ? args.map((arg: any) => { + if (typeof arg !== "string") return arg; + if (arg === "$INITARGS") { + if (init === "") + throw new Error(`Found $INITARGS but no args to replace it with.`); + return init; + } + return renderString(arg, env); + }) + : []; +}; diff --git a/src/utils/registry.ts b/src/utils/registry.ts new file mode 100644 index 00000000..f2f71f96 --- /dev/null +++ b/src/utils/registry.ts @@ -0,0 +1,174 @@ +import assert from "assert"; + +/** + * A template for a registry that maps strings to items of type T. + * Items are ordered by insertion order. + * Invariants: + * - keys are unique + */ +export class Registry { + public readonly registry: Map; + + protected nameFunc: (t: T) => string; + protected itemToObj: (t: T) => S; + protected nameInParent?: string; + + /** + * Create a new registry from a list of items. + * @param src The source array of registry items + * @param nameFunc A function that takes an item and returns its name, which must be unique. + * Names are used for reigstry lookups. + * Duplicated names result in the last item with the name being used. + */ + constructor( + src: T[], + options?: { + nameInParent?: string; + nameFunc?: (t: T) => string; + toObj?: (t: T) => S; + } + ) { + const { nameFunc = (t: any) => t["name"], toObj = (t: any) => t } = options + ? options + : {}; + this.registry = new Map(src.map((item) => [nameFunc(item), item])); + this.itemToObj = toObj; + this.nameFunc = nameFunc; + this.nameInParent = options?.nameInParent; + } + + /** + * Get the element type of the registry. + * Should only use this for type checking. + * Returned run-time value is `undefined`. + */ + get Element(): T { + return undefined as any; + } + + get name(): string { + assert(this.nameInParent, "nameInParent not set"); + return this.nameInParent; + } + + public get size(): number { + return this.registry.size; + } + + public keys(): string[] { + return Array.from(this.registry.keys()); + } + + public values(): T[] { + return Array.from(this.registry.values()); + } + + /** + * Get an item by name. If the item doesn't exist, undefined is returned. + */ + public get(name: string): T | undefined { + return this.registry.get(name); + } + + /** + * Get an non-nullable item by name. If the item doesn't exist, an error is thrown. + */ + public mustGet(name: string): T { + const item = this.get(name); + assert(item, `item ${name} not found in registry keys: ${this.keys()}`); + return item; + } + + /** + * returns an iterator over the registry item tuples [key, value:T]. + */ + public entries(): IterableIterator<[string, T]> { + return this.registry.entries(); + } + + /** + * Copy items from another registry to this registry. + * @param entries + * @param allowDup If true, dup keys are allowed, ie. the last item with the same key is used. + */ + public copyFrom( + entries: IterableIterator<[string, T]>, + allowDup = false + ): void { + for (const [name, item] of entries) { + this.set(name, item, allowDup); + } + } + + /** + * Set an item in the registry. If the item already exists, an error is thrown, unless force is true. 
+ */ + public set(name: string, item: T, force = false): void { + if (!force && this.registry.has(name)) { + throw new Error(`item ${name} already exists in registry`); + } + this.registry.set(name, item); + } + + public has(name: string): boolean { + return this.registry.has(name); + } + + /** + * Create a new registry instance from a subset of items in the current registry. + * Error is thrown if any of the items in names is not found in the current registry, unless ignoreErr is true. + * NOTE: Items in the new registry are shallow copies of the original items. + * @param names items to include in the new registry. + * If undefined, all items are included + * If empty array, an empty registry is returned. + * @param ignoreErr siliently ignore items not found in the current registry + * @returns + */ + public subset(names?: string[], ignoreErr = false): this { + const classConstrutor = this.constructor as new (...args: any[]) => this; + const subset = new classConstrutor([], { + toObj: this.itemToObj, + nameFunc: this.nameFunc, + nameInParent: this.nameInParent, + }); + const keys = names ? names : this.keys(); + for (const name of keys) { + if (this.has(name)) { + subset.set(name, this.mustGet(name), true); + } else if (!ignoreErr) { + throw new Error( + `item ${name} not found in registry keys: ${this.keys()}` + ); + } + } + return subset; + } + + /** + * @returns A list of items in the registry, ordered by insertion order. + */ + public toList(): T[] { + return Array.from(this.registry.values()); + } + + /** + * Serialize the registry to a list of items. Default to toList(). + * Subclasses should override this method if they want to serialize the registry differently. + * @returns A list of items in the registry, ordered by insertion order. + */ + public serialize(): S[] { + return this.toList().map((item) => this.itemToObj(item)); + } + + /** + * + * @returns An object with keys as the item names and values as the items. + */ + toSerializedObj(): Record { + const obj: Record = {}; + this.registry.forEach((item, key) => { + obj[key] = this.itemToObj(item); + }); + return obj; + } +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..1d2cc8e1 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,112 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. 
*/
+    // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
+    // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
+    // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
+    // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
+    // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
+    // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
+    // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
+    // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
+
+    /* Modules */
+    "module": "ESNext" /* Specify what module code is generated. */,
+    "rootDir": "./src" /* Specify the root folder within your source files. */,
+    "moduleResolution": "bundler",
+    "moduleDetection": "force",
+    // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
+    // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
+    // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
+    // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
+    // "types": [], /* Specify type package names to be included without being referenced in a source file. */
+    // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
+    // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
+    // "allowImportingTsExtensions": true /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */,
+    // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
+    // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
+    // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
+    // "resolveJsonModule": true, /* Enable importing .json files. */
+    // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
+    // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
+
+    /* JavaScript Support */
+    // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
+    // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
+    // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
+
+    /* Emit */
+    "declaration": true /* Generate .d.ts files from TypeScript and JavaScript files in your project. */,
+    "declarationMap": true /* Create sourcemaps for d.ts files. */,
+    // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
+    // "sourceMap": true, /* Create source map files for emitted JavaScript files.
*/ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist" /* Specify an output folder for all emitted files. */, + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, + + /* Type Checking */ + "strict": true /* Enable all strict type-checking options. */, + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. 
*/ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + "skipDefaultLibCheck": true /* Skip type checking .d.ts files that are included with TypeScript. */, + "skipLibCheck": true /* Skip type checking all .d.ts files. */ + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "lib"] +} diff --git a/tsup.config.ts b/tsup.config.ts new file mode 100644 index 00000000..b635b60a --- /dev/null +++ b/tsup.config.ts @@ -0,0 +1,23 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: [ + "src/index.ts", + "src/utils/index.ts", + "src/evm/index.ts", + "src/utils/cli.ts", + "src/utils/io.ts", + "src/evm/schemas/contract.ts", + "src/evm/schemas/tx.ts", + "src/evm/chain.ts", + "src/evm/account.ts", + "src/scripts/deploy-script.ts", + "src/scripts/upgrade-script.ts", + ], + format: ["cjs", "esm"], // Build for commonJS and ESmodules + dts: true, // Generate declaration file (.d.ts) + splitting: false, + sourcemap: true, + clean: true, + outDir: "./dist", +});
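For reference, here is a minimal usage sketch of the `Registry` class added in `src/utils/registry.ts`. The `ContractItem` shape, the addresses, and the relative import path are illustrative assumptions for this sketch only; they are not part of the package API.

```ts
// Usage sketch for the generic Registry added in src/utils/registry.ts.
// ContractItem, the addresses, and the import path are hypothetical examples.
import { Registry } from "./src/utils/registry";

type ContractItem = { name: string; address: string };

// The default nameFunc reads the `name` field, so no options are needed here.
const contracts = new Registry<ContractItem>([
  { name: "Dispatcher", address: "0x0000000000000000000000000000000000000001" },
  { name: "UniversalChannelHandler", address: "0x0000000000000000000000000000000000000002" },
]);

contracts.mustGet("Dispatcher");          // returns the Dispatcher item, or throws if missing
contracts.subset(["Dispatcher"]).keys();  // ["Dispatcher"]
contracts.serialize();                    // items in insertion order (identity toObj by default)
contracts.toSerializedObj();              // { Dispatcher: {...}, UniversalChannelHandler: {...} }
```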