diff --git a/.ctx/generate-local-network-context.md b/.ctx/generate-local-network-context.md new file mode 100644 index 0000000000..604854131b --- /dev/null +++ b/.ctx/generate-local-network-context.md @@ -0,0 +1,35 @@ +# Generating Local Network Context for Lit Protocol + +The Lit network contexts, which include smart contract addresses and ABIs, typically come from the `@lit-protocol/contracts` package (a separate repository at https://github.com/LIT-Protocol/lit-contracts/). However, these contexts are designed for established networks. + +## Local Network Setup + +For local development (running Lit nodes on your machine), you need to generate a `networkContext.json` file in the `lit-assets` directory. This is typically done by running the deploy script after starting your local Anvil chain. + +## Version Compatibility Changes + +In version 7 or earlier, you could simply copy and paste the `networkContext.json` file, and it would work when setting the network to `custom` when running with Tinny (E2E test suite). + +However, in version 8, we've optimised by removing redundant and unused ABIs from the JSON file and enforced strongly typed ABIs. This optimization introduced an additional conversion layer that extracts only the necessary ABI methods, which must be run manually for local network contexts. + +## Generating Custom Context + +To generate the proper context: + +1. Locate the `getCustomContext` file in the network-specific folder (in this case, `vNaga/naga-develop` folder) +2. Use the `generateSignaturesFromContext` helper function from the `@lit-protocol/contracts` repository + +Here's an example of how to use this function: + +```ts +import { generateSignaturesFromContext } from '@lit-protocol/contracts/custom-network-signatures'; + +await generateSignaturesFromContext({ + jsonFilePath: + '/Users/anson/Projects/lit-assets/blockchain/contracts/networkContext.json', // in lit assets repo + networkName: 'naga-develop', + outputDir: './naga-develop-signatures', + useScriptDirectory: true, + callerPath: import.meta.url, +}); +``` diff --git a/.ctx/v7-to-v8-migration-notes.md b/.ctx/v7-to-v8-migration-notes.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.gitignore b/.gitignore index 16c59f020c..e53043ad0a 100644 --- a/.gitignore +++ b/.gitignore @@ -77,4 +77,6 @@ local-tests/build packages/wrapped-keys-lit-actions/src/generated digest -generate-digest.ts \ No newline at end of file +generate-digest.ts + +.cursor/rules \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 69a63afb63..a0883a5f11 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -3,8 +3,8 @@ "todo-tree.tree.scanMode": "workspace", "conventionalCommits.scopes": [ "contracts-sdk", - "lit-node-client-nodejs", - "core", + "lit-node-client", + "core" ], "workbench.colorCustomizations": { "activityBar.activeBackground": "#2f7c47", @@ -52,4 +52,4 @@ // ] // } // ] -} \ No newline at end of file +} diff --git a/README.md b/README.md index 147bb24a8e..9984acab2b 100644 --- a/README.md +++ b/README.md @@ -50,38 +50,37 @@ yarn add @lit-protocol/lit-node-client -| Package | Category | Download | -| -------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [@lit-protocol/lit-node-client-nodejs](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/lit-node-client-nodejs) | ![lit-node-client-nodejs](https://img.shields.io/badge/-nodejs-2E8B57 'lit-node-client-nodejs') | | -| [@lit-protocol/lit-node-client](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/lit-node-client) | ![lit-node-client](https://img.shields.io/badge/-universal-8A6496 'lit-node-client') | | +| Package | Category | Download | +| ------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| [@lit-protocol/lit-node-client](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/lit-node-client) | ![lit-node-client](https://img.shields.io/badge/-nodejs-2E8B57 'lit-node-client') | | If you're a tech-savvy user and wish to utilize only specific submodules that our main module relies upon, you can find individual packages listed below. This way, you can import only the necessary packages that cater to your specific use case:: -| Package | Category | Download | -| -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [@lit-protocol/access-control-conditions](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/access-control-conditions) | ![access-control-conditions](https://img.shields.io/badge/-universal-8A6496 'access-control-conditions') | | -| [@lit-protocol/auth-helpers](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/auth-helpers) | ![auth-helpers](https://img.shields.io/badge/-universal-8A6496 'auth-helpers') | | -| [@lit-protocol/constants](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/constants) | ![constants](https://img.shields.io/badge/-universal-8A6496 'constants') | | -| [@lit-protocol/contracts-sdk](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/contracts-sdk) | ![contracts-sdk](https://img.shields.io/badge/-universal-8A6496 'contracts-sdk') | | -| [@lit-protocol/core](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/core) | ![core](https://img.shields.io/badge/-universal-8A6496 'core') | | -| [@lit-protocol/crypto](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/crypto) | ![crypto](https://img.shields.io/badge/-universal-8A6496 'crypto') | | -| [@lit-protocol/encryption](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/encryption) | ![encryption](https://img.shields.io/badge/-universal-8A6496 'encryption') | | -| [@lit-protocol/event-listener](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/event-listener) | ![event-listener](https://img.shields.io/badge/-universal-8A6496 'event-listener') | | -| [@lit-protocol/logger](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/logger) | ![logger](https://img.shields.io/badge/-universal-8A6496 'logger') | | -| [@lit-protocol/misc](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/misc) | ![misc](https://img.shields.io/badge/-universal-8A6496 'misc') | | -| [@lit-protocol/nacl](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/nacl) | ![nacl](https://img.shields.io/badge/-universal-8A6496 'nacl') | | -| [@lit-protocol/pkp-base](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-base) | ![pkp-base](https://img.shields.io/badge/-universal-8A6496 'pkp-base') | | -| [@lit-protocol/pkp-cosmos](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-cosmos) | ![pkp-cosmos](https://img.shields.io/badge/-universal-8A6496 'pkp-cosmos') | | -| [@lit-protocol/pkp-ethers](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-ethers) | ![pkp-ethers](https://img.shields.io/badge/-universal-8A6496 'pkp-ethers') | | -| [@lit-protocol/pkp-sui](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-sui) | ![pkp-sui](https://img.shields.io/badge/-universal-8A6496 'pkp-sui') | | -| [@lit-protocol/pkp-walletconnect](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-walletconnect) | ![pkp-walletconnect](https://img.shields.io/badge/-universal-8A6496 'pkp-walletconnect') | | -| [@lit-protocol/types](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/types) | ![types](https://img.shields.io/badge/-universal-8A6496 'types') | | -| [@lit-protocol/uint8arrays](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/uint8arrays) | ![uint8arrays](https://img.shields.io/badge/-universal-8A6496 'uint8arrays') | | -| [@lit-protocol/wasm](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/wasm) | ![wasm](https://img.shields.io/badge/-universal-8A6496 'wasm') | | -| [@lit-protocol/wrapped-keys](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/wrapped-keys) | ![wrapped-keys](https://img.shields.io/badge/-universal-8A6496 'wrapped-keys') | | -| [@lit-protocol/wrapped-keys-lit-actions](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/wrapped-keys-lit-actions) | ![wrapped-keys-lit-actions](https://img.shields.io/badge/-universal-8A6496 'wrapped-keys-lit-actions') | | -| [@lit-protocol/auth-browser](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/auth-browser) | ![auth-browser](https://img.shields.io/badge/-browser-E98869 'auth-browser') | | -| [@lit-protocol/misc-browser](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/misc-browser) | ![misc-browser](https://img.shields.io/badge/-browser-E98869 'misc-browser') | | +| Package | Category | Download | +| ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| [@lit-protocol/access-control-conditions](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/access-control-conditions) | ![access-control-conditions](https://img.shields.io/badge/-universal-8A6496 'access-control-conditions') | | +| [@lit-protocol/access-control-conditions-schemas](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/access-control-conditions-schemas) | ![access-control-conditions-schemas](https://img.shields.io/badge/-universal-8A6496 'access-control-conditions-schemas') | | +| [@lit-protocol/auth](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/auth) | ![auth](https://img.shields.io/badge/-universal-8A6496 'auth') | | +| [@lit-protocol/auth-helpers](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/auth-helpers) | ![auth-helpers](https://img.shields.io/badge/-universal-8A6496 'auth-helpers') | | +| [@lit-protocol/constants](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/constants) | ![constants](https://img.shields.io/badge/-universal-8A6496 'constants') | | +| [@lit-protocol/contracts-sdk](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/contracts-sdk) | ![contracts-sdk](https://img.shields.io/badge/-universal-8A6496 'contracts-sdk') | | +| [@lit-protocol/core](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/core) | ![core](https://img.shields.io/badge/-universal-8A6496 'core') | | +| [@lit-protocol/crypto](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/crypto) | ![crypto](https://img.shields.io/badge/-universal-8A6496 'crypto') | | +| [@lit-protocol/event-listener](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/event-listener) | ![event-listener](https://img.shields.io/badge/-universal-8A6496 'event-listener') | | +| [@lit-protocol/lit-client](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/lit-client) | ![lit-client](https://img.shields.io/badge/-universal-8A6496 'lit-client') | | +| [@lit-protocol/logger](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/logger) | ![logger](https://img.shields.io/badge/-universal-8A6496 'logger') | | +| [@lit-protocol/networks](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/networks) | ![networks](https://img.shields.io/badge/-universal-8A6496 'networks') | | +| [@lit-protocol/pkp-base](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-base) | ![pkp-base](https://img.shields.io/badge/-universal-8A6496 'pkp-base') | | +| [@lit-protocol/pkp-cosmos](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-cosmos) | ![pkp-cosmos](https://img.shields.io/badge/-universal-8A6496 'pkp-cosmos') | | +| [@lit-protocol/pkp-ethers](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-ethers) | ![pkp-ethers](https://img.shields.io/badge/-universal-8A6496 'pkp-ethers') | | +| [@lit-protocol/pkp-sui](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-sui) | ![pkp-sui](https://img.shields.io/badge/-universal-8A6496 'pkp-sui') | | +| [@lit-protocol/pkp-walletconnect](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/pkp-walletconnect) | ![pkp-walletconnect](https://img.shields.io/badge/-universal-8A6496 'pkp-walletconnect') | | +| [@lit-protocol/schemas](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/schemas) | ![schemas](https://img.shields.io/badge/-universal-8A6496 'schemas') | | +| [@lit-protocol/types](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/types) | ![types](https://img.shields.io/badge/-universal-8A6496 'types') | | +| [@lit-protocol/wasm](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/wasm) | ![wasm](https://img.shields.io/badge/-universal-8A6496 'wasm') | | +| [@lit-protocol/wrapped-keys](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/wrapped-keys) | ![wrapped-keys](https://img.shields.io/badge/-universal-8A6496 'wrapped-keys') | | +| [@lit-protocol/wrapped-keys-lit-actions](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/wrapped-keys-lit-actions) | ![wrapped-keys-lit-actions](https://img.shields.io/badge/-universal-8A6496 'wrapped-keys-lit-actions') | | +| [@lit-protocol/misc-browser](https://github.com/LIT-Protocol/js-sdk/tree/master/packages/misc-browser) | ![misc-browser](https://img.shields.io/badge/-browser-E98869 'misc-browser') | | @@ -100,7 +99,7 @@ If you're a tech-savvy user and wish to utilize only specific submodules that ou ## Prerequisite -- node (v19.x or above) +- node (v20.x or above) - rust (v1.70.00 or above) - [wasm-pack](https://github.com/rustwasm/wasm-pack) diff --git a/jest.setup.js b/jest.setup.js index f44e2a7ef9..d2f1c82f82 100644 --- a/jest.setup.js +++ b/jest.setup.js @@ -3,3 +3,22 @@ const crypto = require('crypto'); global.TextEncoder = require('util').TextEncoder; global.TextDecoder = require('util').TextDecoder; global.crypto = crypto; + +if (!global.crypto.subtle) { + global.crypto.subtle = { + digest: async (algorithm, data) => { + const algo = algorithm.toLowerCase().replace('-', ''); + const hash = crypto.createHash(algo); + hash.update(Buffer.from(data)); + return hash.digest().buffer; + }, + }; +} + +// If this is not included, you will get the following error when running it in Jest: +// (Error) Details: Request is not defined +// The problem is that Jest is running in a Node.js environment where the global Request API (part of the Fetch API) might not be available or properly configured. Bun, on the other hand, has this API built into its runtime by default, which is why it works. +const { default: fetch, Request, Response } = require('node-fetch'); +global.fetch = fetch; +global.Request = Request; +global.Response = Response; diff --git a/local-tests/README.md b/local-tests/README.md index 8196ddc46b..de531ce201 100644 --- a/local-tests/README.md +++ b/local-tests/README.md @@ -122,21 +122,3 @@ The `TinnyPerson` class encapsulates various functionalities to manage wallet op | `createCapacityDelegationAuthSig(addresses)` | Mints a Capacity Credits NFT and creates an authentication signature for delegating capacity, which can be used to authorize other addresses to use the minted credits. | ## - -# esbuild benchmark - -```ts -// test-bundle-speed.ts -export const testBundleSpeed = async (devEnv: TinnyEnvironment) => { - const a = await import('@lit-protocol/lit-node-client'); - const b = await import('@lit-protocol/contracts-sdk'); - const c = await import('@lit-protocol/auth-helpers'); - const d = await import('@lit-protocol/constants'); - const e = await import('@lit-protocol/lit-auth-client'); - - console.log(a, b, c, d, e); -}; -// ---------------- -// Build time: 77ms -// ---------------- -``` diff --git a/local-tests/package.json b/local-tests/package.json index d4d06845a8..da3ef632ae 100644 --- a/local-tests/package.json +++ b/local-tests/package.json @@ -81,16 +81,14 @@ "synthetix-js": "^2.74.1", "tslib": "^2.3.0", "tweetnacl": "^1.0.3", - "tweetnacl-util": "^0.15.1", "uint8arrays": "^4.0.3", "@openagenda/verror": "^3.1.4", "ipfs-unixfs-importer": "12.0.1", "@solana/web3.js": "^1.95.3", "bech32": "^2.0.0", "pako": "^2.1.0", - "@lit-protocol/misc": "^7.0.0", - "@lit-protocol/lit-node-client": "^7.0.0", - "@lit-protocol/lit-auth-client": "^7.0.0", + "@lit-protocol/lit-node-client": "8.0.0-alpha.0", + "@lit-protocol/auth": "8.0.0-alpha.0", "@lit-protocol/contracts": "^0.0.71" } } diff --git a/local-tests/setup/networkContext.json b/local-tests/setup/networkContext.json index e2e4f67f5f..82da5e588a 100644 --- a/local-tests/setup/networkContext.json +++ b/local-tests/setup/networkContext.json @@ -1,6 +1,6 @@ { "Allowlist": { - "address": "0x7a2088a1bFc9d81c55368AE168C2C02570cB814F", + "address": "0xa82fF9aFd8f496c3d6ac40E2a0F282E47488CFc9", "abi": [ { "inputs": [], @@ -231,7 +231,7 @@ "name": "Allowlist" }, "LITToken": { - "address": "0x0DCd1Bf9A1b36cE34237eEaFef220932846BCD82", + "address": "0x59b670e9fA9D0A427751Af201D676719a970857b", "abi": [ { "inputs": [ @@ -1279,7 +1279,7 @@ "name": "LITToken" }, "Multisender": { - "address": "0x4826533B4897376654Bb4d4AD88B7faFD0C98528", + "address": "0x4c5859f0F772848b2D91F1D83E2Fe57935348029", "abi": [ { "anonymous": false, @@ -1388,7 +1388,7 @@ "name": "Multisender" }, "PKPHelper": { - "address": "0xcbEAF3BDe82155F56486Fb5a1072cb8baAf547cc", + "address": "0x04C89607413713Ec9775E14b954286519d836FEf", "abi": [ { "inputs": [ @@ -2370,7 +2370,7 @@ "name": "PKPHelper" }, "PKPNFT": { - "address": "0xa82fF9aFd8f496c3d6ac40E2a0F282E47488CFc9", + "address": "0x99bbA657f2BbC93c02D617f8bA121cB8Fc104Acf", "abi": [ { "inputs": [ @@ -2895,6 +2895,19 @@ "name": "Transfer", "type": "event" }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "newTrustedForwarder", + "type": "address" + } + ], + "name": "TrustedForwarderSet", + "type": "event" + }, { "anonymous": false, "inputs": [ @@ -3168,6 +3181,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getTrustedForwarder", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "initialize", @@ -3433,6 +3459,19 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "forwarder", + "type": "address" + } + ], + "name": "setTrustedForwarder", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -3574,7 +3613,7 @@ "name": "PKPNFT" }, "PKPNFTMetadata": { - "address": "0x5eb3Bc0a489C5A8288765d2336659EbCA68FCd00", + "address": "0x82e01223d51Eb87e16A03E24687EDF0F294da6f1", "abi": [ { "inputs": [ @@ -3732,7 +3771,7 @@ "name": "PKPNFTMetadata" }, "PKPPermissions": { - "address": "0xFD471836031dc5108809D173A067e8486B9047A3", + "address": "0xdbC43Ba45381e02825b14322cDdd15eC4B3164E6", "abi": [ { "inputs": [ @@ -4286,6 +4325,19 @@ "name": "RootHashUpdated", "type": "event" }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "newTrustedForwarder", + "type": "address" + } + ], + "name": "TrustedForwarderSet", + "type": "event" + }, { "inputs": [ { @@ -4687,6 +4739,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getTrustedForwarder", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -4945,6 +5010,19 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "forwarder", + "type": "address" + } + ], + "name": "setTrustedForwarder", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -5022,7 +5100,7 @@ "name": "PKPPermissions" }, "PubkeyRouter": { - "address": "0x70e0bA845a1A0F2DA3359C97E0285013525FFC49", + "address": "0x809d550fca64d94Bd9F66E60752A544199cfAC3D", "abi": [ { "inputs": [ @@ -5545,6 +5623,19 @@ "name": "ToggleEvent", "type": "event" }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "newTrustedForwarder", + "type": "address" + } + ], + "name": "TrustedForwarderSet", + "type": "event" + }, { "inputs": [ { @@ -5814,6 +5905,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getTrustedForwarder", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -5948,6 +6052,19 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "forwarder", + "type": "address" + } + ], + "name": "setTrustedForwarder", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -5982,7 +6099,7 @@ "name": "PubkeyRouter" }, "Staking": { - "address": "0x4A679253410272dd5232B3Ff7cF5dbB88f295319", + "address": "0x9E545E3C0baAB3E08CdfD552C960A1050f373042", "abi": [ { "inputs": [ @@ -6375,11 +6492,6 @@ "stateMutability": "nonpayable", "type": "function" }, - { - "inputs": [], - "name": "CallerNotOwner", - "type": "error" - }, { "inputs": [ { @@ -6404,6 +6516,25 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "stakerAddress", + "type": "address" + } + ], + "name": "getCurrentRealmIdForStakerAddress", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -6423,6 +6554,38 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "stakerAddress", + "type": "address" + } + ], + "name": "getShawdowRealmIdForStakerAddress", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getUnassignedStakerAddresses", + "outputs": [ + { + "internalType": "address[]", + "name": "", + "type": "address[]" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "getUnassignedValidators", @@ -6471,18 +6634,43 @@ }, { "internalType": "uint256", - "name": "commission", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRealmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator[]", @@ -6588,6 +6776,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "numRealms", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -6642,18 +6843,43 @@ }, { "internalType": "uint256", - "name": "commission", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRealmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator", @@ -6664,11 +6890,31 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "CallerNotOwner", + "type": "error" + }, { "inputs": [], "name": "CallerNotOwnerOrDevopsAdmin", "type": "error" }, + { + "inputs": [], + "name": "CannotStakeZero", + "type": "error" + }, + { + "inputs": [], + "name": "InvalidNewSharePrice", + "type": "error" + }, + { + "inputs": [], + "name": "InvalidSlashPercentage", + "type": "error" + }, { "inputs": [ { @@ -6680,6 +6926,22 @@ "name": "MustBeInNextValidatorSetLockedOrReadyForNextEpochState", "type": "error" }, + { + "inputs": [ + { + "internalType": "address", + "name": "validator", + "type": "address" + }, + { + "internalType": "address[]", + "name": "validatorsInNextEpoch", + "type": "address[]" + } + ], + "name": "ValidatorIsNotInNextEpoch", + "type": "error" + }, { "anonymous": false, "inputs": [ @@ -6975,13 +7237,13 @@ { "indexed": false, "internalType": "uint256", - "name": "newMaxTripleCount", + "name": "newMaxPresignCount", "type": "uint256" }, { "indexed": false, "internalType": "uint256", - "name": "newMinTripleCount", + "name": "newMinPresignCount", "type": "uint256" }, { @@ -6993,7 +7255,7 @@ { "indexed": false, "internalType": "uint256", - "name": "newMaxTripleConcurrency", + "name": "newMaxPresignConcurrency", "type": "uint256" }, { @@ -7012,11 +7274,43 @@ { "indexed": false, "internalType": "address", - "name": "newStakingTokenAddress", + "name": "newResolverContractAddress", "type": "address" } ], - "name": "StakingTokenSet", + "name": "ResolverContractAddressSet", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "staker", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "Staked", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "newStakingTokenAddress", + "type": "address" + } + ], + "name": "StakingTokenSet", "type": "event" }, { @@ -7032,6 +7326,32 @@ "name": "StateChanged", "type": "event" }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "staker", + "type": "address" + } + ], + "name": "ValidatorBanned", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "staker", + "type": "address" + } + ], + "name": "ValidatorKickedFromNextEpoch", + "type": "event" + }, { "anonymous": false, "inputs": [ @@ -7045,6 +7365,32 @@ "name": "ValidatorRejoinedNextEpoch", "type": "event" }, + { + "inputs": [ + { + "internalType": "address", + "name": "staker", + "type": "address" + } + ], + "name": "addPermittedStaker", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "addRealm", + "outputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -7110,14 +7456,37 @@ { "inputs": [ { - "internalType": "address", - "name": "validatorStakerAddress", - "type": "address" + "internalType": "uint256", + "name": "source_realmId", + "type": "uint256" }, { "internalType": "uint256", - "name": "amountToPenalize", + "name": "target_realmId", "type": "uint256" + }, + { + "internalType": "address[]", + "name": "target_validators", + "type": "address[]" + } + ], + "name": "adminSetupShadowSplicing", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "percentage", + "type": "uint256" + }, + { + "internalType": "address", + "name": "stakerAddress", + "type": "address" } ], "name": "adminSlashValidator", @@ -7148,6 +7517,24 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "decreaseRewardPool", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -7174,6 +7561,50 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "increaseRewardPool", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "staker", + "type": "address" + } + ], + "name": "removePermittedStaker", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + } + ], + "name": "removeRealm", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -7317,6 +7748,16 @@ "internalType": "uint256", "name": "minSelfStakeTimelock", "type": "uint256" + }, + { + "internalType": "uint256", + "name": "minValidatorCountToClampMinimumThreshold", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "minThresholdToClampAt", + "type": "uint256" } ], "internalType": "struct LibStakingStorage.GlobalConfig", @@ -7329,6 +7770,19 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "newResolverAddress", + "type": "address" + } + ], + "name": "setContractResolver", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -7458,6 +7912,19 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "bool", + "name": "permittedStakersOn", + "type": "bool" + } + ], + "name": "setPermittedStakersOn", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -7474,12 +7941,12 @@ }, { "internalType": "uint256", - "name": "maxTripleCount", + "name": "maxPresignCount", "type": "uint256" }, { "internalType": "uint256", - "name": "minTripleCount", + "name": "minPresignCount", "type": "uint256" }, { @@ -7489,13 +7956,18 @@ }, { "internalType": "uint256", - "name": "maxTripleConcurrency", + "name": "maxPresignConcurrency", "type": "uint256" }, { "internalType": "bool", "name": "rpcHealthcheckEnabled", "type": "bool" + }, + { + "internalType": "uint256", + "name": "minEpochForRewards", + "type": "uint256" } ], "internalType": "struct LibStakingStorage.RealmConfig", @@ -7508,6 +7980,19 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "newTotalSupply", + "type": "uint256" + } + ], + "name": "setTokenTotalSupplyStandIn", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [], "name": "CallerNotContract", @@ -7523,11 +8008,6 @@ "name": "CannotModifyUnfrozen", "type": "error" }, - { - "inputs": [], - "name": "CannotStakeZero", - "type": "error" - }, { "inputs": [], "name": "CannotWithdrawFrozen", @@ -7560,11 +8040,6 @@ "name": "InsufficientSelfStake", "type": "error" }, - { - "inputs": [], - "name": "InvalidNewSharePrice", - "type": "error" - }, { "inputs": [], "name": "InvalidRatio", @@ -7675,6 +8150,11 @@ "name": "TimeLockNotMet", "type": "error" }, + { + "inputs": [], + "name": "TooSoonToWithdraw", + "type": "error" + }, { "inputs": [ { @@ -7692,11 +8172,29 @@ { "indexed": false, "internalType": "address", - "name": "newResolverContractAddress", + "name": "stakerAddress", "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "rewards", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "fromEpoch", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "toEpoch", + "type": "uint256" } ], - "name": "ResolverContractAddressSet", + "name": "FixedCostRewardsClaimed", "type": "event" }, { @@ -7809,19 +8307,44 @@ "anonymous": false, "inputs": [ { - "indexed": true, + "indexed": false, "internalType": "address", - "name": "staker", + "name": "newTrustedForwarder", + "type": "address" + } + ], + "name": "TrustedForwarderSet", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "stakerAddress", "type": "address" }, { "indexed": false, "internalType": "uint256", - "name": "amount", + "name": "rewards", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "fromEpoch", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "toEpoch", "type": "uint256" } ], - "name": "Staked", + "name": "ValidatorCommissionClaimed", "type": "event" }, { @@ -7856,37 +8379,6 @@ "name": "Withdrawn", "type": "event" }, - { - "inputs": [ - { - "internalType": "address", - "name": "staker", - "type": "address" - } - ], - "name": "addPermittedStaker", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "percentage", - "type": "uint256" - }, - { - "internalType": "address", - "name": "stakerAddress", - "type": "address" - } - ], - "name": "adminSlashValidator", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, { "inputs": [ { @@ -7925,6 +8417,24 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxNumberOfEpochsToClaim", + "type": "uint256" + } + ], + "name": "claimFixedCostRewards", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -7941,6 +8451,11 @@ "internalType": "uint256", "name": "stakeRecordId", "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxNumberOfEpochsToClaim", + "type": "uint256" } ], "name": "claimStakeRewards", @@ -7949,7 +8464,18 @@ "type": "function" }, { - "inputs": [], + "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxNumberOfEpochsToClaim", + "type": "uint256" + } + ], "name": "claimValidatorCommission", "outputs": [], "stateMutability": "nonpayable", @@ -8023,7 +8549,17 @@ }, { "internalType": "uint256", - "name": "totalRewards", + "name": "totalStakeRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "validatorFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "validatorCommission", "type": "uint256" }, { @@ -8074,46 +8610,89 @@ }, { "internalType": "uint256", - "name": "stakeRecordId", + "name": "rewardEpochNumber", "type": "uint256" - }, + } + ], + "name": "getRewardEpochView", + "outputs": [ { - "internalType": "uint256", - "name": "amount", - "type": "uint256" + "components": [ + { + "internalType": "uint256", + "name": "epochEnd", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "totalStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "totalStakeRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "validatorFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "validatorCommission", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "slope", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "slopeIncrease", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "validatorSharePrice", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "stakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "validatorSharePriceAtLastUpdate", + "type": "uint256" + }, + { + "internalType": "bool", + "name": "initial", + "type": "bool" + } + ], + "internalType": "struct LibStakingStorage.RewardEpoch", + "name": "", + "type": "tuple" } ], - "name": "increaseStakeRecordAmount", - "outputs": [], - "stateMutability": "nonpayable", + "stateMutability": "view", "type": "function" }, { - "inputs": [ - { - "internalType": "uint256", - "name": "realmId", - "type": "uint256" - }, + "inputs": [], + "name": "getTrustedForwarder", + "outputs": [ { "internalType": "address", - "name": "stakerAddress", + "name": "", "type": "address" - }, - { - "internalType": "uint256", - "name": "stakeRecordId", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "newTimeLock", - "type": "uint256" } ], - "name": "increaseStakeRecordTimelock", - "outputs": [], - "stateMutability": "nonpayable", + "stateMutability": "view", "type": "function" }, { @@ -8125,25 +8704,25 @@ }, { "internalType": "uint256", - "name": "rewardEpochNumber", + "name": "stakeRecordId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "additionalAmount", "type": "uint256" } ], - "name": "initializeRewardEpoch", + "name": "increaseStakeRecordAmount", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ - { - "internalType": "uint256", - "name": "realmId", - "type": "uint256" - }, { "internalType": "address", - "name": "userStakerAddress", + "name": "stakerAddress", "type": "address" }, { @@ -8152,12 +8731,12 @@ "type": "uint256" }, { - "internalType": "address", - "name": "newStakerAddress", - "type": "address" + "internalType": "uint256", + "name": "additionalTimeLock", + "type": "uint256" } ], - "name": "migrateStakeRecord", + "name": "increaseStakeRecordTimelock", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -8166,80 +8745,76 @@ "inputs": [ { "internalType": "address", - "name": "staker", + "name": "stakerAddress", "type": "address" - } - ], - "name": "removePermittedStaker", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ + }, { - "internalType": "address", - "name": "newResolverAddress", - "type": "address" - } - ], - "name": "setContractResolver", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ + "internalType": "uint256", + "name": "rewardEpochNumber", + "type": "uint256" + }, { "internalType": "bool", - "name": "permittedStakersOn", + "name": "isInitial", "type": "bool" } ], - "name": "setPermittedStakersOn", + "name": "initializeRewardEpoch", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + }, { "internalType": "address", - "name": "stakerAddress", + "name": "operatorAddressToMigrateFrom", "type": "address" }, { "internalType": "uint256", - "name": "rate", + "name": "stakeRecordId", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddressToMigrateTo", + "type": "address" } ], - "name": "setValidatorComissionRate", + "name": "migrateStakeRecord", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ - { - "internalType": "uint256", - "name": "percentage", - "type": "uint256" - }, { "internalType": "address", - "name": "stakerAddress", + "name": "forwarder", "type": "address" } ], - "name": "slashValidator", - "outputs": [ + "name": "setTrustedForwarder", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ { "internalType": "uint256", - "name": "", + "name": "rate", "type": "uint256" } ], + "name": "setValidatorCommissionRate", + "outputs": [], "stateMutability": "nonpayable", "type": "function" }, @@ -8307,25 +8882,6 @@ "stateMutability": "nonpayable", "type": "function" }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "realmId", - "type": "uint256" - } - ], - "name": "updateRewardEpoch", - "outputs": [ - { - "internalType": "uint256", - "name": "currentRewardEpoch", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, { "inputs": [ { @@ -8419,6 +8975,11 @@ "name": "CouldNotMapNodeAddressToStakerAddress", "type": "error" }, + { + "inputs": [], + "name": "InvalidAttestedAddress", + "type": "error" + }, { "inputs": [ { @@ -8575,6 +9136,11 @@ "name": "SignaledReadyForWrongEpochNumber", "type": "error" }, + { + "inputs": [], + "name": "StakerAddressMismatch", + "type": "error" + }, { "inputs": [ { @@ -8602,6 +9168,17 @@ "name": "TryingToWithdrawMoreThanStaked", "type": "error" }, + { + "inputs": [ + { + "internalType": "address", + "name": "staker", + "type": "address" + } + ], + "name": "ValidatorAlreadyInNextValidatorSet", + "type": "error" + }, { "inputs": [ { @@ -8622,16 +9199,11 @@ "inputs": [ { "internalType": "address", - "name": "validator", + "name": "staker", "type": "address" - }, - { - "internalType": "address[]", - "name": "validatorsInNextEpoch", - "type": "address[]" } ], - "name": "ValidatorIsNotInNextEpoch", + "name": "ValidatorNotInNextEpoch", "type": "error" }, { @@ -8719,13 +9291,13 @@ { "indexed": false, "internalType": "uint256", - "name": "newMaxTripleCount", + "name": "newMaxPresignCount", "type": "uint256" }, { "indexed": false, "internalType": "uint256", - "name": "newMinTripleCount", + "name": "newMinPresignCount", "type": "uint256" }, { @@ -8737,7 +9309,7 @@ { "indexed": false, "internalType": "uint256", - "name": "newMaxTripleConcurrency", + "name": "newMaxPresignConcurrency", "type": "uint256" }, { @@ -8852,25 +9424,6 @@ "name": "RewardsDurationUpdated", "type": "event" }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "staker", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amountBurned", - "type": "uint256" - } - ], - "name": "ValidatorKickedFromNextEpoch", - "type": "event" - }, { "anonymous": false, "inputs": [ @@ -9000,39 +9553,27 @@ "internalType": "uint256", "name": "realmId", "type": "uint256" - }, - { - "internalType": "uint32", - "name": "ip", - "type": "uint32" - }, - { - "internalType": "uint128", - "name": "ipv6", - "type": "uint128" - }, - { - "internalType": "uint32", - "name": "port", - "type": "uint32" - }, - { - "internalType": "address", - "name": "nodeAddress", - "type": "address" - }, + } + ], + "name": "requestToJoin", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ { "internalType": "uint256", - "name": "senderPubKey", + "name": "realmId", "type": "uint256" }, { - "internalType": "uint256", - "name": "receiverPubKey", - "type": "uint256" + "internalType": "address", + "name": "stakerAddress", + "type": "address" } ], - "name": "requestToJoin", + "name": "requestToJoinAsAdmin", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -9045,29 +9586,27 @@ "type": "uint256" }, { - "internalType": "uint32", - "name": "ip", - "type": "uint32" - }, - { - "internalType": "uint128", - "name": "ipv6", - "type": "uint128" - }, - { - "internalType": "uint32", - "name": "port", - "type": "uint32" - }, + "internalType": "address", + "name": "stakerAddress", + "type": "address" + } + ], + "name": "requestToJoinAsForShadowSplicing", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ { "internalType": "uint256", - "name": "senderPubKey", + "name": "realmId", "type": "uint256" }, { - "internalType": "uint256", - "name": "receiverPubKey", - "type": "uint256" + "internalType": "address", + "name": "stakerAddress", + "type": "address" } ], "name": "requestToJoinAsNode", @@ -9114,7 +9653,7 @@ }, { "internalType": "address", - "name": "nodeAddress", + "name": "operatorAddress", "type": "address" }, { @@ -9167,36 +9706,6 @@ "internalType": "uint256", "name": "amount", "type": "uint256" - }, - { - "internalType": "uint32", - "name": "ip", - "type": "uint32" - }, - { - "internalType": "uint128", - "name": "ipv6", - "type": "uint128" - }, - { - "internalType": "uint32", - "name": "port", - "type": "uint32" - }, - { - "internalType": "address", - "name": "nodeAddress", - "type": "address" - }, - { - "internalType": "uint256", - "name": "senderPubKey", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "receiverPubKey", - "type": "uint256" } ], "name": "stakeAndJoin", @@ -9477,11 +9986,6 @@ "name": "NodeAddressNotFoundForStaker", "type": "error" }, - { - "inputs": [], - "name": "StakeNotFound", - "type": "error" - }, { "inputs": [ { @@ -9497,14 +10001,14 @@ "type": "uint256" }, { - "internalType": "uint256", - "name": "rewards", - "type": "uint256" + "internalType": "address[]", + "name": "validatorsInCurrentEpoch", + "type": "address[]" }, { - "internalType": "bool", - "name": "isStarted", - "type": "bool" + "internalType": "uint256", + "name": "actualEpochLength", + "type": "uint256" } ], "internalType": "struct LibStakingStorage.RewardEpochGlobalStats", @@ -9512,7 +10016,7 @@ "type": "tuple" } ], - "name": "calculateRewardsPerEpoch", + "name": "calculateRewardsPerDay", "outputs": [ { "internalType": "uint256", @@ -9687,27 +10191,27 @@ }, { "internalType": "uint256", - "name": "endTime", + "name": "nextRewardEpochNumber", "type": "uint256" }, { "internalType": "uint256", - "name": "retries", + "name": "endTime", "type": "uint256" }, { "internalType": "uint256", - "name": "timeout", + "name": "retries", "type": "uint256" }, { "internalType": "uint256", - "name": "startTime", + "name": "timeout", "type": "uint256" }, { "internalType": "uint256", - "name": "lastEpochStart", + "name": "startTime", "type": "uint256" } ], @@ -9792,18 +10296,43 @@ }, { "internalType": "uint256", - "name": "commission", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRealmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator[]", @@ -9843,27 +10372,27 @@ }, { "internalType": "uint256", - "name": "endTime", + "name": "nextRewardEpochNumber", "type": "uint256" }, { "internalType": "uint256", - "name": "retries", + "name": "endTime", "type": "uint256" }, { "internalType": "uint256", - "name": "timeout", + "name": "retries", "type": "uint256" }, { "internalType": "uint256", - "name": "startTime", + "name": "timeout", "type": "uint256" }, { "internalType": "uint256", - "name": "lastEpochStart", + "name": "startTime", "type": "uint256" } ], @@ -9900,38 +10429,63 @@ }, { "internalType": "uint256", - "name": "reward", + "name": "reward", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "senderPubKey", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "receiverPubKey", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastActiveEpoch", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "senderPubKey", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "receiverPubKey", + "name": "lastRealmId", "type": "uint256" }, { "internalType": "uint256", - "name": "lastActiveEpoch", + "name": "delegatedStakeAmount", "type": "uint256" }, { "internalType": "uint256", - "name": "commission", + "name": "delegatedStakeWeight", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpochClaimedFixedCostRewards", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator[]", @@ -9961,6 +10515,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getAllValidators", + "outputs": [ + { + "internalType": "address[]", + "name": "", + "type": "address[]" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "getKeyTypes", @@ -9993,6 +10560,82 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "user", + "type": "address" + }, + { + "internalType": "address", + "name": "stakerAddress", + "type": "address" + } + ], + "name": "getLastStakeRecord", + "outputs": [ + { + "components": [ + { + "internalType": "uint256", + "name": "id", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "unfreezeStart", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "timeLock", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastUpdateTimestamp", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimed", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "initialSharePrice", + "type": "uint256" + }, + { + "internalType": "bool", + "name": "loaded", + "type": "bool" + }, + { + "internalType": "bool", + "name": "frozen", + "type": "bool" + }, + { + "internalType": "address", + "name": "attributionAddress", + "type": "address" + } + ], + "internalType": "struct LibStakingStorage.StakeRecord", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "getLitCirc", @@ -10003,7 +10646,25 @@ "type": "uint256" } ], - "stateMutability": "pure", + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getLowestRewardEpochNumber", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", "type": "function" }, { @@ -10087,6 +10748,25 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + } + ], + "name": "getNonShadowValidators", + "outputs": [ + { + "internalType": "address[]", + "name": "", + "type": "address[]" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -10110,14 +10790,14 @@ "type": "uint256" }, { - "internalType": "uint256", - "name": "rewards", - "type": "uint256" + "internalType": "address[]", + "name": "validatorsInCurrentEpoch", + "type": "address[]" }, { - "internalType": "bool", - "name": "isStarted", - "type": "bool" + "internalType": "uint256", + "name": "actualEpochLength", + "type": "uint256" } ], "internalType": "struct LibStakingStorage.RewardEpochGlobalStats", @@ -10147,6 +10827,25 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + } + ], + "name": "getShadowValidators", + "outputs": [ + { + "internalType": "address[]", + "name": "", + "type": "address[]" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -10196,7 +10895,7 @@ }, { "internalType": "uint256", - "name": "rewardEpochCheckpoint", + "name": "lastRewardEpochClaimed", "type": "uint256" }, { @@ -10213,6 +10912,11 @@ "internalType": "bool", "name": "frozen", "type": "bool" + }, + { + "internalType": "address", + "name": "attributionAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.StakeRecord", @@ -10291,7 +10995,7 @@ }, { "internalType": "uint256", - "name": "rewardEpochCheckpoint", + "name": "lastRewardEpochClaimed", "type": "uint256" }, { @@ -10308,6 +11012,11 @@ "internalType": "bool", "name": "frozen", "type": "bool" + }, + { + "internalType": "address", + "name": "attributionAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.StakeRecord[]", @@ -10326,56 +11035,14 @@ "type": "address" }, { - "components": [ - { - "internalType": "uint256", - "name": "id", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amount", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "unfreezeStart", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "timeLock", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "lastUpdateTimestamp", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "rewardEpochCheckpoint", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "initialSharePrice", - "type": "uint256" - }, - { - "internalType": "bool", - "name": "loaded", - "type": "bool" - }, - { - "internalType": "bool", - "name": "frozen", - "type": "bool" - } - ], - "internalType": "struct LibStakingStorage.StakeRecord", - "name": "stakeRecord", - "type": "tuple" + "internalType": "uint256", + "name": "recordId", + "type": "uint256" + }, + { + "internalType": "address", + "name": "userStakerAddress", + "type": "address" }, { "internalType": "uint256", @@ -10430,7 +11097,7 @@ }, { "internalType": "uint256", - "name": "rewardEpochCheckpoint", + "name": "lastRewardEpochClaimed", "type": "uint256" }, { @@ -10447,6 +11114,11 @@ "internalType": "bool", "name": "frozen", "type": "bool" + }, + { + "internalType": "address", + "name": "attributionAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.StakeRecord", @@ -10532,7 +11204,7 @@ }, { "internalType": "uint256", - "name": "rewardEpochCheckpoint", + "name": "lastRewardEpochClaimed", "type": "uint256" }, { @@ -10549,6 +11221,11 @@ "internalType": "bool", "name": "frozen", "type": "bool" + }, + { + "internalType": "address", + "name": "attributionAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.StakeRecord", @@ -10755,18 +11432,43 @@ }, { "internalType": "uint256", - "name": "commission", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRealmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator[]", @@ -10831,18 +11533,43 @@ }, { "internalType": "uint256", - "name": "commission", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRealmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator[]", @@ -10907,18 +11634,43 @@ }, { "internalType": "uint256", - "name": "commission", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRealmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator[]", @@ -11073,6 +11825,16 @@ "internalType": "uint256", "name": "minSelfStakeTimelock", "type": "uint256" + }, + { + "internalType": "uint256", + "name": "minValidatorCountToClampMinimumThreshold", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "minThresholdToClampAt", + "type": "uint256" } ], "internalType": "struct LibStakingStorage.GlobalConfig", @@ -11083,6 +11845,30 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + }, + { + "internalType": "address", + "name": "stakerAddress", + "type": "address" + } + ], + "name": "isActiveShadowValidator", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -11344,6 +12130,25 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" + } + ], + "name": "operatorAddressToStakerAddress", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -11411,12 +12216,12 @@ }, { "internalType": "uint256", - "name": "maxTripleCount", + "name": "maxPresignCount", "type": "uint256" }, { "internalType": "uint256", - "name": "minTripleCount", + "name": "minPresignCount", "type": "uint256" }, { @@ -11426,13 +12231,18 @@ }, { "internalType": "uint256", - "name": "maxTripleConcurrency", + "name": "maxPresignConcurrency", "type": "uint256" }, { "internalType": "bool", "name": "rpcHealthcheckEnabled", "type": "bool" + }, + { + "internalType": "uint256", + "name": "minEpochForRewards", + "type": "uint256" } ], "internalType": "struct LibStakingStorage.RealmConfig", @@ -11488,10 +12298,20 @@ }, { "inputs": [ + { + "internalType": "uint256", + "name": "realmId", + "type": "uint256" + }, { "internalType": "address", "name": "stakerAddress", "type": "address" + }, + { + "internalType": "bool", + "name": "stakerInCurrentValidatorSet", + "type": "bool" } ], "name": "validatorSelfStakeWillExpire", @@ -11559,18 +12379,43 @@ }, { "internalType": "uint256", - "name": "commission", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRealmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator", @@ -11794,6 +12639,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "FORWARDER_CONTRACT", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "HD_KEY_DERIVER_CONTRACT", @@ -12258,7 +13116,7 @@ "name": "ContractResolver" }, "PriceFeed": { - "address": "0x1c85638e118b37167e9298c2268758e058DdfDA0", + "address": "0xf953b3A269d80e3eB0F2947630Da976B896A8C5b", "abi": [ { "inputs": [ @@ -12692,6 +13550,19 @@ "name": "MaxNetworkPriceSet", "type": "event" }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "newTrustedForwarder", + "type": "address" + } + ], + "name": "TrustedForwarderSet", + "type": "event" + }, { "anonymous": false, "inputs": [ @@ -12736,6 +13607,41 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getNodeCapacityConfig", + "outputs": [ + { + "components": [ + { + "internalType": "uint256", + "name": "pkpSignMaxConcurrency", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "encSignMaxConcurrency", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "litActionMaxConcurrency", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "globalMaxCapacity", + "type": "uint256" + } + ], + "internalType": "struct LibPriceFeedStorage.NodeCapacityConfig", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -12807,18 +13713,43 @@ }, { "internalType": "uint256", - "name": "commission", + "name": "commissionRate", "type": "uint256" }, { "internalType": "uint256", - "name": "commissionRate", + "name": "lastRewardEpoch", "type": "uint256" }, { "internalType": "uint256", - "name": "lastRewardEpoch", + "name": "lastRealmId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeAmount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "delegatedStakeWeight", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedFixedCostRewards", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "lastRewardEpochClaimedCommission", "type": "uint256" + }, + { + "internalType": "address", + "name": "operatorAddress", + "type": "address" } ], "internalType": "struct LibStakingStorage.Validator", @@ -12852,6 +13783,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getTrustedForwarder", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -12971,7 +13915,60 @@ "type": "uint256[]" } ], - "name": "setBaseNetworkPrices", + "name": "setBaseNetworkPrices", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "newPrice", + "type": "uint256" + }, + { + "internalType": "uint256[]", + "name": "productIds", + "type": "uint256[]" + } + ], + "name": "setMaxNetworkPrices", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint256", + "name": "pkpSignMaxConcurrency", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "encSignMaxConcurrency", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "litActionMaxConcurrency", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "globalMaxCapacity", + "type": "uint256" + } + ], + "internalType": "struct LibPriceFeedStorage.NodeCapacityConfig", + "name": "config", + "type": "tuple" + } + ], + "name": "setNodeCapacityConfig", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -12979,17 +13976,12 @@ { "inputs": [ { - "internalType": "uint256", - "name": "newPrice", - "type": "uint256" - }, - { - "internalType": "uint256[]", - "name": "productIds", - "type": "uint256[]" + "internalType": "address", + "name": "forwarder", + "type": "address" } ], - "name": "setMaxNetworkPrices", + "name": "setTrustedForwarder", "outputs": [], "stateMutability": "nonpayable", "type": "function" @@ -13064,7 +14056,7 @@ "name": "PriceFeed" }, "Ledger": { - "address": "0xBEc49fA140aCaA83533fB00A2BB19bDdd0290f25", + "address": "0x4C2F7092C2aE51D986bEFEe378e50BD4dB99C901", "abi": [ { "inputs": [ @@ -13652,6 +14644,19 @@ "name": "RewardWithdrawRequest", "type": "event" }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "newTrustedForwarder", + "type": "address" + } + ], + "name": "TrustedForwarderSet", + "type": "event" + }, { "anonymous": false, "inputs": [ @@ -13815,6 +14820,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getTrustedForwarder", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -13987,6 +15005,19 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "forwarder", + "type": "address" + } + ], + "name": "setTrustedForwarder", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -14054,5 +15085,227 @@ } ], "name": "Ledger" + }, + "Forwarder": { + "address": "0xAA292E8611aDF267e563f334Ee42320aC96D0463", + "abi": [ + { + "inputs": [], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "inputs": [], + "name": "InvalidShortString", + "type": "error" + }, + { + "inputs": [], + "name": "SignatureDoesNotMatch", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "string", + "name": "str", + "type": "string" + } + ], + "name": "StringTooLong", + "type": "error" + }, + { + "inputs": [], + "name": "TransactionRevertedSilently", + "type": "error" + }, + { + "anonymous": false, + "inputs": [], + "name": "EIP712DomainChanged", + "type": "event" + }, + { + "inputs": [], + "name": "eip712Domain", + "outputs": [ + { + "internalType": "bytes1", + "name": "fields", + "type": "bytes1" + }, + { + "internalType": "string", + "name": "name", + "type": "string" + }, + { + "internalType": "string", + "name": "version", + "type": "string" + }, + { + "internalType": "uint256", + "name": "chainId", + "type": "uint256" + }, + { + "internalType": "address", + "name": "verifyingContract", + "type": "address" + }, + { + "internalType": "bytes32", + "name": "salt", + "type": "bytes32" + }, + { + "internalType": "uint256[]", + "name": "extensions", + "type": "uint256[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "nonce", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + } + ], + "internalType": "struct Forwarder.ForwardRequest", + "name": "req", + "type": "tuple" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" + } + ], + "name": "execute", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + }, + { + "internalType": "bytes", + "name": "", + "type": "bytes" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "from", + "type": "address" + } + ], + "name": "getNonce", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "nonce", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + } + ], + "internalType": "struct Forwarder.ForwardRequest", + "name": "req", + "type": "tuple" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" + } + ], + "name": "verify", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + } + ], + "name": "Forwarder" } } diff --git a/local-tests/setup/session-sigs/get-eoa-session-sigs.ts b/local-tests/setup/session-sigs/get-eoa-session-sigs.ts index 3e54717b58..f6eb049974 100644 --- a/local-tests/setup/session-sigs/get-eoa-session-sigs.ts +++ b/local-tests/setup/session-sigs/get-eoa-session-sigs.ts @@ -9,7 +9,6 @@ import { AuthSig, LitResourceAbilityRequest, } from '@lit-protocol/types'; -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { LIT_ABILITY, diff --git a/local-tests/setup/session-sigs/get-pkp-session-sigs.ts b/local-tests/setup/session-sigs/get-pkp-session-sigs.ts index ae4fb06dca..5e1152242f 100644 --- a/local-tests/setup/session-sigs/get-pkp-session-sigs.ts +++ b/local-tests/setup/session-sigs/get-pkp-session-sigs.ts @@ -3,7 +3,6 @@ import { type AuthenticationContext, LitResourceAbilityRequest, } from '@lit-protocol/types'; -import { log } from '@lit-protocol/misc'; import { LIT_ABILITY, CENTRALISATION_BY_NETWORK, @@ -43,7 +42,7 @@ export const getPkpAuthContext = ( }), }); - log('[getPkpAuthContext]: ', authContext); + console.log('[getPkpAuthContext]: ', authContext); return authContext; }; diff --git a/local-tests/setup/tinny-environment.ts b/local-tests/setup/tinny-environment.ts index 0891664665..c38e5851f6 100644 --- a/local-tests/setup/tinny-environment.ts +++ b/local-tests/setup/tinny-environment.ts @@ -1,5 +1,6 @@ import { LitContracts } from '@lit-protocol/contracts-sdk'; import { LitNodeClient } from '@lit-protocol/lit-node-client'; +import { getChildLogger } from '@lit-protocol/logger'; import { AuthSig, LitContractContext, @@ -8,7 +9,6 @@ import { import { ProcessEnvs, TinnyEnvConfig } from './tinny-config'; import { TinnyPerson } from './tinny-person'; -import { createSiweMessage, generateAuthSig } from '@lit-protocol/auth-helpers'; import { CENTRALISATION_BY_NETWORK, LIT_NETWORK, @@ -20,7 +20,9 @@ import { ethers, Signer } from 'ethers'; import { ShivaClient, TestnetClient } from './shiva-client'; import { toErrorWithMessage } from './tinny-utils'; -console.log('checking env', process.env['DEBUG']); +const logger = getChildLogger({ module: 'tinny-environment' }); + +logger.info({ msg: 'checking env', env: process.env['DEBUG'] }); const DEFAULT_ANVIL_PRIVATE_KEYS = [ '0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d', @@ -49,7 +51,7 @@ export class TinnyEnvironment { DEBUG: process.env['DEBUG'] === 'true', REQUEST_PER_KILOSECOND: parseInt(process.env['REQUEST_PER_KILOSECOND']) || - (process.env['NETWORK'] as LIT_NETWORK_VALUES) === 'datil-dev' + (process.env['NETWORK'] as LIT_NETWORK_VALUES) === LIT_NETWORK.NagaDev ? 1 : 200, LIT_RPC_URL: process.env['LIT_RPC_URL'], @@ -147,10 +149,10 @@ export class TinnyEnvironment { ); } - console.log( - '[𐬺🧪 Tinny Environment𐬺] Done configuring environment current config: ', - this.processEnvs - ); + logger.info({ + msg: '[𐬺🧪 Tinny Environment𐬺] Done configuring environment current config: ', + processEnvs: this.processEnvs, + }); } world: Map = new Map(); @@ -183,11 +185,12 @@ export class TinnyEnvironment { if (index !== -1) { // If an available key is found this.processEnvs.KEY_IN_USE[index] = true; // Mark the key as in use - // console.log('[𐬺🧪 Tinny Environment𐬺] 🔑 Selected key at index', index); // Log a message indicating that we have selected a key + // logger.info({ msg: '[𐬺🧪 Tinny Environment𐬺] 🔑 Selected key at index', index }); // Log a message indicating that we have selected a key return { privateKey: this.processEnvs.PRIVATE_KEYS[index], index }; // Return the key and its index } else { - console.log('[𐬺🧪 Tinny Environment𐬺] No available keys. Waiting...', { + logger.info({ + msg: '[𐬺🧪 Tinny Environment𐬺] No available keys. Waiting...', keysInUse: this.processEnvs.KEY_IN_USE, }); // Log a message indicating that we are waiting // Wait for the specified interval before checking again @@ -205,9 +208,9 @@ export class TinnyEnvironment { releasePrivateKeyFromUser(user: TinnyPerson) { const index = this.processEnvs.PRIVATE_KEYS.indexOf(user.privateKey); this.processEnvs.KEY_IN_USE[index] = false; - // console.log( - // `[𐬺🧪 Tinny Environment𐬺] 🪽 Released key at index ${index}. Thank you for your service!` - // ); + // logger.info({ + // msg: `[𐬺🧪 Tinny Environment𐬺] 🪽 Released key at index ${index}. Thank you for your service!` + // }); } /** @@ -216,9 +219,9 @@ export class TinnyEnvironment { */ releasePrivateKey(index: number) { this.processEnvs.KEY_IN_USE[index] = false; - // console.log( - // `[𐬺🧪 Tinny Environment𐬺] 🪽 Released key at index ${index}. Thank you for your service!` - // ); + // logger.info({ + // msg: `[𐬺🧪 Tinny Environment𐬺] 🪽 Released key at index ${index}. Thank you for your service!` + // }); } /** @@ -234,9 +237,9 @@ export class TinnyEnvironment { */ async setupLitNodeClient() { - console.log('[𐬺🧪 Tinny Environment𐬺] Setting up LitNodeClient'); + logger.info({ msg: '[𐬺🧪 Tinny Environment𐬺] Setting up LitNodeClient' }); - console.log('this.network:', this.network); + logger.info({ msg: 'network', network: this.network }); const centralisation = CENTRALISATION_BY_NETWORK[this.network]; if (this.network === LIT_NETWORK.Custom || centralisation === 'unknown') { @@ -324,7 +327,7 @@ export class TinnyEnvironment { * @throws Error if the name is not provided. */ async createNewPerson(name: string) { - console.log('[𐬺🧪 Tinny Environment𐬺] Creating new person:', name); + logger.info({ msg: '[𐬺🧪 Tinny Environment𐬺] Creating new person', name }); if (!name) { throw new Error('Name is required'); } @@ -373,7 +376,7 @@ export class TinnyEnvironment { async init() { try { if (this.processEnvs.NO_SETUP) { - console.log('[𐬺🧪 Tinny Environment𐬺] Skipping setup'); + logger.info({ msg: '[𐬺🧪 Tinny Environment𐬺] Skipping setup' }); return; } if (this.network === LIT_NETWORK.Custom && this.processEnvs.USE_SHIVA) { @@ -381,9 +384,9 @@ export class TinnyEnvironment { // wait for the testnet to be active before we start the tests. let state = await this.testnet.pollTestnetForActive(); if (state === `UNKNOWN`) { - console.log( - 'Testnet state found to be Unknown meaning there was an error with testnet creation. shutting down' - ); + logger.info({ + msg: 'Testnet state found to be Unknown meaning there was an error with testnet creation. shutting down', + }); throw new Error(`Error while creating testnet, aborting test run`); } @@ -398,10 +401,10 @@ export class TinnyEnvironment { await this.setupSuperCapacityDelegationAuthSig(); } catch (e) { const err = toErrorWithMessage(e); - console.log( - `[𐬺🧪 Tinny Environment𐬺] Failed to init() tinny ${err.message}` - ); - console.log(err.stack); + logger.error({ + msg: `[𐬺🧪 Tinny Environment𐬺] Failed to init() tinny ${err.message}`, + stack: err.stack, + }); process.exit(1); } } @@ -417,7 +420,7 @@ export class TinnyEnvironment { ) { await this.testnet.stopTestnet(); } else { - console.log('skipping testnet shutdown.'); + logger.info('skipping testnet shutdown.'); } } //============= END SHIVA ENDPOINTS ============= @@ -498,9 +501,9 @@ export class TinnyEnvironment { // get wallet balance const balance = await wallet.getBalance(); - console.log('this.rpc:', rpc); - console.log('this.wallet.address', wallet.address); - console.log('Balance:', balance.toString()); + logger.info({ msg: 'this.rpc:', rpc }); + logger.info({ msg: 'this.wallet.address', address: wallet.address }); + logger.info({ msg: 'Balance:', balance: balance.toString() }); const transferTx = await wallet.sendTransaction({ to: capacityCreditWallet.address, @@ -520,7 +523,7 @@ export class TinnyEnvironment { } if (!this.contractsClient) { - console.log('❗️Contracts client not initialized'); + logger.info('❗️Contracts client not initialized'); process.exit(); } @@ -537,7 +540,7 @@ export class TinnyEnvironment { }; async mintSuperCapacityDelegationAuthSig(wallet: Signer) { - console.log( + logger.info( '[𐬺🧪 Tinny Environment𐬺] Mint a Capacity Credits NFT and get a capacity delegation authSig with it' ); @@ -552,9 +555,9 @@ export class TinnyEnvironment { ).capacityDelegationAuthSig; } catch (e: any) { if (e.message.includes(`Can't allocate capacity beyond the global max`)) { - console.log('❗️Skipping capacity delegation auth sig setup.', e); + logger.info('❗️Skipping capacity delegation auth sig setup.', e); } else { - console.log( + logger.info( '❗️Error while setting up capacity delegation auth sig', e ); diff --git a/local-tests/setup/tinny-person.ts b/local-tests/setup/tinny-person.ts index 9d1fad5cc0..1890d59a0e 100644 --- a/local-tests/setup/tinny-person.ts +++ b/local-tests/setup/tinny-person.ts @@ -1,19 +1,19 @@ -import { - AuthSig, - generateAuthSig, - createSiweMessage, -} from '@lit-protocol/auth-helpers'; +import { generateAuthSig, createSiweMessage } from '@lit-protocol/auth-helpers'; import { LitContracts } from '@lit-protocol/contracts-sdk'; import { AuthMethod, + AuthSig, BaseSiweMessage, LitContractContext, } from '@lit-protocol/types'; import { ethers } from 'ethers'; import { PKPInfo, TinnyEnvConfig } from './tinny-config'; -import { EthWalletProvider } from '@lit-protocol/lit-auth-client'; import { AUTH_METHOD_SCOPE, LIT_NETWORK } from '@lit-protocol/constants'; +import { authenticators } from '@lit-protocol/auth'; + +const { MetamaskAuthenticator } = authenticators; + export class TinnyPerson { public privateKey: string; public wallet: ethers.Wallet; @@ -53,7 +53,7 @@ export class TinnyPerson { } async getAuthMethodId(): Promise { - return EthWalletProvider.authMethodId(this.authMethod); + return MetamaskAuthenticator.authMethodId(this.authMethod); } /** @@ -116,7 +116,7 @@ export class TinnyPerson { console.log( '[𐬺🧪 Tinny Person𐬺] Crafting an authMethod from the authSig for the eth wallet auth method...' ); - this.authMethod = await EthWalletProvider.authenticate({ + this.authMethod = await MetamaskAuthenticator.authenticate({ signer: this.wallet, litNodeClient: this.envConfig.litNodeClient, }); diff --git a/local-tests/test.ts b/local-tests/test.ts index cb1a898ec4..f4ef5e7a81 100644 --- a/local-tests/test.ts +++ b/local-tests/test.ts @@ -1,293 +1,27 @@ -import { TinnyEnvironment } from './setup/tinny-environment'; -import { runInBand, runTestsParallel } from './setup/tinny-operations'; - -// import { testBundleSpeed } from './tests/test-bundle-speed'; -// import { testExample } from './tests/test-example'; - -import { testUseEoaSessionSigsToExecuteJsSigningInParallel } from './tests/testUseEoaSessionSigsToExecuteJsSigningInParallel'; -import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigning } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigning'; -import { testUseValidLitActionCodeGeneratedSessionSigsToPkpSign } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToPkpSign'; -import { testUseValidLitActionIpfsCodeGeneratedSessionSigsToExecuteJsSigning } from './tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToExecuteJsSigning'; - -import { testUseEoaSessionSigsToEncryptDecryptFile } from './tests/testUseEoaSessionSigsToEncryptDecryptFile'; -import { testUseEoaSessionSigsToEncryptDecryptString } from './tests/testUseEoaSessionSigsToEncryptDecryptString'; -import { testUseEoaSessionSigsToEncryptDecryptUint8Array } from './tests/testUseEoaSessionSigsToEncryptDecryptUint8Array'; -import { testUseEoaSessionSigsToExecuteJsClaimKeys } from './tests/testUseEoaSessionSigsToExecuteJsClaimKeys'; -import { testUseEoaSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUseEoaSessionSigsToExecuteJsClaimMultipleKeys'; -import { testUseEoaSessionSigsToExecuteJsConsoleLog } from './tests/testUseEoaSessionSigsToExecuteJsConsoleLog'; -import { testUseEoaSessionSigsToExecuteJsJsonResponse } from './tests/testUseEoaSessionSigsToExecuteJsJsonResponse'; -import { testUseEoaSessionSigsToExecuteJsSigning } from './tests/testUseEoaSessionSigsToExecuteJsSigning'; -import { testUseEoaSessionSigsToPkpSign } from './tests/testUseEoaSessionSigsToPkpSign'; -import { testUseEoaSessionSigsToRequestSingleResponse } from './tests/testUseEoaSessionSigsToRequestSingleResponse'; - -import { testUseInvalidLitActionCodeToGenerateSessionSigs } from './tests/testUseInvalidLitActionCodeToGenerateSessionSigs'; -import { testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString'; -import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel'; - -import { testUsePkpSessionSigsToEncryptDecryptFile } from './tests/testUsePkpSessionSigsToEncryptDecryptFile'; -import { testUsePkpSessionSigsToEncryptDecryptString } from './tests/testUsePkpSessionSigsToEncryptDecryptString'; -import { testUsePkpSessionSigsToExecuteJsClaimKeys } from './tests/testUsePkpSessionSigsToExecuteJsClaimKeys'; -import { testUsePkpSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUsePkpSessionSigsToExecuteJsClaimMultipleKeys'; -import { testUsePkpSessionSigsToExecuteJsConsoleLog } from './tests/testUsePkpSessionSigsToExecuteJsConsoleLog'; -import { testUsePkpSessionSigsToExecuteJsJsonResponse } from './tests/testUsePkpSessionSigsToExecuteJsJsonResponse'; -import { testUsePkpSessionSigsToExecuteJsSigning } from './tests/testUsePkpSessionSigsToExecuteJsSigning'; -import { testUsePkpSessionSigsToExecuteJsSigningInParallel } from './tests/testUsePkpSessionSigsToExecuteJsSigningInParallel'; -import { testUsePkpSessionSigsToPkpSign } from './tests/testUsePkpSessionSigsToPkpSign'; - -import { testUseInvalidLitActionIpfsCodeToGenerateSessionSigs } from './tests/testUseInvalidLitActionIpfsCodeToGenerateSessionSigs'; -import { testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile'; -import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimKeys } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimKeys'; -import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimMultipleKeys'; -import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsConsoleLog } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsConsoleLog'; -import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsJsonResponse } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsJsonResponse'; -import { testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign } from './tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign'; -import { testPkpEthersWithEoaSessionSigsToEthSign } from './tests/testPkpEthersWithEoaSessionSigsToEthSign'; -import { testPkpEthersWithEoaSessionSigsToEthSignTransaction } from './tests/testPkpEthersWithEoaSessionSigsToEthSignTransaction'; -import { testPkpEthersWithEoaSessionSigsToPersonalSign } from './tests/testPkpEthersWithEoaSessionSigsToPersonalSign'; -import { testPkpEthersWithEoaSessionSigsToSendTx } from './tests/testPkpEthersWithEoaSessionSigsToSendTx'; -import { testPkpEthersWithEoaSessionSigsToSignMessage } from './tests/testPkpEthersWithEoaSessionSigsToSignMessage'; -import { testPkpEthersWithEoaSessionSigsToSignWithAuthContext } from './tests/testPkpEthersWithEoaSessionSigsToSignWithAuthContext'; -import { testPkpEthersWithPkpSessionSigsToEthSign } from './tests/testPkpEthersWithPkpSessionSigsToEthSign'; -import { testPkpEthersWithPkpSessionSigsToPersonalSign } from './tests/testPkpEthersWithPkpSessionSigsToPersonalSign'; -import { testPkpEthersWithPkpSessionSigsToSendTx } from './tests/testPkpEthersWithPkpSessionSigsToSendTx'; -import { testPkpEthersWithPkpSessionSigsToSignMessage } from './tests/testPkpEthersWithPkpSessionSigsToSignMessage'; - -import { testPkpEthersWithEoaSessionSigsToEthSignTypedData } from './tests/testPkpEthersWithEoaSessionSigsToEthSignTypedData'; -import { testPkpEthersWithEoaSessionSigsToEthSignTypedDataUtil } from './tests/testPkpEthersWithEoaSessionSigsToEthSignTypedDataUtil'; -import { testPkpEthersWithEoaSessionSigsToEthSignTypedDataV1 } from './tests/testPkpEthersWithEoaSessionSigsToEthSignTypedDataV1'; -import { testPkpEthersWithEoaSessionSigsToEthSignTypedDataV3 } from './tests/testPkpEthersWithEoaSessionSigsToEthSignTypedDataV3'; -import { testPkpEthersWithEoaSessionSigsToEthSignTypedDataV4 } from './tests/testPkpEthersWithEoaSessionSigsToEthSignTypedDataV4'; -import { testPkpEthersWithLitActionSessionSigsToEthSign } from './tests/testPkpEthersWithLitActionSessionSigsToEthSign'; -import { testPkpEthersWithLitActionSessionSigsToEthSignTransaction } from './tests/testPkpEthersWithLitActionSessionSigsToEthSignTransaction'; -import { testPkpEthersWithLitActionSessionSigsToEthSignTypedData } from './tests/testPkpEthersWithLitActionSessionSigsToEthSignTypedData'; -import { testPkpEthersWithLitActionSessionSigsToEthSignTypedDataUtil } from './tests/testPkpEthersWithLitActionSessionSigsToEthSignTypedDataUtil'; -import { testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV1 } from './tests/testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV1'; -import { testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV3 } from './tests/testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV3'; -import { testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV4 } from './tests/testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV4'; -import { testPkpEthersWithLitActionSessionSigsToPersonalSign } from './tests/testPkpEthersWithLitActionSessionSigsToPersonalSign'; -import { testPkpEthersWithLitActionSessionSigsToSendTx } from './tests/testPkpEthersWithLitActionSessionSigsToSendTx'; -import { testPkpEthersWithLitActionSessionSigsToSignMessage } from './tests/testPkpEthersWithLitActionSessionSigsToSignMessage'; -import { testPkpEthersWithPkpSessionSigsToEthSignTransaction } from './tests/testPkpEthersWithPkpSessionSigsToEthSignTransaction'; -import { testPkpEthersWithPkpSessionSigsToEthSignTypedData } from './tests/testPkpEthersWithPkpSessionSigsToEthSignTypedData'; -import { testPkpEthersWithPkpSessionSigsToEthSignTypedDataUtil } from './tests/testPkpEthersWithPkpSessionSigsToEthSignTypedDataUtil'; -import { testPkpEthersWithPkpSessionSigsToEthSignTypedDataV1 } from './tests/testPkpEthersWithPkpSessionSigsToEthSignTypedDataV1'; -import { testPkpEthersWithPkpSessionSigsToEthSignTypedDataV3 } from './tests/testPkpEthersWithPkpSessionSigsToEthSignTypedDataV3'; -import { testPkpEthersWithPkpSessionSigsToEthSignTypedDataV4 } from './tests/testPkpEthersWithPkpSessionSigsToEthSignTypedDataV4'; - -import { testUseCustomAuthSessionSigsToPkpSignExecuteJs } from './tests/testUseCustomAuthSessionSigsToPkpSignExecuteJs'; +import { setLoggerOptions, logger } from '@lit-protocol/logger'; -import { testExecuteJsBroadcastAndCollect } from './tests/testExecuteJsBroadcastAndCollect'; -import { testExecuteJsDecryptAndCombine } from './tests/testExecuteJsDecryptAndCombine'; -import { testExecuteJsSignAndCombineEcdsa } from './tests/testExecuteJsSignAndCombineEcdsa'; -import { testRelayer } from './tests/testRelayer'; - -// import { testEthereumSignMessageGeneratedKey } from './tests/wrapped-keys/testEthereumSignMessageGeneratedKey'; -// import { testEthereumBroadcastTransactionGeneratedKey } from './tests/wrapped-keys/testEthereumBroadcastTransactionGeneratedKey'; -// import { testEthereumSignMessageWrappedKey } from './tests/wrapped-keys/testEthereumSignMessageWrappedKey'; -// import { testFailEthereumSignTransactionWrappedKeyInvalidDecryption } from './tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyInvalidDecryption'; -// import { testEthereumSignTransactionWrappedKey } from './tests/wrapped-keys/testEthereumSignTransactionWrappedKey'; -// import { testFailEthereumSignTransactionWrappedKeyWithInvalidParam } from './tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithInvalidParam'; -// import { testFailEthereumSignTransactionWrappedKeyWithMissingParam } from './tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithMissingParam'; -// import { testEthereumBroadcastTransactionWrappedKey } from './tests/wrapped-keys/testEthereumBroadcastTransactionWrappedKey'; -// import { testEthereumBroadcastWrappedKeyWithFetchGasParams } from './tests/wrapped-keys/testEthereumBroadcastWrappedKeyWithFetchGasParams'; -// import { testImportWrappedKey } from './tests/wrapped-keys/testImportWrappedKey'; -// import { testGenerateEthereumWrappedKey } from './tests/wrapped-keys/testGenerateEthereumWrappedKey'; -// import { testGenerateSolanaWrappedKey } from './tests/wrapped-keys/testGenerateSolanaWrappedKey'; -// import { testFailImportWrappedKeysWithSamePrivateKey } from './tests/wrapped-keys/testFailImportWrappedKeysWithSamePrivateKey'; -// import { testFailImportWrappedKeysWithEoaSessionSig } from './tests/wrapped-keys/testFailImportWrappedKeysWithEoaSessionSig'; -// import { testFailImportWrappedKeysWithMaxExpirySessionSig } from './tests/wrapped-keys/testFailImportWrappedKeysWithMaxExpirySessionSig'; -// import { testFailImportWrappedKeysWithInvalidSessionSig } from './tests/wrapped-keys/testFailImportWrappedKeysWithInvalidSessionSig'; -// import { testFailImportWrappedKeysWithExpiredSessionSig } from './tests/wrapped-keys/testFailImportWrappedKeysWithExpiredSessionSig'; -// import { testExportWrappedKey } from './tests/wrapped-keys/testExportWrappedKey'; -// import { testSignMessageWithSolanaEncryptedKey } from './tests/wrapped-keys/testSignMessageWithSolanaEncryptedKey'; -// import { testSignTransactionWithSolanaEncryptedKey } from './tests/wrapped-keys/testSignTransactionWithSolanaEncryptedKey'; -// import { testBatchGeneratePrivateKeys } from './tests/wrapped-keys/testBatchGeneratePrivateKeys'; -// import { testFailBatchGeneratePrivateKeysAtomic } from './tests/wrapped-keys/testFailStoreEncryptedKeyBatchIsAtomic'; +setLoggerOptions({ + transport: { + target: 'pino-pretty', + }, +}); +import { TinnyEnvironment } from './setup/tinny-environment'; +import { runInBand, runTestsParallel } from './setup/tinny-operations'; +import { tinnyTests } from './tests'; import { setLitActionsCodeToLocal } from './tests/wrapped-keys/util'; // Use the current LIT action code to test against setLitActionsCodeToLocal(); (async () => { - console.log('[𐬺🧪 Tinny𐬺] Running tests...'); + logger.info({ msg: '[𐬺🧪 Tinny𐬺] Running tests...' }); const devEnv = new TinnyEnvironment(); await devEnv.init(); - const relayerTests = { - testRelayer, - }; - - // --filter=WrappedKey - const wrappedKeysTests = { - // // -- valid cases - // testBatchGeneratePrivateKeys, - // testEthereumSignMessageGeneratedKey, - // testEthereumBroadcastTransactionGeneratedKey, - // testEthereumSignMessageWrappedKey, - // testEthereumSignTransactionWrappedKey, - // testEthereumBroadcastTransactionWrappedKey, - // testEthereumBroadcastWrappedKeyWithFetchGasParams, - // - // // -- generate wrapped keys - // testGenerateEthereumWrappedKey, - // testGenerateSolanaWrappedKey, - // - // // -- import wrapped keys - // testImportWrappedKey, - // - // // -- export wrapped keys - // testExportWrappedKey, - // - // // -- solana wrapped keys - // testSignMessageWithSolanaEncryptedKey, - // testSignTransactionWithSolanaEncryptedKey, - // - // // -- invalid cases - // testFailEthereumSignTransactionWrappedKeyWithMissingParam, - // testFailEthereumSignTransactionWrappedKeyWithInvalidParam, - // testFailEthereumSignTransactionWrappedKeyInvalidDecryption, - // testFailBatchGeneratePrivateKeysAtomic, - // - // // -- import wrapped keys - // testFailImportWrappedKeysWithSamePrivateKey, - // testFailImportWrappedKeysWithEoaSessionSig, - // testFailImportWrappedKeysWithMaxExpirySessionSig, - // testFailImportWrappedKeysWithInvalidSessionSig, - // testFailImportWrappedKeysWithExpiredSessionSig, - }; - - const eoaSessionSigsTests = { - testUseEoaSessionSigsToExecuteJsSigning, - testUseEoaSessionSigsToRequestSingleResponse, - testUseEoaSessionSigsToPkpSign, - testUseEoaSessionSigsToExecuteJsSigningInParallel, - testUseEoaSessionSigsToExecuteJsClaimKeys, - testUseEoaSessionSigsToExecuteJsClaimMultipleKeys, - testUseEoaSessionSigsToExecuteJsJsonResponse, - testUseEoaSessionSigsToExecuteJsConsoleLog, - testUseEoaSessionSigsToEncryptDecryptString, - testUseEoaSessionSigsToEncryptDecryptUint8Array, - testUseEoaSessionSigsToEncryptDecryptFile, - }; - - const pkpSessionSigsTests = { - testUsePkpSessionSigsToExecuteJsSigning, - testUsePkpSessionSigsToPkpSign, - testUsePkpSessionSigsToExecuteJsSigningInParallel, - testUsePkpSessionSigsToExecuteJsClaimKeys, - testUsePkpSessionSigsToExecuteJsClaimMultipleKeys, - testUsePkpSessionSigsToExecuteJsJsonResponse, - testUsePkpSessionSigsToExecuteJsConsoleLog, - testUsePkpSessionSigsToEncryptDecryptString, - testUsePkpSessionSigsToEncryptDecryptFile, - }; - - const litActionSessionSigsTests = { - testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigning, - testUseValidLitActionCodeGeneratedSessionSigsToPkpSign, - testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel, - testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimKeys, - testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimMultipleKeys, - testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsJsonResponse, - testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsConsoleLog, - testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString, - testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile, - - // -- invalid cases - testUseInvalidLitActionIpfsCodeToGenerateSessionSigs, - - // -- custom auth methods - testUseCustomAuthSessionSigsToPkpSignExecuteJs, - }; - - const litActionIpfsIdSessionSigsTests = { - testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign, - testUseValidLitActionIpfsCodeGeneratedSessionSigsToExecuteJsSigning, - - // -- invalid cases - testUseInvalidLitActionCodeToGenerateSessionSigs, - }; - - const eip1271AuthSigTests = { - // testKeccakEip1271AuthSigToEncryptDecryptString, - // testShaEip1271AuthSigToEncryptDecryptString, - }; - - const pkpEthersTest = { - eoaSessionSigs: { - testPkpEthersWithEoaSessionSigsToSignWithAuthContext, - testPkpEthersWithEoaSessionSigsToSignMessage, - testPkpEthersWithEoaSessionSigsToEthSign, - testPkpEthersWithEoaSessionSigsToPersonalSign, - testPkpEthersWithEoaSessionSigsToSendTx, - testPkpEthersWithEoaSessionSigsToEthSignTransaction, - testPkpEthersWithEoaSessionSigsToEthSignTypedDataV1, - testPkpEthersWithEoaSessionSigsToEthSignTypedDataV3, - testPkpEthersWithEoaSessionSigsToEthSignTypedDataV4, - testPkpEthersWithEoaSessionSigsToEthSignTypedData, - testPkpEthersWithEoaSessionSigsToEthSignTypedDataUtil, - }, - pkpSessionSigs: { - testPkpEthersWithPkpSessionSigsToSignMessage, - testPkpEthersWithPkpSessionSigsToEthSign, - testPkpEthersWithPkpSessionSigsToPersonalSign, - testPkpEthersWithPkpSessionSigsToSendTx, - testPkpEthersWithPkpSessionSigsToEthSignTransaction, - testPkpEthersWithPkpSessionSigsToEthSignTypedDataV1, - testPkpEthersWithPkpSessionSigsToEthSignTypedDataV3, - testPkpEthersWithPkpSessionSigsToEthSignTypedDataV4, - testPkpEthersWithPkpSessionSigsToEthSignTypedData, - testPkpEthersWithPkpSessionSigsToEthSignTypedDataUtil, - }, - litActionSessionSigs: { - testPkpEthersWithLitActionSessionSigsToSignMessage, - testPkpEthersWithLitActionSessionSigsToEthSign, - testPkpEthersWithLitActionSessionSigsToPersonalSign, - testPkpEthersWithLitActionSessionSigsToSendTx, - testPkpEthersWithLitActionSessionSigsToEthSignTransaction, - testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV1, - testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV3, - testPkpEthersWithLitActionSessionSigsToEthSignTypedDataV4, - testPkpEthersWithLitActionSessionSigsToEthSignTypedData, - testPkpEthersWithLitActionSessionSigsToEthSignTypedDataUtil, - }, - }; - - const litActionCombiningTests = { - ecdsaSignAndCombine: { - testExecuteJsSignAndCombineEcdsa, - }, - decryptAndCombine: { - testExecuteJsDecryptAndCombine, - }, - broadcastAndCombine: { - testExecuteJsBroadcastAndCollect, - }, - }; - const testConfig = { - tests: { - // testExample, - // testBundleSpeed, - ...eoaSessionSigsTests, - ...pkpSessionSigsTests, - ...litActionSessionSigsTests, - ...litActionIpfsIdSessionSigsTests, - ...eip1271AuthSigTests, - - ...pkpEthersTest.eoaSessionSigs, - ...pkpEthersTest.pkpSessionSigs, - ...pkpEthersTest.litActionSessionSigs, - - ...litActionCombiningTests.broadcastAndCombine, - ...litActionCombiningTests.decryptAndCombine, - ...litActionCombiningTests.ecdsaSignAndCombine, - - ...relayerTests, - ...wrappedKeysTests, - }, + tests: tinnyTests, devEnv, }; diff --git a/local-tests/tests.ts b/local-tests/tests.ts index ed882cd6d6..19ef4e873c 100644 --- a/local-tests/tests.ts +++ b/local-tests/tests.ts @@ -1,3 +1,4 @@ +import { testUseEoaSessionSigsToRequestSingleResponse } from './tests/testUseEoaSessionSigsToRequestSingleResponse'; import { testUseEoaSessionSigsToExecuteJsSigning } from './tests/testUseEoaSessionSigsToExecuteJsSigning'; import { testUseEoaSessionSigsToPkpSign } from './tests/testUseEoaSessionSigsToPkpSign'; import { testUsePkpSessionSigsToExecuteJsSigning } from './tests/testUsePkpSessionSigsToExecuteJsSigning'; @@ -10,24 +11,17 @@ import { testUseEoaSessionSigsToExecuteJsClaimKeys } from './tests/testUseEoaSes import { testUseEoaSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUseEoaSessionSigsToExecuteJsClaimMultipleKeys'; import { testUseEoaSessionSigsToExecuteJsJsonResponse } from './tests/testUseEoaSessionSigsToExecuteJsJsonResponse'; import { testUseEoaSessionSigsToExecuteJsConsoleLog } from './tests/testUseEoaSessionSigsToExecuteJsConsoleLog'; -import { testUseEoaSessionSigsToEncryptDecryptString } from './tests/testUseEoaSessionSigsToEncryptDecryptString'; -import { testUseEoaSessionSigsToEncryptDecryptUint8Array } from './tests/testUseEoaSessionSigsToEncryptDecryptUint8Array'; -import { testUsePkpSessionSigsToEncryptDecryptString } from './tests/testUsePkpSessionSigsToEncryptDecryptString'; -import { testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString'; import { testUseInvalidLitActionCodeToGenerateSessionSigs } from './tests/testUseInvalidLitActionCodeToGenerateSessionSigs'; -import { testUseEoaSessionSigsToEncryptDecryptFile } from './tests/testUseEoaSessionSigsToEncryptDecryptFile'; import { testUsePkpSessionSigsToExecuteJsSigningInParallel } from './tests/testUsePkpSessionSigsToExecuteJsSigningInParallel'; import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel'; import { testUsePkpSessionSigsToExecuteJsClaimKeys } from './tests/testUsePkpSessionSigsToExecuteJsClaimKeys'; import { testUsePkpSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUsePkpSessionSigsToExecuteJsClaimMultipleKeys'; import { testUsePkpSessionSigsToExecuteJsJsonResponse } from './tests/testUsePkpSessionSigsToExecuteJsJsonResponse'; import { testUsePkpSessionSigsToExecuteJsConsoleLog } from './tests/testUsePkpSessionSigsToExecuteJsConsoleLog'; -import { testUsePkpSessionSigsToEncryptDecryptFile } from './tests/testUsePkpSessionSigsToEncryptDecryptFile'; import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimKeys } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimKeys'; import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimMultipleKeys'; import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsJsonResponse } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsJsonResponse'; import { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsConsoleLog } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsConsoleLog'; -import { testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile'; import { testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign } from './tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign'; import { testUseInvalidLitActionIpfsCodeToGenerateSessionSigs } from './tests/testUseInvalidLitActionIpfsCodeToGenerateSessionSigs'; import { testPkpEthersWithEoaSessionSigsToSignMessage } from './tests/testPkpEthersWithEoaSessionSigsToSignMessage'; @@ -64,7 +58,6 @@ import { testPkpEthersWithPkpSessionSigsToEthSignTypedDataUtil } from './tests/t import { testPkpEthersWithLitActionSessionSigsToEthSignTypedDataUtil } from './tests/testPkpEthersWithLitActionSessionSigsToEthSignTypedDataUtil'; import { testUseCustomAuthSessionSigsToPkpSignExecuteJs } from './tests/testUseCustomAuthSessionSigsToPkpSignExecuteJs'; import { testExecuteJsSignAndCombineEcdsa } from './tests/testExecuteJsSignAndCombineEcdsa'; -import { testExecuteJsDecryptAndCombine } from './tests/testExecuteJsDecryptAndCombine'; import { testExecuteJsBroadcastAndCollect } from './tests/testExecuteJsBroadcastAndCollect'; import { testRelayer } from './tests/testRelayer'; @@ -103,24 +96,17 @@ export { testUseEoaSessionSigsToExecuteJsClaimKeys } from './tests/testUseEoaSes export { testUseEoaSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUseEoaSessionSigsToExecuteJsClaimMultipleKeys'; export { testUseEoaSessionSigsToExecuteJsJsonResponse } from './tests/testUseEoaSessionSigsToExecuteJsJsonResponse'; export { testUseEoaSessionSigsToExecuteJsConsoleLog } from './tests/testUseEoaSessionSigsToExecuteJsConsoleLog'; -export { testUseEoaSessionSigsToEncryptDecryptString } from './tests/testUseEoaSessionSigsToEncryptDecryptString'; -export { testUseEoaSessionSigsToEncryptDecryptUint8Array } from './tests/testUseEoaSessionSigsToEncryptDecryptUint8Array'; -export { testUsePkpSessionSigsToEncryptDecryptString } from './tests/testUsePkpSessionSigsToEncryptDecryptString'; -export { testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString'; export { testUseInvalidLitActionCodeToGenerateSessionSigs } from './tests/testUseInvalidLitActionCodeToGenerateSessionSigs'; -export { testUseEoaSessionSigsToEncryptDecryptFile } from './tests/testUseEoaSessionSigsToEncryptDecryptFile'; export { testUsePkpSessionSigsToExecuteJsSigningInParallel } from './tests/testUsePkpSessionSigsToExecuteJsSigningInParallel'; export { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel'; export { testUsePkpSessionSigsToExecuteJsClaimKeys } from './tests/testUsePkpSessionSigsToExecuteJsClaimKeys'; export { testUsePkpSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUsePkpSessionSigsToExecuteJsClaimMultipleKeys'; export { testUsePkpSessionSigsToExecuteJsJsonResponse } from './tests/testUsePkpSessionSigsToExecuteJsJsonResponse'; export { testUsePkpSessionSigsToExecuteJsConsoleLog } from './tests/testUsePkpSessionSigsToExecuteJsConsoleLog'; -export { testUsePkpSessionSigsToEncryptDecryptFile } from './tests/testUsePkpSessionSigsToEncryptDecryptFile'; export { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimKeys } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimKeys'; export { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimMultipleKeys } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimMultipleKeys'; export { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsJsonResponse } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsJsonResponse'; export { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsConsoleLog } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsConsoleLog'; -export { testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile } from './tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile'; export { testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign } from './tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign'; export { testUseInvalidLitActionIpfsCodeToGenerateSessionSigs } from './tests/testUseInvalidLitActionIpfsCodeToGenerateSessionSigs'; @@ -163,7 +149,6 @@ export { testPkpEthersWithPkpSessionSigsToEthSignTypedDataUtil } from './tests/t export { testPkpEthersWithLitActionSessionSigsToEthSignTypedDataUtil } from './tests/testPkpEthersWithLitActionSessionSigsToEthSignTypedDataUtil'; export { testUseCustomAuthSessionSigsToPkpSignExecuteJs } from './tests/testUseCustomAuthSessionSigsToPkpSignExecuteJs'; export { testExecuteJsSignAndCombineEcdsa } from './tests/testExecuteJsSignAndCombineEcdsa'; -export { testExecuteJsDecryptAndCombine } from './tests/testExecuteJsDecryptAndCombine'; export { testExecuteJsBroadcastAndCollect } from './tests/testExecuteJsBroadcastAndCollect'; export { testRelayer } from './tests/testRelayer'; @@ -197,7 +182,7 @@ const relayerTests = { // --filter=WrappedKey const wrappedKeysTests = { - // -- valid cases + // // -- valid cases // testBatchGeneratePrivateKeys, // testEthereumSignMessageGeneratedKey, // testEthereumBroadcastTransactionGeneratedKey, @@ -205,22 +190,22 @@ const wrappedKeysTests = { // testEthereumSignTransactionWrappedKey, // testEthereumBroadcastTransactionWrappedKey, // testEthereumBroadcastWrappedKeyWithFetchGasParams, - // -- generate wrapped keys + // // -- generate wrapped keys // testGenerateEthereumWrappedKey, // testGenerateSolanaWrappedKey, - // -- import wrapped keys + // // -- import wrapped keys // testImportWrappedKey, - // -- export wrapped keys + // // -- export wrapped keys // testExportWrappedKey, - // -- solana wrapped keys + // // -- solana wrapped keys // testSignMessageWithSolanaEncryptedKey, // testSignTransactionWithSolanaEncryptedKey, - // -- invalid cases + // // -- invalid cases // testFailEthereumSignTransactionWrappedKeyWithMissingParam, // testFailEthereumSignTransactionWrappedKeyWithInvalidParam, // testFailEthereumSignTransactionWrappedKeyInvalidDecryption, // testFailBatchGeneratePrivateKeysAtomic, - // -- import wrapped keys + // // -- import wrapped keys // testFailImportWrappedKeysWithSamePrivateKey, // testFailImportWrappedKeysWithEoaSessionSig, // testFailImportWrappedKeysWithMaxExpirySessionSig, @@ -230,15 +215,13 @@ const wrappedKeysTests = { const eoaSessionSigsTests = { testUseEoaSessionSigsToExecuteJsSigning, + testUseEoaSessionSigsToRequestSingleResponse, testUseEoaSessionSigsToPkpSign, testUseEoaSessionSigsToExecuteJsSigningInParallel, testUseEoaSessionSigsToExecuteJsClaimKeys, testUseEoaSessionSigsToExecuteJsClaimMultipleKeys, testUseEoaSessionSigsToExecuteJsJsonResponse, testUseEoaSessionSigsToExecuteJsConsoleLog, - testUseEoaSessionSigsToEncryptDecryptString, - testUseEoaSessionSigsToEncryptDecryptUint8Array, - testUseEoaSessionSigsToEncryptDecryptFile, }; const pkpSessionSigsTests = { @@ -249,8 +232,6 @@ const pkpSessionSigsTests = { testUsePkpSessionSigsToExecuteJsClaimMultipleKeys, testUsePkpSessionSigsToExecuteJsJsonResponse, testUsePkpSessionSigsToExecuteJsConsoleLog, - testUsePkpSessionSigsToEncryptDecryptString, - testUsePkpSessionSigsToEncryptDecryptFile, }; const litActionSessionSigsTests = { @@ -261,8 +242,6 @@ const litActionSessionSigsTests = { testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsClaimMultipleKeys, testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsJsonResponse, testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsConsoleLog, - testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString, - testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile, // -- invalid cases testUseInvalidLitActionIpfsCodeToGenerateSessionSigs, @@ -288,6 +267,11 @@ const bareAuthSigTests = { // testCosmosAuthSigToEncryptDecryptString, }; +const eip1271AuthSigTests = { + // testKeccakEip1271AuthSigToEncryptDecryptString, + // testShaEip1271AuthSigToEncryptDecryptString, +}; + const pkpEthersTest = { eoaSessionSigs: { testPkpEthersWithEoaSessionSigsToSignWithAuthContext, @@ -332,9 +316,6 @@ const litActionCombiningTests = { ecdsaSignAndCombine: { testExecuteJsSignAndCombineEcdsa, }, - decryptAndCombine: { - testExecuteJsDecryptAndCombine, - }, broadcastAndCombine: { testExecuteJsBroadcastAndCollect, }, @@ -347,6 +328,7 @@ export const tinnyTests = { ...pkpSessionSigsTests, ...litActionSessionSigsTests, ...litActionIpfsIdSessionSigsTests, + ...eip1271AuthSigTests, ...bareAuthSigTests, ...pkpEthersTest.eoaSessionSigs, @@ -354,7 +336,6 @@ export const tinnyTests = { ...pkpEthersTest.litActionSessionSigs, ...litActionCombiningTests.broadcastAndCombine, - ...litActionCombiningTests.decryptAndCombine, ...litActionCombiningTests.ecdsaSignAndCombine, ...relayerTests, diff --git a/local-tests/tests/test-bundle-speed.ts b/local-tests/tests/test-bundle-speed.ts index 703b9017f3..f7eb09c867 100644 --- a/local-tests/tests/test-bundle-speed.ts +++ b/local-tests/tests/test-bundle-speed.ts @@ -9,7 +9,7 @@ export const testBundleSpeed = async (devEnv: TinnyEnvironment) => { const b = await import('@lit-protocol/contracts-sdk'); const c = await import('@lit-protocol/auth-helpers'); const d = await import('@lit-protocol/constants'); - const e = await import('@lit-protocol/lit-auth-client'); + const e = await import('@lit-protocol/auth'); console.log(a, b, c, d, e); }; diff --git a/local-tests/tests/test-example.ts b/local-tests/tests/test-example.ts deleted file mode 100644 index 7cf55fd939..0000000000 --- a/local-tests/tests/test-example.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { getEoaSessionSigs } from 'local-tests/setup/session-sigs/get-eoa-session-sigs'; -import { getLitActionSessionSigs } from 'local-tests/setup/session-sigs/get-lit-action-session-sigs'; -import { getPkpSessionSigs } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; - -import { LIT_NETWORK } from '@lit-protocol/constants'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; - -export const testExample = async (devEnv: TinnyEnvironment) => { - // Note: This test will be skipped if we are testing on the DatilDev network - devEnv.setUnavailable(LIT_NETWORK.DatilDev); - - const alice = await devEnv.createRandomPerson(); - - const aliceEoaSessionSigs = await getEoaSessionSigs(devEnv, alice); - - const aliceExecuteJsRes = await devEnv.litNodeClient.executeJs({ - sessionSigs: aliceEoaSessionSigs, - code: `(async () => { - const sigShare = await LitActions.signEcdsa({ - toSign: dataToSign, - publicKey, - sigName: "sig", - }); - })();`, - jsParams: { - dataToSign: alice.loveLetter, - publicKey: alice.pkp.publicKey, - }, - }); - - console.log('aliceExecuteJsRes:', aliceExecuteJsRes); - - devEnv.releasePrivateKeyFromUser(alice); - - // console.log('aliceEoaSessionSigs: ', aliceEoaSessionSigs); - - // const alicePkpSessionSigs = await getPkpSessionSigs(devEnv, alice); - // console.log('alicePkpSessionSigs: ', alicePkpSessionSigs); - - // const aliceLitActionSessionSigs = await getLitActionSessionSigs( - // devEnv, - // alice - // ); - // console.log('aliceLitActionSessionSigs: ', aliceLitActionSessionSigs); -}; diff --git a/local-tests/tests/testExecuteJsDecryptAndCombine.ts b/local-tests/tests/testExecuteJsDecryptAndCombine.ts deleted file mode 100644 index 5022f9f252..0000000000 --- a/local-tests/tests/testExecuteJsDecryptAndCombine.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { LIT_NETWORK } from '@lit-protocol/constants'; -import { ILitNodeClient } from '@lit-protocol/types'; -import { AccessControlConditions } from 'local-tests/setup/accs/accs'; -import { getLitActionAuthContext } from 'local-tests/setup/session-sigs/get-lit-action-session-sigs'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; -import { encryptString } from '@lit-protocol/encryption'; - -/** - * Test Commands: - * ✅ NETWORK=datil-dev yarn test:local --filter=testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString - * ✅ NETWORK=custom yarn test:local --filter=testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString - * - */ -export const testExecuteJsDecryptAndCombine = async ( - devEnv: TinnyEnvironment -) => { - const alice = await devEnv.createRandomPerson(); - // set access control conditions for encrypting and decrypting - const accs = AccessControlConditions.getEvmBasicAccessControlConditions({ - userAddress: alice.authMethodOwnedPkp.ethAddress, - }); - - const encryptRes = await encryptString( - { - accessControlConditions: accs, - dataToEncrypt: 'Hello world', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - log('encryptRes:', encryptRes); - - // -- Expected output: - // { - // ciphertext: "pSP1Rq4xdyLBzSghZ3DtTtHp2UL7/z45U2JDOQho/WXjd2ntr4IS8BJfqJ7TC2U4CmktrvbVT3edoXJgFqsE7vy9uNrBUyUSTuUdHLfDVMIgh4a7fqMxsdQdkWZjHign3JOaVBihtOjAF5VthVena28D", - // dataToEncryptHash: "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c", - // } - - // -- assertions - if (!encryptRes.ciphertext) { - throw new Error(`Expected "ciphertext" in encryptRes`); - } - - if (!encryptRes.dataToEncryptHash) { - throw new Error(`Expected "dataToEncryptHash" to in encryptRes`); - } - - const res = await devEnv.litNodeClient.executeJs({ - authContext: getLitActionAuthContext(devEnv, alice), - code: `(async () => { - const resp = await Lit.Actions.decryptAndCombine({ - accessControlConditions, - ciphertext, - dataToEncryptHash, - authSig: null, - chain: 'ethereum', - }); - Lit.Actions.setResponse({ - response: resp - }); - })();`, - jsParams: { - accessControlConditions: accs, - dataToEncryptHash: encryptRes.dataToEncryptHash, - ciphertext: encryptRes.ciphertext, - }, - }); - - devEnv.releasePrivateKeyFromUser(alice); - - if (res.response !== 'Hello world') { - throw new Error('content does not match what was expected'); - } -}; diff --git a/local-tests/tests/testRelayer.ts b/local-tests/tests/testRelayer.ts index 6b07955bc8..57c6b148ab 100644 --- a/local-tests/tests/testRelayer.ts +++ b/local-tests/tests/testRelayer.ts @@ -1,7 +1,8 @@ -import { log } from '@lit-protocol/misc'; import { ClaimRequest, ClientClaimProcessor } from '@lit-protocol/types'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { EthWalletProvider, LitRelay } from '@lit-protocol/lit-auth-client'; +import { authenticators, LitRelay } from '@lit-protocol/auth'; + +const { MetamaskAuthenticator } = authenticators; /** * Test Commands: @@ -18,20 +19,6 @@ export const testRelayer = async (devEnv: TinnyEnvironment) => { relayUrl: LitRelay.getRelayUrl(devEnv.network), relayApiKey: 'test-api-key', }); - const ethWalletProvider = new EthWalletProvider({ - relay: litRelay, - litNodeClient: devEnv.litNodeClient, - }); - - const pkps = await ethWalletProvider.fetchPKPsThroughRelayer( - alice.authMethod - ); - - if (pkps.length <= 0) { - throw new Error('No PKPs found'); - } else { - console.log('✅ 1. [testRelayer] /fetch-pkps-by-auth-method works'); - } // -- test claims const claimRequest: ClaimRequest = { @@ -102,5 +89,5 @@ export const testRelayer = async (devEnv: TinnyEnvironment) => { } }); - log('✅ 2. [testRelayer] Claim works'); + console.log('✅ 2. [testRelayer] Claim works'); }; diff --git a/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptFile.ts b/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptFile.ts deleted file mode 100644 index 18b76d5a58..0000000000 --- a/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptFile.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { getEoaAuthContext } from 'local-tests/setup/session-sigs/get-eoa-session-sigs'; -import { LIT_ABILITY } from '@lit-protocol/constants'; -import { ILitNodeClient } from '@lit-protocol/types'; -import { AccessControlConditions } from 'local-tests/setup/accs/accs'; -import { LitAccessControlConditionResource } from '@lit-protocol/auth-helpers'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; -import { encryptString, decryptToFile } from '@lit-protocol/encryption'; - -/** - * Test Commands: - * ✅ NETWORK=datil-dev yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptFile - * ✅ NETWORK=datil-test yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptFile - * ✅ NETWORK=custom yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptFile - */ -export const testUseEoaSessionSigsToEncryptDecryptFile = async ( - devEnv: TinnyEnvironment -) => { - const alice = await devEnv.createRandomPerson(); - const message = 'Hello world'; - const blob = new Blob([message], { type: 'text/plain' }); - const blobArray = new Uint8Array(await blob.arrayBuffer()); - - // set access control conditions for encrypting and decrypting - const accs = AccessControlConditions.getEvmBasicAccessControlConditions({ - userAddress: alice.wallet.address, - }); - - const encryptRes = await encryptString( - { - accessControlConditions: accs, - dataToEncrypt: 'Hello world', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - log('encryptRes:', encryptRes); - - // await 5 seconds for the encryption to be mined - - // -- Expected output: - // { - // ciphertext: "pSP1Rq4xdyLBzSghZ3DtTtHp2UL7/z45U2JDOQho/WXjd2ntr4IS8BJfqJ7TC2U4CmktrvbVT3edoXJgFqsE7vy9uNrBUyUSTuUdHLfDVMIgh4a7fqMxsdQdkWZjHign3JOaVBihtOjAF5VthVena28D", - // dataToEncryptHash: "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c", - // } - - // -- assertions - if (!encryptRes.ciphertext) { - throw new Error(`Expected "ciphertext" in encryptRes`); - } - - if (!encryptRes.dataToEncryptHash) { - throw new Error(`Expected "dataToEncryptHash" to in encryptRes`); - } - - const accsResourceString = - await LitAccessControlConditionResource.generateResourceString( - accs, - encryptRes.dataToEncryptHash - ); - - // -- Decrypt the encrypted string - const decriptedFile = await decryptToFile( - { - accessControlConditions: accs, - ciphertext: encryptRes.ciphertext, - dataToEncryptHash: encryptRes.dataToEncryptHash, - authContext: getEoaAuthContext(devEnv, alice, [ - { - resource: new LitAccessControlConditionResource(accsResourceString), - ability: LIT_ABILITY.AccessControlConditionDecryption, - }, - ]), - chain: 'ethereum', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - devEnv.releasePrivateKeyFromUser(alice); - - if (blobArray.length !== decriptedFile.length) { - throw new Error( - `decrypted file should match the original file but received ${decriptedFile}` - ); - } - for (let i = 0; i < blobArray.length; i++) { - if (blobArray[i] !== decriptedFile[i]) { - throw new Error(`decrypted file should match the original file`); - } - } - - console.log('decriptedFile:', decriptedFile); -}; diff --git a/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptString.ts b/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptString.ts deleted file mode 100644 index 0b7a6676d3..0000000000 --- a/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptString.ts +++ /dev/null @@ -1,83 +0,0 @@ -// DEBUG=true LIT_RPC_URL=https://yellowstone-rpc.litprotocol.com NETWORK=custom yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptString -import { getEoaAuthContext } from 'local-tests/setup/session-sigs/get-eoa-session-sigs'; -import { LIT_ABILITY } from '@lit-protocol/constants'; -import { ILitNodeClient } from '@lit-protocol/types'; -import { AccessControlConditions } from 'local-tests/setup/accs/accs'; -import { LitAccessControlConditionResource } from '@lit-protocol/auth-helpers'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; -import { encryptString, decryptToString } from '@lit-protocol/encryption'; - -/** - * Test Commands: - * ✅ NETWORK=datil-dev yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptString - * ✅ NETWORK=datil-test yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptString - * ✅ NETWORK=custom yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptString - */ -export const testUseEoaSessionSigsToEncryptDecryptString = async ( - devEnv: TinnyEnvironment -) => { - const alice = await devEnv.createRandomPerson(); - // set access control conditions for encrypting and decrypting - const accs = AccessControlConditions.getEvmBasicAccessControlConditions({ - userAddress: alice.wallet.address, - }); - - const encryptRes = await encryptString( - { - accessControlConditions: accs, - dataToEncrypt: 'Hello world', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - log('encryptRes:', encryptRes); - - // await 5 seconds for the encryption to be mined - - // -- Expected output: - // { - // ciphertext: "pSP1Rq4xdyLBzSghZ3DtTtHp2UL7/z45U2JDOQho/WXjd2ntr4IS8BJfqJ7TC2U4CmktrvbVT3edoXJgFqsE7vy9uNrBUyUSTuUdHLfDVMIgh4a7fqMxsdQdkWZjHign3JOaVBihtOjAF5VthVena28D", - // dataToEncryptHash: "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c", - // } - - // -- assertions - if (!encryptRes.ciphertext) { - throw new Error(`Expected "ciphertext" in encryptRes`); - } - - if (!encryptRes.dataToEncryptHash) { - throw new Error(`Expected "dataToEncryptHash" to in encryptRes`); - } - - const accsResourceString = - await LitAccessControlConditionResource.generateResourceString( - accs, - encryptRes.dataToEncryptHash - ); - - // -- Decrypt the encrypted string - const decryptRes = await decryptToString( - { - accessControlConditions: accs, - ciphertext: encryptRes.ciphertext, - dataToEncryptHash: encryptRes.dataToEncryptHash, - authContext: getEoaAuthContext(devEnv, alice, [ - { - resource: new LitAccessControlConditionResource(accsResourceString), - ability: LIT_ABILITY.AccessControlConditionDecryption, - }, - ]), - chain: 'ethereum', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - devEnv.releasePrivateKeyFromUser(alice); - - if (decryptRes !== 'Hello world') { - throw new Error( - `Expected decryptRes to be 'Hello world' but got ${decryptRes}` - ); - } -}; diff --git a/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptUint8Array.ts b/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptUint8Array.ts deleted file mode 100644 index 20e29dba02..0000000000 --- a/local-tests/tests/testUseEoaSessionSigsToEncryptDecryptUint8Array.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { getEoaAuthContext } from 'local-tests/setup/session-sigs/get-eoa-session-sigs'; -import { LIT_ABILITY } from '@lit-protocol/constants'; -import { ILitNodeClient } from '@lit-protocol/types'; -import { AccessControlConditions } from 'local-tests/setup/accs/accs'; -import { LitAccessControlConditionResource } from '@lit-protocol/auth-helpers'; -import { - encryptUint8Array, - decryptToUint8Array, -} from '@lit-protocol/encryption'; -import { - uint8arrayFromString, - uint8arrayToString, -} from '@lit-protocol/uint8arrays'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; - -/** - * Test Commands: - * ✅ NETWORK=datil-dev yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptUint8Array - * ✅ NETWORK=datil-test yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptUint8Array - * ✅ NETWORK=custom yarn test:local --filter=testUseEoaSessionSigsToEncryptDecryptUint8Array - */ -export const testUseEoaSessionSigsToEncryptDecryptUint8Array = async ( - devEnv: TinnyEnvironment -) => { - const alice = await devEnv.createRandomPerson(); - // set access control conditions for encrypting and decrypting - const accs = AccessControlConditions.getEvmBasicAccessControlConditions({ - userAddress: alice.wallet.address, - }); - - const message = 'Hello world'; - const messageToEncrypt = uint8arrayFromString(message, 'utf8'); - - const encryptRes = await encryptUint8Array( - { - accessControlConditions: accs, - dataToEncrypt: messageToEncrypt, - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - log('encryptRes:', encryptRes); - - // await 5 seconds for the encryption to be mined - - // -- Expected output: - // { - // ciphertext: "pSP1Rq4xdyLBzSghZ3DtTtHp2UL7/z45U2JDOQho/WXjd2ntr4IS8BJfqJ7TC2U4CmktrvbVT3edoXJgFqsE7vy9uNrBUyUSTuUdHLfDVMIgh4a7fqMxsdQdkWZjHign3JOaVBihtOjAF5VthVena28D", - // dataToEncryptHash: "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c", - // } - - // -- assertions - if (!encryptRes.ciphertext) { - throw new Error(`Expected "ciphertext" in encryptRes`); - } - - if (!encryptRes.dataToEncryptHash) { - throw new Error(`Expected "dataToEncryptHash" to in encryptRes`); - } - - const accsResourceString = - await LitAccessControlConditionResource.generateResourceString( - accs, - encryptRes.dataToEncryptHash - ); - - // -- Decrypt the encrypted string - const decryptRes = await decryptToUint8Array( - { - accessControlConditions: accs, - ciphertext: encryptRes.ciphertext, - dataToEncryptHash: encryptRes.dataToEncryptHash, - authContext: getEoaAuthContext(devEnv, alice, [ - { - resource: new LitAccessControlConditionResource(accsResourceString), - ability: LIT_ABILITY.AccessControlConditionDecryption, - }, - ]), - chain: 'ethereum', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - const decryptResString = uint8arrayToString(decryptRes, 'utf8'); - - devEnv.releasePrivateKeyFromUser(alice); - - if (decryptResString !== message) { - throw new Error( - `Expected decryptRes to be 'Hello world' but got ${decryptRes}` - ); - } -}; diff --git a/local-tests/tests/testUseEoaSessionSigsToExecuteJsClaimKeys.ts b/local-tests/tests/testUseEoaSessionSigsToExecuteJsClaimKeys.ts index 9fb521bd90..ba0b98a7da 100644 --- a/local-tests/tests/testUseEoaSessionSigsToExecuteJsClaimKeys.ts +++ b/local-tests/tests/testUseEoaSessionSigsToExecuteJsClaimKeys.ts @@ -1,13 +1,5 @@ -// import { LitContracts } from '@lit-protocol/contracts-sdk'; -// import { log } from '@lit-protocol/misc'; -// import { -// ClaimRequest, -// ClaimResult, -// ClientClaimProcessor, -// } from '@lit-protocol/types'; import { getEoaAuthContext } from 'local-tests/setup/session-sigs/get-eoa-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; /** * ## Scenario: @@ -174,5 +166,5 @@ export const testUseEoaSessionSigsToExecuteJsClaimKeys = async ( // } // }); - log('✅ testUseEoaSessionSigsToExecuteJsClaimKeys'); + console.log('✅ testUseEoaSessionSigsToExecuteJsClaimKeys'); }; diff --git a/local-tests/tests/testUseEoaSessionSigsToExecuteJsSigning.ts b/local-tests/tests/testUseEoaSessionSigsToExecuteJsSigning.ts index 415572de85..cc17a962d8 100644 --- a/local-tests/tests/testUseEoaSessionSigsToExecuteJsSigning.ts +++ b/local-tests/tests/testUseEoaSessionSigsToExecuteJsSigning.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { getEoaAuthContext } from 'local-tests/setup/session-sigs/get-eoa-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -62,5 +61,5 @@ export const testUseEoaSessionSigsToExecuteJsSigning = async ( throw new Error(`Expected "publicKey" in res.signatures.sig`); } - log('✅ testUseEoaSessionSigsToExecuteJsSigning'); + console.log('✅ testUseEoaSessionSigsToExecuteJsSigning'); }; diff --git a/local-tests/tests/testUseEoaSessionSigsToExecuteJsSigningInParallel.ts b/local-tests/tests/testUseEoaSessionSigsToExecuteJsSigningInParallel.ts index a3782ebbcd..ca2894abaa 100644 --- a/local-tests/tests/testUseEoaSessionSigsToExecuteJsSigningInParallel.ts +++ b/local-tests/tests/testUseEoaSessionSigsToExecuteJsSigningInParallel.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { getEoaAuthContext } from 'local-tests/setup/session-sigs/get-eoa-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -14,7 +13,7 @@ export const testUseEoaSessionSigsToExecuteJsSigningInParallel = async ( const alice = await devEnv.createRandomPerson(); const fn = async (index: number) => { - log(`Index: ${index}`); + console.log(`Index: ${index}`); return await devEnv.litNodeClient.executeJs({ authContext: getEoaAuthContext(devEnv, alice), @@ -34,7 +33,7 @@ export const testUseEoaSessionSigsToExecuteJsSigningInParallel = async ( const res = await Promise.all([fn(1), fn(2), fn(3)]); devEnv.releasePrivateKeyFromUser(alice); - log('res:', res); + console.log('res:', res); // -- Expected output: // [ @@ -114,5 +113,5 @@ export const testUseEoaSessionSigsToExecuteJsSigningInParallel = async ( } }); - log('✅ testUseEoaSessionSigsToExecuteJsSigningInParallel'); + console.log('✅ testUseEoaSessionSigsToExecuteJsSigningInParallel'); }; diff --git a/local-tests/tests/testUseEoaSessionSigsToPkpSign.ts b/local-tests/tests/testUseEoaSessionSigsToPkpSign.ts index 0431da2142..852c1da223 100644 --- a/local-tests/tests/testUseEoaSessionSigsToPkpSign.ts +++ b/local-tests/tests/testUseEoaSessionSigsToPkpSign.ts @@ -1,6 +1,5 @@ import { ethers } from 'ethers'; -import { log } from '@lit-protocol/misc'; import { getEoaAuthContext } from 'local-tests/setup/session-sigs/get-eoa-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -82,5 +81,5 @@ export const testUseEoaSessionSigsToPkpSign = async ( ); } - log('✅ testUseEoaSessionSigsToPkpSign'); + console.log('✅ testUseEoaSessionSigsToPkpSign'); }; diff --git a/local-tests/tests/testUsePkpSessionSigsToEncryptDecryptFile.ts b/local-tests/tests/testUsePkpSessionSigsToEncryptDecryptFile.ts deleted file mode 100644 index d854df3574..0000000000 --- a/local-tests/tests/testUsePkpSessionSigsToEncryptDecryptFile.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { LIT_ABILITY } from '@lit-protocol/constants'; -import { ILitNodeClient } from '@lit-protocol/types'; -import { AccessControlConditions } from 'local-tests/setup/accs/accs'; -import { LitAccessControlConditionResource } from '@lit-protocol/auth-helpers'; -import { getPkpAuthContext } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; -import { encryptString, decryptToFile } from '@lit-protocol/encryption'; - -/** - * Test Commands: - * ✅ NETWORK=datil-dev yarn test:local --filter=testUsePkpSessionSigsToEncryptDecryptFile - * ✅ NETWORK=datil-test yarn test:local --filter=testUsePkpSessionSigsToEncryptDecryptFile - * ✅ NETWORK=custom yarn test:local --filter=testUsePkpSessionSigsToEncryptDecryptFile - */ -export const testUsePkpSessionSigsToEncryptDecryptFile = async ( - devEnv: TinnyEnvironment -) => { - const alice = await devEnv.createRandomPerson(); - - const message = 'Hello world'; - const blob = new Blob([message], { type: 'text/plain' }); - const blobArray = new Uint8Array(await blob.arrayBuffer()); - - // set access control conditions for encrypting and decrypting - const accs = AccessControlConditions.getEvmBasicAccessControlConditions({ - userAddress: alice.authMethodOwnedPkp.ethAddress, - }); - - const encryptRes = await encryptString( - { - accessControlConditions: accs, - dataToEncrypt: 'Hello world', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - log('encryptRes:', encryptRes); - - // await 5 seconds for the encryption to be mined - - // -- Expected output: - // { - // ciphertext: "pSP1Rq4xdyLBzSghZ3DtTtHp2UL7/z45U2JDOQho/WXjd2ntr4IS8BJfqJ7TC2U4CmktrvbVT3edoXJgFqsE7vy9uNrBUyUSTuUdHLfDVMIgh4a7fqMxsdQdkWZjHign3JOaVBihtOjAF5VthVena28D", - // dataToEncryptHash: "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c", - // } - - // -- assertions - if (!encryptRes.ciphertext) { - throw new Error(`Expected "ciphertext" in encryptRes`); - } - - if (!encryptRes.dataToEncryptHash) { - throw new Error(`Expected "dataToEncryptHash" to in encryptRes`); - } - - const accsResourceString = - await LitAccessControlConditionResource.generateResourceString( - accs, - encryptRes.dataToEncryptHash - ); - - // -- Decrypt the encrypted string - const decriptedFile = await decryptToFile( - { - accessControlConditions: accs, - ciphertext: encryptRes.ciphertext, - dataToEncryptHash: encryptRes.dataToEncryptHash, - authContext: getPkpAuthContext(devEnv, alice, [ - { - resource: new LitAccessControlConditionResource(accsResourceString), - ability: LIT_ABILITY.AccessControlConditionDecryption, - }, - ]), - chain: 'ethereum', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - devEnv.releasePrivateKeyFromUser(alice); - - if (blobArray.length !== decriptedFile.length) { - throw new Error( - `decrypted file should match the original file but received ${decriptedFile}` - ); - } - for (let i = 0; i < blobArray.length; i++) { - if (blobArray[i] !== decriptedFile[i]) { - throw new Error(`decrypted file should match the original file`); - } - } - - console.log('decriptedFile:', decriptedFile); -}; diff --git a/local-tests/tests/testUsePkpSessionSigsToEncryptDecryptString.ts b/local-tests/tests/testUsePkpSessionSigsToEncryptDecryptString.ts deleted file mode 100644 index 54c8c76207..0000000000 --- a/local-tests/tests/testUsePkpSessionSigsToEncryptDecryptString.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { LIT_ABILITY } from '@lit-protocol/constants'; -import { ILitNodeClient } from '@lit-protocol/types'; -import { AccessControlConditions } from 'local-tests/setup/accs/accs'; -import { LitAccessControlConditionResource } from '@lit-protocol/auth-helpers'; -import { getPkpAuthContext } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; -import { encryptString, decryptToString } from '@lit-protocol/encryption'; - -/** - * Test Commands: - * ✅ NETWORK=datil-dev yarn test:local --filter=testUsePkpSessionSigsToEncryptDecryptString - * ✅ NETWORK=datil-test yarn test:local --filter=testUsePkpSessionSigsToEncryptDecryptString - * ✅ NETWORK=custom yarn test:local --filter=testUsePkpSessionSigsToEncryptDecryptString - */ -export const testUsePkpSessionSigsToEncryptDecryptString = async ( - devEnv: TinnyEnvironment -) => { - const alice = await devEnv.createRandomPerson(); - // set access control conditions for encrypting and decrypting - const accs = AccessControlConditions.getEvmBasicAccessControlConditions({ - userAddress: alice.authMethodOwnedPkp.ethAddress, - }); - - const encryptRes = await encryptString( - { - accessControlConditions: accs, - dataToEncrypt: 'Hello world', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - log('encryptRes:', encryptRes); - - // -- Expected output: - // { - // ciphertext: "pSP1Rq4xdyLBzSghZ3DtTtHp2UL7/z45U2JDOQho/WXjd2ntr4IS8BJfqJ7TC2U4CmktrvbVT3edoXJgFqsE7vy9uNrBUyUSTuUdHLfDVMIgh4a7fqMxsdQdkWZjHign3JOaVBihtOjAF5VthVena28D", - // dataToEncryptHash: "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c", - // } - - // -- assertions - if (!encryptRes.ciphertext) { - throw new Error(`Expected "ciphertext" in encryptRes`); - } - - if (!encryptRes.dataToEncryptHash) { - throw new Error(`Expected "dataToEncryptHash" to in encryptRes`); - } - - const accsResourceString = - await LitAccessControlConditionResource.generateResourceString( - accs, - encryptRes.dataToEncryptHash - ); - - // -- Decrypt the encrypted string - const decryptRes = await decryptToString( - { - accessControlConditions: accs, - ciphertext: encryptRes.ciphertext, - dataToEncryptHash: encryptRes.dataToEncryptHash, - authContext: getPkpAuthContext(devEnv, alice, [ - { - resource: new LitAccessControlConditionResource(accsResourceString), - ability: LIT_ABILITY.AccessControlConditionDecryption, - }, - ]), - chain: 'ethereum', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - devEnv.releasePrivateKeyFromUser(alice); - - if (decryptRes !== 'Hello world') { - throw new Error( - `Expected decryptRes to be 'Hello world' but got ${decryptRes}` - ); - } -}; diff --git a/local-tests/tests/testUsePkpSessionSigsToExecuteJsSigning.ts b/local-tests/tests/testUsePkpSessionSigsToExecuteJsSigning.ts index 982dbe0142..53a17cc73a 100644 --- a/local-tests/tests/testUsePkpSessionSigsToExecuteJsSigning.ts +++ b/local-tests/tests/testUsePkpSessionSigsToExecuteJsSigning.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { getPkpAuthContext } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -76,5 +75,5 @@ export const testUsePkpSessionSigsToExecuteJsSigning = async ( throw new Error(`Expected "recid" to be parseable as a number`); } - log('✅ res:', res); + console.log('✅ res:', res); }; diff --git a/local-tests/tests/testUsePkpSessionSigsToExecuteJsSigningInParallel.ts b/local-tests/tests/testUsePkpSessionSigsToExecuteJsSigningInParallel.ts index 28b467e4c8..ec530e2950 100644 --- a/local-tests/tests/testUsePkpSessionSigsToExecuteJsSigningInParallel.ts +++ b/local-tests/tests/testUsePkpSessionSigsToExecuteJsSigningInParallel.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { getPkpAuthContext } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -14,7 +13,7 @@ export const testUsePkpSessionSigsToExecuteJsSigningInParallel = async ( const alice = await devEnv.createRandomPerson(); const fn = async (index: number) => { - log(`Index: ${index}`); + console.log(`Index: ${index}`); return await devEnv.litNodeClient.executeJs({ authContext: getPkpAuthContext(devEnv, alice), @@ -35,7 +34,7 @@ export const testUsePkpSessionSigsToExecuteJsSigningInParallel = async ( devEnv.releasePrivateKeyFromUser(alice); const res = await Promise.all([fn(1), fn(2), fn(3)]); - log('res:', res); + console.log('res:', res); // -- Expected output: // [ @@ -115,5 +114,5 @@ export const testUsePkpSessionSigsToExecuteJsSigningInParallel = async ( } }); - log('✅ testUsePkpSessionSigsToExecuteJsSigningInParallel'); + console.log('✅ testUsePkpSessionSigsToExecuteJsSigningInParallel'); }; diff --git a/local-tests/tests/testUsePkpSessionSigsToPkpSign.ts b/local-tests/tests/testUsePkpSessionSigsToPkpSign.ts index 3f54db9f58..87c369b059 100644 --- a/local-tests/tests/testUsePkpSessionSigsToPkpSign.ts +++ b/local-tests/tests/testUsePkpSessionSigsToPkpSign.ts @@ -1,6 +1,5 @@ import { ethers } from 'ethers'; -import { log } from '@lit-protocol/misc'; import { getPkpAuthContext } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -88,5 +87,5 @@ export const testUsePkpSessionSigsToPkpSign = async ( // ); } - log('✅ res:', res); + console.log('✅ res:', res); }; diff --git a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile.ts b/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile.ts deleted file mode 100644 index e5772f31a9..0000000000 --- a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { LIT_ABILITY } from '@lit-protocol/constants'; -import { ILitNodeClient } from '@lit-protocol/types'; -import { AccessControlConditions } from 'local-tests/setup/accs/accs'; -import { LitAccessControlConditionResource } from '@lit-protocol/auth-helpers'; -import { getPkpAuthContext } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; -import { encryptString, decryptToFile } from '@lit-protocol/encryption'; - -/** - * Test Commands: - * ✅ NETWORK=datil-dev yarn test:local --filter=testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile - * ✅ NETWORK=datil-test yarn test:local --filter=testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile - * ✅ NETWORK=custom yarn test:local --filter=testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile - * ✅ NETWORK=datil-dev yarn test:local --filter=testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile - */ -export const testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptFile = - async (devEnv: TinnyEnvironment) => { - const alice = await devEnv.createRandomPerson(); - - const message = 'Hello world'; - const blob = new Blob([message], { type: 'text/plain' }); - const blobArray = new Uint8Array(await blob.arrayBuffer()); - - // set access control conditions for encrypting and decrypting - const accs = AccessControlConditions.getEvmBasicAccessControlConditions({ - userAddress: alice.authMethodOwnedPkp.ethAddress, - }); - - const encryptRes = await encryptString( - { - accessControlConditions: accs, - dataToEncrypt: 'Hello world', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - log('encryptRes:', encryptRes); - - // await 5 seconds for the encryption to be mined - - // -- Expected output: - // { - // ciphertext: "pSP1Rq4xdyLBzSghZ3DtTtHp2UL7/z45U2JDOQho/WXjd2ntr4IS8BJfqJ7TC2U4CmktrvbVT3edoXJgFqsE7vy9uNrBUyUSTuUdHLfDVMIgh4a7fqMxsdQdkWZjHign3JOaVBihtOjAF5VthVena28D", - // dataToEncryptHash: "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c", - // } - - // -- assertions - if (!encryptRes.ciphertext) { - throw new Error(`Expected "ciphertext" in encryptRes`); - } - - if (!encryptRes.dataToEncryptHash) { - throw new Error(`Expected "dataToEncryptHash" to in encryptRes`); - } - - const accsResourceString = - await LitAccessControlConditionResource.generateResourceString( - accs, - encryptRes.dataToEncryptHash - ); - - // -- Decrypt the encrypted string - const decriptedFile = await decryptToFile( - { - authContext: getPkpAuthContext(devEnv, alice, [ - { - resource: new LitAccessControlConditionResource(accsResourceString), - ability: LIT_ABILITY.AccessControlConditionDecryption, - }, - ]), - accessControlConditions: accs, - ciphertext: encryptRes.ciphertext, - dataToEncryptHash: encryptRes.dataToEncryptHash, - chain: 'ethereum', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - devEnv.releasePrivateKeyFromUser(alice); - - if (blobArray.length !== decriptedFile.length) { - throw new Error( - `decrypted file should match the original file but received ${decriptedFile}` - ); - } - for (let i = 0; i < blobArray.length; i++) { - if (blobArray[i] !== decriptedFile[i]) { - throw new Error(`decrypted file should match the original file`); - } - } - - console.log('decriptedFile:', decriptedFile); - }; diff --git a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString.ts b/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString.ts deleted file mode 100644 index b066bdcbf2..0000000000 --- a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { LIT_ABILITY } from '@lit-protocol/constants'; -import { ILitNodeClient } from '@lit-protocol/types'; -import { AccessControlConditions } from 'local-tests/setup/accs/accs'; -import { LitAccessControlConditionResource } from '@lit-protocol/auth-helpers'; -import { getLitActionAuthContext } from 'local-tests/setup/session-sigs/get-lit-action-session-sigs'; -import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; -import { log } from '@lit-protocol/misc'; -import { encryptString, decryptToString } from '@lit-protocol/encryption'; - -/** - * Test Commands: - * ✅ NETWORK=datil-dev yarn test:local --filter=testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString - * ✅ NETWORK=custom yarn test:local --filter=testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString - * - */ -export const testUseValidLitActionCodeGeneratedSessionSigsToEncryptDecryptString = - async (devEnv: TinnyEnvironment) => { - const alice = await devEnv.createRandomPerson(); - // set access control conditions for encrypting and decrypting - const accs = AccessControlConditions.getEvmBasicAccessControlConditions({ - userAddress: alice.authMethodOwnedPkp.ethAddress, - }); - - const encryptRes = await encryptString( - { - accessControlConditions: accs, - dataToEncrypt: 'Hello world', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - log('encryptRes:', encryptRes); - - // -- Expected output: - // { - // ciphertext: "pSP1Rq4xdyLBzSghZ3DtTtHp2UL7/z45U2JDOQho/WXjd2ntr4IS8BJfqJ7TC2U4CmktrvbVT3edoXJgFqsE7vy9uNrBUyUSTuUdHLfDVMIgh4a7fqMxsdQdkWZjHign3JOaVBihtOjAF5VthVena28D", - // dataToEncryptHash: "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c", - // } - - // -- assertions - if (!encryptRes.ciphertext) { - throw new Error(`Expected "ciphertext" in encryptRes`); - } - - if (!encryptRes.dataToEncryptHash) { - throw new Error(`Expected "dataToEncryptHash" to in encryptRes`); - } - - const accsResourceString = - await LitAccessControlConditionResource.generateResourceString( - accs, - encryptRes.dataToEncryptHash - ); - - // -- Decrypt the encrypted string - const decryptRes = await decryptToString( - { - accessControlConditions: accs, - ciphertext: encryptRes.ciphertext, - dataToEncryptHash: encryptRes.dataToEncryptHash, - authContext: getLitActionAuthContext(devEnv, alice, [ - { - resource: new LitAccessControlConditionResource(accsResourceString), - ability: LIT_ABILITY.AccessControlConditionDecryption, - }, - ]), - chain: 'ethereum', - }, - devEnv.litNodeClient as unknown as ILitNodeClient - ); - - devEnv.releasePrivateKeyFromUser(alice); - - if (decryptRes !== 'Hello world') { - throw new Error( - `Expected decryptRes to be 'Hello world' but got ${decryptRes}` - ); - } - }; diff --git a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigning.ts b/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigning.ts index 3191ae21ef..3d68cc4412 100644 --- a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigning.ts +++ b/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigning.ts @@ -1,6 +1,6 @@ import { LitActionResource, LitPKPResource } from '@lit-protocol/auth-helpers'; -import { log } from '@lit-protocol/misc'; import { LIT_ABILITY } from '@lit-protocol/constants'; + import { getLitActionAuthContext } from 'local-tests/setup/session-sigs/get-lit-action-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -74,5 +74,5 @@ export const testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigning = throw new Error(`Expected "publicKey" in res.signatures.sig`); } - log('✅ res:', res); + console.log('✅ res:', res); }; diff --git a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel.ts b/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel.ts index 867098a092..138f15cc38 100644 --- a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel.ts +++ b/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInParallel.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { getLitActionAuthContext } from 'local-tests/setup/session-sigs/get-lit-action-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -12,7 +11,7 @@ export const testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInPa const alice = await devEnv.createRandomPerson(); const fn = async (index: number) => { - log(`Index: ${index}`); + console.log(`Index: ${index}`); return await devEnv.litNodeClient.executeJs({ authContext: getLitActionAuthContext(devEnv, alice), @@ -33,7 +32,7 @@ export const testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInPa devEnv.releasePrivateKeyFromUser(alice); const res = await Promise.all([fn(1), fn(2), fn(3)]); - log('res:', res); + console.log('res:', res); // -- Expected output: // [ @@ -113,5 +112,5 @@ export const testUseValidLitActionCodeGeneratedSessionSigsToExecuteJsSigningInPa } }); - log('✅ testUsePkpSessionSigsToExecuteJsSigningInParallel'); + console.log('✅ testUsePkpSessionSigsToExecuteJsSigningInParallel'); }; diff --git a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToPkpSign.ts b/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToPkpSign.ts index b59abac7cc..b60f2ed78a 100644 --- a/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToPkpSign.ts +++ b/local-tests/tests/testUseValidLitActionCodeGeneratedSessionSigsToPkpSign.ts @@ -1,6 +1,5 @@ import { ethers } from 'ethers'; -import { log } from '@lit-protocol/misc'; import { getLitActionAuthContext } from 'local-tests/setup/session-sigs/get-lit-action-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -78,5 +77,5 @@ export const testUseValidLitActionCodeGeneratedSessionSigsToPkpSign = async ( ); } - log('✅ res:', res); + console.log('✅ res:', res); }; diff --git a/local-tests/tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToExecuteJsSigning.ts b/local-tests/tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToExecuteJsSigning.ts index ab92a4127e..09762db841 100644 --- a/local-tests/tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToExecuteJsSigning.ts +++ b/local-tests/tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToExecuteJsSigning.ts @@ -1,6 +1,6 @@ import { LitActionResource, LitPKPResource } from '@lit-protocol/auth-helpers'; -import { log } from '@lit-protocol/misc'; import { LIT_ABILITY } from '@lit-protocol/constants'; + import { getLitActionAuthContextUsingIpfsId } from 'local-tests/setup/session-sigs/get-lit-action-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -73,5 +73,5 @@ export const testUseValidLitActionIpfsCodeGeneratedSessionSigsToExecuteJsSigning throw new Error(`Expected "publicKey" in res.signatures.sig`); } - log('✅ res:', res); + console.log('✅ res:', res); }; diff --git a/local-tests/tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign.ts b/local-tests/tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign.ts index 94fd353569..205fbe329a 100644 --- a/local-tests/tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign.ts +++ b/local-tests/tests/testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign.ts @@ -1,6 +1,5 @@ import { ethers } from 'ethers'; -import { log } from '@lit-protocol/misc'; import { getLitActionAuthContextUsingIpfsId } from 'local-tests/setup/session-sigs/get-lit-action-session-sigs'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; @@ -78,5 +77,5 @@ export const testUseValidLitActionIpfsCodeGeneratedSessionSigsToPkpSign = ); } - log('✅ res:', res); + console.log('✅ res:', res); }; diff --git a/local-tests/tests/wrapped-keys/testBatchGeneratePrivateKeys.ts b/local-tests/tests/wrapped-keys/testBatchGeneratePrivateKeys.ts index b2c40701bf..486750a7ca 100644 --- a/local-tests/tests/wrapped-keys/testBatchGeneratePrivateKeys.ts +++ b/local-tests/tests/wrapped-keys/testBatchGeneratePrivateKeys.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; import { getPkpSessionSigs } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; @@ -129,7 +128,7 @@ export const testBatchGeneratePrivateKeys = async ( await verifyEvmSignature(results[0], evmMessageToSign); console.log('results', results); - log('✅ testBatchGenerateEncryptedKeys'); + console.log('✅ testBatchGenerateEncryptedKeys'); } catch (err) { console.log(err.message, err, err.stack); throw err; diff --git a/local-tests/tests/wrapped-keys/testEthereumBroadcastTransactionGeneratedKey.ts b/local-tests/tests/wrapped-keys/testEthereumBroadcastTransactionGeneratedKey.ts index 3df75c6de5..3a32b2ea98 100644 --- a/local-tests/tests/wrapped-keys/testEthereumBroadcastTransactionGeneratedKey.ts +++ b/local-tests/tests/wrapped-keys/testEthereumBroadcastTransactionGeneratedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api, EthereumLitTransaction } from '@lit-protocol/wrapped-keys'; @@ -75,7 +74,7 @@ export const testEthereumBroadcastTransactionGeneratedKey = async ( throw new Error(`signedTx isn't hex: ${signedTx}`); } - log('✅ testEthereumBroadcastTransactionGeneratedKey'); + console.log('✅ testEthereumBroadcastTransactionGeneratedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testEthereumBroadcastTransactionWrappedKey.ts b/local-tests/tests/wrapped-keys/testEthereumBroadcastTransactionWrappedKey.ts index c10593c782..47e678506d 100644 --- a/local-tests/tests/wrapped-keys/testEthereumBroadcastTransactionWrappedKey.ts +++ b/local-tests/tests/wrapped-keys/testEthereumBroadcastTransactionWrappedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api, EthereumLitTransaction } from '@lit-protocol/wrapped-keys'; @@ -80,7 +79,7 @@ export const testEthereumBroadcastTransactionWrappedKey = async ( throw new Error(`signedTx isn't hex: ${signedTx}`); } - log('✅ testEthereumBroadcastTransactionWrappedKey'); + console.log('✅ testEthereumBroadcastTransactionWrappedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testEthereumBroadcastWrappedKeyWithFetchGasParams.ts b/local-tests/tests/wrapped-keys/testEthereumBroadcastWrappedKeyWithFetchGasParams.ts index 75804f35cb..6fa8268183 100644 --- a/local-tests/tests/wrapped-keys/testEthereumBroadcastWrappedKeyWithFetchGasParams.ts +++ b/local-tests/tests/wrapped-keys/testEthereumBroadcastWrappedKeyWithFetchGasParams.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api, EthereumLitTransaction } from '@lit-protocol/wrapped-keys'; @@ -84,7 +83,7 @@ export const testEthereumBroadcastWrappedKeyWithFetchGasParams = async ( throw new Error(`signedTx isn't hex: ${signedTx}`); } - log('✅ testEthereumBroadcastWrappedKeyWithDefaultGasParams'); + console.log('✅ testEthereumBroadcastWrappedKeyWithDefaultGasParams'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testEthereumSignMessageGeneratedKey.ts b/local-tests/tests/wrapped-keys/testEthereumSignMessageGeneratedKey.ts index e85336caad..8318421c65 100644 --- a/local-tests/tests/wrapped-keys/testEthereumSignMessageGeneratedKey.ts +++ b/local-tests/tests/wrapped-keys/testEthereumSignMessageGeneratedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; @@ -90,7 +89,7 @@ export const testEthereumSignMessageGeneratedKey = async ( ); } - log('✅ testEthereumSignMessageGeneratedKey'); + console.log('✅ testEthereumSignMessageGeneratedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testEthereumSignMessageWrappedKey.ts b/local-tests/tests/wrapped-keys/testEthereumSignMessageWrappedKey.ts index 08594ace4d..d95afdba41 100644 --- a/local-tests/tests/wrapped-keys/testEthereumSignMessageWrappedKey.ts +++ b/local-tests/tests/wrapped-keys/testEthereumSignMessageWrappedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; @@ -94,7 +93,7 @@ export const testEthereumSignMessageWrappedKey = async ( ); } - log('✅ testEthereumSignMessageWrappedKey'); + console.log('✅ testEthereumSignMessageWrappedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testEthereumSignTransactionWrappedKey.ts b/local-tests/tests/wrapped-keys/testEthereumSignTransactionWrappedKey.ts index ac6c6bc983..6e0f83a3a1 100644 --- a/local-tests/tests/wrapped-keys/testEthereumSignTransactionWrappedKey.ts +++ b/local-tests/tests/wrapped-keys/testEthereumSignTransactionWrappedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; @@ -76,7 +75,7 @@ export const testEthereumSignTransactionWrappedKey = async ( throw new Error(`signedTx isn't hex: ${signedTx}`); } - log('✅ testEthereumSignTransactionWrappedKey'); + console.log('✅ testEthereumSignTransactionWrappedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testExportWrappedKey.ts b/local-tests/tests/wrapped-keys/testExportWrappedKey.ts index 50f41c142b..f59b562023 100644 --- a/local-tests/tests/wrapped-keys/testExportWrappedKey.ts +++ b/local-tests/tests/wrapped-keys/testExportWrappedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { TinnyEnvironment } from '../../setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; import { getPkpSessionSigs } from '../../setup/session-sigs/get-pkp-session-sigs'; @@ -64,7 +63,7 @@ export const testExportWrappedKey = async (devEnv: TinnyEnvironment) => { ); } - log('✅ testExportWrappedKey'); + console.log('✅ testExportWrappedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyInvalidDecryption.ts b/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyInvalidDecryption.ts index b62bc52c84..e13011e78e 100644 --- a/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyInvalidDecryption.ts +++ b/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyInvalidDecryption.ts @@ -1,8 +1,6 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { getPkpSessionSigs } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; -import { encryptString } from '@lit-protocol/encryption'; import { LIT_PREFIX } from 'packages/wrapped-keys/src/lib/constants'; import { LIT_ACTION_CID_REPOSITORY } from '../../../packages/wrapped-keys/src/lib/lit-actions-client/constants'; import { getBaseTransactionForNetwork } from './util'; @@ -25,13 +23,11 @@ export const testFailEthereumSignTransactionWrappedKeyInvalidDecryption = const alicePkpAddress = alice.authMethodOwnedPkp.ethAddress; const decryptionAccessControlCondition = getPkpAccessControlCondition(alicePkpAddress); - const { ciphertext, dataToEncryptHash } = await encryptString( - { + const { ciphertext, dataToEncryptHash } = + await devEnv.litNodeClient.encrypt({ accessControlConditions: [decryptionAccessControlCondition], - dataToEncrypt: LIT_PREFIX + privateKey, - }, - devEnv.litNodeClient - ); + dataToEncrypt: Buffer.from(LIT_PREFIX + privateKey, 'utf8'), + }); const pkpSessionSigsSigning = await getPkpSessionSigs( devEnv, @@ -79,7 +75,9 @@ export const testFailEthereumSignTransactionWrappedKeyInvalidDecryption = } } - log('✅ testFailEthereumSignTransactionWrappedKeyInvalidDecryption'); + console.log( + '✅ testFailEthereumSignTransactionWrappedKeyInvalidDecryption' + ); } finally { devEnv.releasePrivateKeyFromUser(alice); devEnv.releasePrivateKeyFromUser(bob); diff --git a/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithInvalidParam.ts b/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithInvalidParam.ts index 88b98abbf5..4ee61c0b86 100644 --- a/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithInvalidParam.ts +++ b/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithInvalidParam.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api, EthereumLitTransaction } from '@lit-protocol/wrapped-keys'; @@ -82,7 +81,7 @@ export const testFailEthereumSignTransactionWrappedKeyWithInvalidParam = async ( } } - log('✅ testFailEthereumSignTransactionWrappedKeyWithInvalidParam'); + console.log('✅ testFailEthereumSignTransactionWrappedKeyWithInvalidParam'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithMissingParam.ts b/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithMissingParam.ts index adb2b76a35..cec8b565e2 100644 --- a/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithMissingParam.ts +++ b/local-tests/tests/wrapped-keys/testFailEthereumSignTransactionWrappedKeyWithMissingParam.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { ethers } from 'ethers'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; @@ -77,7 +76,7 @@ export const testFailEthereumSignTransactionWrappedKeyWithMissingParam = async ( } } - log('✅ testFailEthereumSignTransactionWrappedKeyWithMissingParam'); + console.log('✅ testFailEthereumSignTransactionWrappedKeyWithMissingParam'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testFailStoreEncryptedKeyBatchIsAtomic.ts b/local-tests/tests/wrapped-keys/testFailStoreEncryptedKeyBatchIsAtomic.ts index 7716838387..073c500588 100644 --- a/local-tests/tests/wrapped-keys/testFailStoreEncryptedKeyBatchIsAtomic.ts +++ b/local-tests/tests/wrapped-keys/testFailStoreEncryptedKeyBatchIsAtomic.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; import { getPkpSessionSigs } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; @@ -115,7 +114,7 @@ export const testFailBatchGeneratePrivateKeysAtomic = async ( ); } catch (err) { if (err.message.includes('No keys exist for pkpAddress')) { - log('✅ testFailBatchGeneratePrivateKeysAtomic'); + console.log('✅ testFailBatchGeneratePrivateKeysAtomic'); } else { throw err; } diff --git a/local-tests/tests/wrapped-keys/testGenerateEthereumWrappedKey.ts b/local-tests/tests/wrapped-keys/testGenerateEthereumWrappedKey.ts index 5f0c22aa37..743d926692 100644 --- a/local-tests/tests/wrapped-keys/testGenerateEthereumWrappedKey.ts +++ b/local-tests/tests/wrapped-keys/testGenerateEthereumWrappedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; import { getPkpSessionSigs } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; @@ -65,7 +64,7 @@ export const testGenerateEthereumWrappedKey = async ( ); } - log('✅ testGenerateEthereumWrappedKey'); + console.log('✅ testGenerateEthereumWrappedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testGenerateSolanaWrappedKey.ts b/local-tests/tests/wrapped-keys/testGenerateSolanaWrappedKey.ts index 1bcb239ce2..532bdcd112 100644 --- a/local-tests/tests/wrapped-keys/testGenerateSolanaWrappedKey.ts +++ b/local-tests/tests/wrapped-keys/testGenerateSolanaWrappedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; import { getPkpSessionSigs } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; @@ -102,7 +101,7 @@ export const testGenerateSolanaWrappedKey = async ( ); } - log('✅ testGenerateSolanaWrappedKey'); + console.log('✅ testGenerateSolanaWrappedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testImportWrappedKey.ts b/local-tests/tests/wrapped-keys/testImportWrappedKey.ts index 9c2bdfd00c..d3fe9b7da1 100644 --- a/local-tests/tests/wrapped-keys/testImportWrappedKey.ts +++ b/local-tests/tests/wrapped-keys/testImportWrappedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; import { getPkpSessionSigs } from 'local-tests/setup/session-sigs/get-pkp-session-sigs'; @@ -53,7 +52,7 @@ export const testImportWrappedKey = async (devEnv: TinnyEnvironment) => { ); } - log('✅ testImportWrappedKey'); + console.log('✅ testImportWrappedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testSignMessageWithSolanaEncryptedKey.ts b/local-tests/tests/wrapped-keys/testSignMessageWithSolanaEncryptedKey.ts index b182f3e27d..a00ffbde5e 100644 --- a/local-tests/tests/wrapped-keys/testSignMessageWithSolanaEncryptedKey.ts +++ b/local-tests/tests/wrapped-keys/testSignMessageWithSolanaEncryptedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { api } from '@lit-protocol/wrapped-keys'; import { Keypair } from '@solana/web3.js'; @@ -77,7 +76,7 @@ export const testSignMessageWithSolanaEncryptedKey = async ( `signature: ${signature} doesn't validate for the Solana public key: ${solanaKeypair.publicKey.toString()}` ); - log('✅ testSignMessageWithSolanaEncryptedKey'); + console.log('✅ testSignMessageWithSolanaEncryptedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/local-tests/tests/wrapped-keys/testSignTransactionWithSolanaEncryptedKey.ts b/local-tests/tests/wrapped-keys/testSignTransactionWithSolanaEncryptedKey.ts index 138d2c5b53..bd9af84d06 100644 --- a/local-tests/tests/wrapped-keys/testSignTransactionWithSolanaEncryptedKey.ts +++ b/local-tests/tests/wrapped-keys/testSignTransactionWithSolanaEncryptedKey.ts @@ -1,4 +1,3 @@ -import { log } from '@lit-protocol/misc'; import { TinnyEnvironment } from 'local-tests/setup/tinny-environment'; import { SerializedTransaction, api } from '@lit-protocol/wrapped-keys'; import { @@ -135,7 +134,7 @@ export const testSignTransactionWithSolanaEncryptedKey = async ( ); } - log('✅ testSignMessageWithSolanaEncryptedKey'); + console.log('✅ testSignMessageWithSolanaEncryptedKey'); } finally { devEnv.releasePrivateKeyFromUser(alice); } diff --git a/package.json b/package.json index 60518533c8..f0f93e9b49 100644 --- a/package.json +++ b/package.json @@ -24,6 +24,7 @@ "publish:staging": "yarn node ./tools/scripts/pub.mjs --tag staging", "build:tinny": "node ./local-tests/build.mjs", "publish:tinny": "cd ./local-tests && npm publish", + "gen:context": "bun run packages/networks/src/lib/networks/vNaga/local-develop/getCustomContext.ts", "gen:docs": "node ./tools/scripts/gen-doc.mjs", "gen:readme": "yarn node ./tools/scripts/gen-readme.mjs", "tools": "yarn node ./tools/scripts/tools.mjs", @@ -40,9 +41,10 @@ "@cosmjs/proto-signing": "0.30.1", "@cosmjs/stargate": "0.30.1", "@dotenvx/dotenvx": "^1.6.4", - "@lit-protocol/contracts": "^0.0.86", + "@lit-protocol/contracts": "^0.1.7", "@metamask/eth-sig-util": "5.0.2", "@mysten/sui.js": "^0.37.1", + "@noble/curves": "^1.8.1", "@openagenda/verror": "^3.1.4", "@simplewebauthn/browser": "^7.2.0", "@simplewebauthn/typescript-types": "^7.0.0", @@ -54,21 +56,26 @@ "abitype": "^1.0.8", "base64url": "^3.0.1", "bech32": "^2.0.0", + "bs58": "^6.0.0", "cbor-web": "^9.0.2", "cross-fetch": "3.1.8", "date-and-time": "^2.4.1", "depd": "^2.0.0", + "elysia": "^1.2.25", "ethers": "^5.7.1", "jose": "^4.14.4", "micromodal": "^0.4.10", "multiformats": "^9.7.1", "pako": "^2.1.0", + "pino": "^9.6.0", + "pino-pretty": "^13.0.0", "siwe": "^2.3.2", "siwe-recap": "0.0.2-alpha.0", + "stytch": "^12.4.0", "tslib": "^2.7.0", "tweetnacl": "^1.0.3", - "tweetnacl-util": "^0.15.1", "uint8arrays": "^4.0.3", + "wagmi": "^2.14.13", "zod": "^3.24.2", "zod-validation-error": "^3.4.0" }, @@ -88,7 +95,7 @@ "@types/depd": "^1.1.36", "@types/events": "^3.0.3", "@types/jest": "27.4.1", - "@types/node": "18.19.18", + "@types/node": "20", "@types/secp256k1": "^4.0.6", "@typescript-eslint/eslint-plugin": "6.21.0", "@typescript-eslint/parser": "6.21.0", @@ -111,13 +118,15 @@ "eslint-plugin-jsx-a11y": "6.9.0", "inquirer": "^9.2.21", "ipfs-unixfs-importer": "12.0.1", - "jest": "27.5.1", + "jest": "^29.2.2", + "jest-environment-jsdom": "^29.7.0", "lerna": "^5.4.3", "live-server": "^1.2.2", "node-fetch": "^2.6.1", "node-localstorage": "^3.0.5", "nx": "17.3.0", "path": "^0.12.7", + "pino-pretty": "^13.0.0", "prettier": "^2.6.2", "ts-jest": "29.2.5", "typedoc": "^0.26.6", diff --git a/packages/access-control-conditions-schemas/project.json b/packages/access-control-conditions-schemas/project.json index 683b67e7f8..91e13e6dac 100644 --- a/packages/access-control-conditions-schemas/project.json +++ b/packages/access-control-conditions-schemas/project.json @@ -30,7 +30,7 @@ ] } }, - "testPackage": { + "test": { "executor": "@nx/jest:jest", "outputs": [ "{workspaceRoot}/coverage/packages/access-control-conditions-schemas" diff --git a/packages/access-control-conditions/src/index.ts b/packages/access-control-conditions/src/index.ts index ac7cbe3e77..1b512cc351 100644 --- a/packages/access-control-conditions/src/index.ts +++ b/packages/access-control-conditions/src/index.ts @@ -1,3 +1,4 @@ +export * from './lib/booleanExpressions'; export * from './lib/canonicalFormatter'; export * from './lib/hashing'; export * from './lib/humanizer'; diff --git a/packages/misc/src/lib/utils.ts b/packages/access-control-conditions/src/lib/booleanExpressions.ts similarity index 82% rename from packages/misc/src/lib/utils.ts rename to packages/access-control-conditions/src/lib/booleanExpressions.ts index ea7ef45536..f01e53c7d2 100644 --- a/packages/misc/src/lib/utils.ts +++ b/packages/access-control-conditions/src/lib/booleanExpressions.ts @@ -1,3 +1,4 @@ +import { OperatorAccSchema } from '@lit-protocol/access-control-conditions-schemas'; import { AccessControlConditions, EvmContractConditions, @@ -5,9 +6,8 @@ import { UnifiedAccessControlConditions, } from '@lit-protocol/types'; -export function isTokenOperator(token: any): boolean { - const OPERATORS = ['and', 'or']; // Only permissible boolean operators on the nodes - return token.hasOwnProperty('operator') && OPERATORS.includes(token.operator); +export function isTokenOperator(token: unknown): boolean { + return OperatorAccSchema.safeParse(token).success; } export function isValidBooleanExpression( diff --git a/packages/access-control-conditions/src/lib/canonicalFormatter.spec.ts b/packages/access-control-conditions/src/lib/canonicalFormatter.spec.ts index d0f36b7146..b3e925539e 100644 --- a/packages/access-control-conditions/src/lib/canonicalFormatter.spec.ts +++ b/packages/access-control-conditions/src/lib/canonicalFormatter.spec.ts @@ -1,4 +1,6 @@ +import { InvalidAccessControlConditions } from '@lit-protocol/constants'; import { ConditionItem } from '@lit-protocol/types'; + import { canonicalUnifiedAccessControlConditionFormatter, canonicalSolRpcConditionFormatter, @@ -11,7 +13,7 @@ import { // ---------- Test Cases ---------- describe('canonicalFormatter.ts', () => { it('should format canonical unified access control (ETH + SOLANA Wallet Addresses with "AND" operator)', async () => { - const EXPECTED_INPUT: Array = [ + const EXPECTED_INPUT: ConditionItem[] = [ { conditionType: 'evmBasic', contractAddress: '', @@ -74,19 +76,14 @@ describe('canonicalFormatter.ts', () => { }, ]; - const test = - canonicalUnifiedAccessControlConditionFormatter(EXPECTED_INPUT); - - expect(test).toStrictEqual(EXPECTED_OUTPUT); + expect( + canonicalUnifiedAccessControlConditionFormatter(EXPECTED_INPUT) + ).toStrictEqual(EXPECTED_OUTPUT); }); it('should FAIL to format canonical unified access control if key "conditionType" doesnt exist', async () => { - console.log = jest.fn(); - - let test; - - try { - test = canonicalUnifiedAccessControlConditionFormatter([ + expect(() => + canonicalUnifiedAccessControlConditionFormatter([ { contractAddress: '', standardContractType: '', @@ -98,84 +95,60 @@ describe('canonicalFormatter.ts', () => { value: '0x3B5dD260598B7579A0b015A1F3BBF322aDC499A2', }, }, - ]); - } catch (e) { - console.log(e); - } - - expect((console.log as any).mock.calls[0][0].message).toContain( + ]) + ).toThrow( 'You passed an invalid access control condition that is missing or has a wrong' ); }); it('should FAIL to format canonical unified access control (key: foo, value: bar)', async () => { - console.log = jest.fn(); - - const MOCK_ACCS_UNKNOWN_KEY: any = [ - { - foo: 'bar', - }, - { - conditionType: 'evmBasic', - contractAddress: '', - standardContractType: '', - chain: 'ethereum', - method: '', - parameters: [':userAddress'], - returnValueTest: { - comparator: '=', - value: '0x3B5dD260598B7579A0b015A1F3BBF322aDC499A2', + expect(() => + canonicalUnifiedAccessControlConditionFormatter([ + { + // @ts-expect-error we are testing + foo: 'bar', }, - }, - ]; - - try { - test = canonicalUnifiedAccessControlConditionFormatter( - MOCK_ACCS_UNKNOWN_KEY - ); - } catch (e) { - console.log(e); - } - - expect((console.log as any).mock.calls[0][0].name).toBe( - 'InvalidAccessControlConditions' - ); + { + conditionType: 'evmBasic', + contractAddress: '', + standardContractType: '', + chain: 'ethereum', + method: '', + parameters: [':userAddress'], + returnValueTest: { + comparator: '=', + value: '0x3B5dD260598B7579A0b015A1F3BBF322aDC499A2', + }, + }, + ]) + ).toThrow(InvalidAccessControlConditions); }); it('should throw error when format canonical sol rpc condition', async () => { - console.log = jest.fn(); - - const MOCK_ACCS_UNKNOWN_KEY: any = [ - { - foo: 'bar', - }, - { - conditionType: 'evmBasic', - contractAddress: '', - standardContractType: '', - chain: 'ethereum', - method: '', - parameters: [':userAddress'], - returnValueTest: { - comparator: '=', - value: '0x3B5dD260598B7579A0b015A1F3BBF322aDC499A2', + expect(() => + canonicalSolRpcConditionFormatter([ + { + // @ts-expect-error we are testing + foo: 'bar', }, - }, - ]; - - try { - test = canonicalSolRpcConditionFormatter(MOCK_ACCS_UNKNOWN_KEY); - } catch (e) { - console.log(e); - } - - expect((console.log as any).mock.calls[0][0].name).toBe( - 'InvalidAccessControlConditions' - ); + { + conditionType: 'evmBasic', + contractAddress: '', + standardContractType: '', + chain: 'ethereum', + method: '', + parameters: [':userAddress'], + returnValueTest: { + comparator: '=', + value: '0x3B5dD260598B7579A0b015A1F3BBF322aDC499A2', + }, + }, + ]) + ).toThrow(InvalidAccessControlConditions); }); it('should call "canonicalAccessControlConditionFormatter" in node.js', () => { - const params: any = []; + const params = [] as never[]; const OUTPUT = canonicalAccessControlConditionFormatter(params); @@ -183,7 +156,7 @@ describe('canonicalFormatter.ts', () => { }); it('should call canonicalEVMContractConditionFormatter in node.js', () => { - const params: any = []; + const params = [] as never[]; const OUTPUT = canonicalEVMContractConditionFormatter(params); @@ -191,7 +164,7 @@ describe('canonicalFormatter.ts', () => { }); it('should call canonicalCosmosConditionFormatter in node.js', () => { - const params: any = []; + const params = [] as never[]; const OUTPUT = canonicalCosmosConditionFormatter(params); @@ -199,11 +172,8 @@ describe('canonicalFormatter.ts', () => { }); it('should call canonicalResourceIdFormatter in node.js', () => { - const params: any = []; - - const OUTPUT = canonicalResourceIdFormatter(params); - - // const res = (console.log as any).mock.calls[0][0]; + // @ts-expect-error we are testing + const OUTPUT = canonicalResourceIdFormatter({}); expect(OUTPUT.baseUrl).toBe(undefined); }); diff --git a/packages/access-control-conditions/src/lib/canonicalFormatter.ts b/packages/access-control-conditions/src/lib/canonicalFormatter.ts index a10b4181c8..69640b855a 100644 --- a/packages/access-control-conditions/src/lib/canonicalFormatter.ts +++ b/packages/access-control-conditions/src/lib/canonicalFormatter.ts @@ -142,7 +142,7 @@ export const canonicalUnifiedAccessControlConditionFormatter = ( * @returns { any[] | OperatorAcc | AccsSOLV2Params | any } */ export const canonicalSolRpcConditionFormatter = ( - cond: ConditionItem, + cond: ConditionItem | ConditionItem[], requireV2Conditions: boolean = false ): any[] | OperatorAcc | ConditionItem | AccsSOLV2Params | any => { // -- if is array @@ -186,7 +186,7 @@ export const canonicalSolRpcConditionFormatter = ( cond, }, }, - 'Solana RPC Conditions have changed and there are some new fields you must include in your condition. Check the docs here: https://developer.litprotocol.com/AccessControlConditions/solRpcConditions' + 'Solana RPC Conditions have changed and there are some new fields you must include in your condition. Check the docs here: https://developer.litprotocol.com/AccessControlConditions/solRpcConditions' ); } @@ -415,7 +415,7 @@ export const canonicalEVMContractConditionFormatter = ( * @returns */ export const canonicalCosmosConditionFormatter = ( - cond: ConditionItem + cond: ConditionItem | ConditionItem[] ): any[] | OperatorAcc | AccsCOSMOSParams | any => { // -- if it's an array if (Array.isArray(cond)) { diff --git a/packages/access-control-conditions/src/lib/hashing.spec.ts b/packages/access-control-conditions/src/lib/hashing.spec.ts index ead036529f..9281186890 100644 --- a/packages/access-control-conditions/src/lib/hashing.spec.ts +++ b/packages/access-control-conditions/src/lib/hashing.spec.ts @@ -51,7 +51,6 @@ describe('hashing.ts', () => { }, }, ]); - // console.log(typeof OUTPUT); expect(new Uint8Array(OUTPUT).length).toBe(32); }); @@ -63,7 +62,6 @@ describe('hashing.ts', () => { role: '', extraData: '', }); - // console.log(typeof OUTPUT); expect(new Uint8Array(OUTPUT).length).toBe(32); }); @@ -75,7 +73,6 @@ describe('hashing.ts', () => { role: '', extraData: '', }); - // console.log(typeof OUTPUT); expect(OUTPUT).toBe( '5b36d72f2145af3617e5da2a8a626f9f42e64ed14340622bdfe1a6f0702b9e8d' ); @@ -96,7 +93,6 @@ describe('hashing.ts', () => { }, }, ]); - // console.log(typeof OUTPUT); expect(typeof OUTPUT).toBe('object'); }); @@ -152,7 +148,6 @@ describe('hashing.ts', () => { }, }, ]); - // console.log(typeof OUTPUT); expect(typeof OUTPUT).toBe('object'); }); @@ -172,7 +167,6 @@ describe('hashing.ts', () => { }, }, ]); - // console.log(typeof OUTPUT); expect(typeof OUTPUT).toBe('object'); }); }); diff --git a/packages/access-control-conditions/src/lib/hashing.ts b/packages/access-control-conditions/src/lib/hashing.ts index eecf844836..e936f145e5 100644 --- a/packages/access-control-conditions/src/lib/hashing.ts +++ b/packages/access-control-conditions/src/lib/hashing.ts @@ -1,5 +1,5 @@ import { InvalidAccessControlConditions } from '@lit-protocol/constants'; -import { log } from '@lit-protocol/misc'; +import { logger } from '@lit-protocol/logger'; import { AccessControlConditions, EvmContractConditions, @@ -10,7 +10,6 @@ import { SupportedJsonRequests, UnifiedAccessControlConditions, } from '@lit-protocol/types'; -import { uint8arrayToString } from '@lit-protocol/uint8arrays'; import { canonicalAccessControlConditionFormatter, @@ -77,7 +76,7 @@ import { // const hash = await hashUnifiedAccessControlConditions(unifiedAccs); -// return uint8arrayToString(new Uint8Array(hash), 'base16'); +// return Buffer.from(new Uint8Array(hash), 'hex'); // }; /** @@ -90,12 +89,15 @@ import { export const hashUnifiedAccessControlConditions = ( unifiedAccessControlConditions: UnifiedAccessControlConditions ): Promise => { - log('unifiedAccessControlConditions:', unifiedAccessControlConditions); + logger.info({ + msg: 'unifiedAccessControlConditions', + unifiedAccessControlConditions, + }); const conditions = unifiedAccessControlConditions.map((condition) => { return canonicalUnifiedAccessControlConditionFormatter(condition); }); - log('conditions:', conditions); + logger.info({ msg: 'conditions', conditions }); // check if there's any undefined in the conditions const hasUndefined = conditions.some((c) => c === undefined); @@ -122,7 +124,7 @@ export const hashUnifiedAccessControlConditions = ( } const toHash = JSON.stringify(conditions); - log('Hashing unified access control conditions: ', toHash); + logger.info({ msg: 'Hashing unified access control conditions', toHash }); const encoder = new TextEncoder(); const data = encoder.encode(toHash); @@ -161,7 +163,7 @@ export const hashResourceIdForSigning = async ( resourceId: JsonSigningResourceId ): Promise => { const hashed = await hashResourceId(resourceId); - return uint8arrayToString(new Uint8Array(hashed), 'base16'); + return Buffer.from(new Uint8Array(hashed)).toString('hex'); }; /** @@ -181,7 +183,7 @@ export const hashAccessControlConditions = ( ); const toHash = JSON.stringify(conds); - log('Hashing access control conditions: ', toHash); + logger.info({ msg: 'Hashing access control conditions', toHash }); const encoder = new TextEncoder(); const data = encoder.encode(toHash); @@ -205,7 +207,7 @@ export const hashEVMContractConditions = ( ); const toHash = JSON.stringify(conds); - log('Hashing evm contract conditions: ', toHash); + logger.info({ msg: 'Hashing evm contract conditions', toHash }); const encoder = new TextEncoder(); const data = encoder.encode(toHash); return crypto.subtle.digest('SHA-256', data); @@ -228,7 +230,7 @@ export const hashSolRpcConditions = ( ); const toHash = JSON.stringify(conds); - log('Hashing sol rpc conditions: ', toHash); + logger.info({ msg: 'Hashing sol rpc conditions', toHash }); const encoder = new TextEncoder(); const data = encoder.encode(toHash); @@ -301,34 +303,43 @@ export const getFormattedAccessControlConditions = ( formattedAccessControlConditions = accessControlConditions.map((c) => canonicalAccessControlConditionFormatter(c) ); - log( - 'formattedAccessControlConditions', - JSON.stringify(formattedAccessControlConditions) - ); + logger.info({ + msg: 'formattedAccessControlConditions', + formattedAccessControlConditions: JSON.stringify( + formattedAccessControlConditions + ), + }); } else if (evmContractConditions) { formattedEVMContractConditions = evmContractConditions.map((c) => canonicalEVMContractConditionFormatter(c) ); - log( - 'formattedEVMContractConditions', - JSON.stringify(formattedEVMContractConditions) - ); + logger.info({ + msg: 'formattedEVMContractConditions', + formattedEVMContractConditions: JSON.stringify( + formattedEVMContractConditions + ), + }); } else if (solRpcConditions) { // FIXME: ConditionItem is too narrow, or `solRpcConditions` is too wide // eslint-disable-next-line @typescript-eslint/no-explicit-any formattedSolRpcConditions = solRpcConditions.map((c: any) => canonicalSolRpcConditionFormatter(c) ); - log('formattedSolRpcConditions', JSON.stringify(formattedSolRpcConditions)); + logger.info({ + msg: 'formattedSolRpcConditions', + formattedSolRpcConditions: JSON.stringify(formattedSolRpcConditions), + }); } else if (unifiedAccessControlConditions) { formattedUnifiedAccessControlConditions = unifiedAccessControlConditions.map((c) => canonicalUnifiedAccessControlConditionFormatter(c) ); - log( - 'formattedUnifiedAccessControlConditions', - JSON.stringify(formattedUnifiedAccessControlConditions) - ); + logger.info({ + msg: 'formattedUnifiedAccessControlConditions', + formattedUnifiedAccessControlConditions: JSON.stringify( + formattedUnifiedAccessControlConditions + ), + }); } else { error = true; } diff --git a/packages/access-control-conditions/src/lib/humanizer.spec.ts b/packages/access-control-conditions/src/lib/humanizer.spec.ts index baa6e092a1..cace7f2697 100644 --- a/packages/access-control-conditions/src/lib/humanizer.spec.ts +++ b/packages/access-control-conditions/src/lib/humanizer.spec.ts @@ -1,5 +1,3 @@ -import * as humanizer from './humanizer'; -import { humanizeAccessControlConditions } from './humanizer'; import { AccsCOSMOSParams, AccsEVMParams, @@ -7,6 +5,9 @@ import { UnifiedAccessControlConditions, } from '@lit-protocol/types'; +import * as humanizer from './humanizer'; +import { humanizeAccessControlConditions } from './humanizer'; + // ---------- Test Cases ---------- describe('humanizer.ts', () => { it('should format sol', () => { @@ -60,7 +61,7 @@ describe('humanizer.ts', () => { expect(OUTPUT).toBe('at most'); }); it('should humanizeEvmBasicAccessControlConditions', async () => { - const INPUT: Array = [ + const INPUT: AccsEVMParams[] = [ { contractAddress: '0x7C7757a9675f06F3BE4618bB68732c4aB25D2e88', functionName: 'balanceOf', @@ -108,7 +109,7 @@ describe('humanizer.ts', () => { }); it('should humanizeSolRpcConditions', async () => { - const INPUT: Array = [ + const INPUT: AccsSOLV2Params[] = [ { method: 'getTokenAccountBalance', params: ['tn2WEWk4Kqj157XsSdmBBcjWumVhkyJECXCKPq9ReL9'], @@ -134,7 +135,7 @@ describe('humanizer.ts', () => { }); it('should humanizeCosmosConditions', async () => { - const INPUT: Array = [ + const INPUT: AccsCOSMOSParams[] = [ { conditionType: 'cosmos', path: '/cosmos/bank/v1beta1/balances/:userAddress', diff --git a/packages/access-control-conditions/src/lib/humanizer.ts b/packages/access-control-conditions/src/lib/humanizer.ts index 40de46aae7..1c99c4b8ca 100644 --- a/packages/access-control-conditions/src/lib/humanizer.ts +++ b/packages/access-control-conditions/src/lib/humanizer.ts @@ -1,7 +1,13 @@ +import { Contract } from '@ethersproject/contracts'; +import { JsonRpcProvider } from '@ethersproject/providers'; import { formatEther, formatUnits } from 'ethers/lib/utils'; -import { InvalidUnifiedConditionType } from '@lit-protocol/constants'; -import { decimalPlaces, log } from '@lit-protocol/misc'; +import { + LIT_CHAINS, + LitEVMChainKeys, + InvalidUnifiedConditionType, +} from '@lit-protocol/constants'; +import { logger } from '@lit-protocol/logger'; import { AccessControlConditions, AccsCOSMOSParams, @@ -11,6 +17,51 @@ import { UnifiedAccessControlConditions, } from '@lit-protocol/types'; +export const ERC20ABI = [ + { + constant: true, + inputs: [], + name: 'decimals', + outputs: [ + { + name: '', + type: 'uint8', + }, + ], + payable: false, + stateMutability: 'view', + type: 'function', + }, +]; + +/** + * + * Get the number of decimal places in a token + * + * @property { string } contractAddress The token contract address + * @property { LitEVMChainKeys } chain The chain on which the token is deployed + * + * @returns { number } The number of decimal places in the token + */ +export const decimalPlaces = async ({ + contractAddress, + chain, +}: { + contractAddress: string; + chain: LitEVMChainKeys; +}): Promise => { + const rpcUrl = LIT_CHAINS[chain].rpcUrls[0] as string; + + const web3 = new JsonRpcProvider({ + url: rpcUrl, + skipFetchSetup: true, + }); + + const contract = new Contract(contractAddress, ERC20ABI, web3); + + return await contract['decimals'](); +}; + /** * * Format SOL number using Ether Units @@ -58,7 +109,7 @@ export const humanizeComparator = (comparator: string): string | undefined => { const selected: string | undefined = list[comparator]; if (!selected) { - log(`Unregonized comparator ${comparator}`); + logger.info(`Unrecognized comparator ${comparator}`); return; } @@ -84,9 +135,9 @@ export const humanizeEvmBasicAccessControlConditions = async ({ tokenList?: (any | string)[]; myWalletAddress?: string; }): Promise => { - log('humanizing evm basic access control conditions'); - log('myWalletAddress', myWalletAddress); - log('accessControlConditions', accessControlConditions); + logger.info('humanizing evm basic access control conditions'); + logger.info({ msg: 'myWalletAddress', myWalletAddress }); + logger.info({ msg: 'accessControlConditions', accessControlConditions }); let fixedConditions = accessControlConditions; @@ -230,10 +281,10 @@ export const humanizeEvmBasicAccessControlConditions = async ({ chain: acc.chain, }); } catch (e) { - console.log(`Failed to get decimals for ${acc.contractAddress}`); + logger.info(`Failed to get decimals for ${acc.contractAddress}`); // is this safe to fail and continue? } } - log('decimals', decimals); + logger.info({ msg: 'decimals', decimals }); return `Owns ${humanizeComparator( acc.returnValueTest.comparator )} ${formatUnits(acc.returnValueTest.value, decimals)} of ${ @@ -284,9 +335,9 @@ export const humanizeEvmContractConditions = async ({ tokenList?: (any | string)[]; myWalletAddress?: string; }): Promise => { - log('humanizing evm contract conditions'); - log('myWalletAddress', myWalletAddress); - log('evmContractConditions', evmContractConditions); + logger.info('humanizing evm contract conditions'); + logger.info({ msg: 'myWalletAddress', myWalletAddress }); + logger.info({ msg: 'evmContractConditions', evmContractConditions }); const promises = await Promise.all( evmContractConditions.map(async (acc: any) => { @@ -345,9 +396,9 @@ export const humanizeSolRpcConditions = async ({ tokenList?: (any | string)[]; myWalletAddress?: string; }): Promise => { - log('humanizing sol rpc conditions'); - log('myWalletAddress', myWalletAddress); - log('solRpcConditions', solRpcConditions); + logger.info('humanizing sol rpc conditions'); + logger.info({ msg: 'myWalletAddress', myWalletAddress }); + logger.info({ msg: 'solRpcConditions', solRpcConditions }); const promises = await Promise.all( solRpcConditions.map(async (acc: any) => { @@ -419,9 +470,9 @@ export const humanizeCosmosConditions = async ({ tokenList?: (any | string)[]; myWalletAddress?: string; }): Promise => { - log('humanizing cosmos conditions'); - log('myWalletAddress', myWalletAddress); - log('cosmosConditions', cosmosConditions); + logger.info('humanizing cosmos conditions'); + logger.info({ msg: 'myWalletAddress', myWalletAddress }); + logger.info({ msg: 'cosmosConditions', cosmosConditions }); const promises = await Promise.all( cosmosConditions.map(async (acc: any) => { diff --git a/packages/access-control-conditions/src/lib/validator.ts b/packages/access-control-conditions/src/lib/validator.ts index ac4819d75d..78f6245a4a 100644 --- a/packages/access-control-conditions/src/lib/validator.ts +++ b/packages/access-control-conditions/src/lib/validator.ts @@ -1,5 +1,3 @@ -import { fromError, isZodErrorLike } from 'zod-validation-error'; - import { EvmBasicConditionsSchema, EvmContractConditionsSchema, @@ -7,7 +5,7 @@ import { SolRpcConditionsSchema, UnifiedConditionsSchema, } from '@lit-protocol/access-control-conditions-schemas'; -import { InvalidArgumentException } from '@lit-protocol/constants'; +import { applySchemaWithValidation } from '@lit-protocol/schemas'; import { AccessControlConditions, EvmContractConditions, @@ -16,30 +14,6 @@ import { UnifiedAccessControlConditions, } from '@lit-protocol/types'; -function formatZodError(accs: unknown, e: unknown): never { - throw new InvalidArgumentException( - { - info: { - accs, - }, - cause: isZodErrorLike(e) ? fromError(e) : e, - }, - 'Invalid access control conditions. Check error cause for more details.' - ); -} - -async function validateSchema( - accs: T, - schema: { parse: (arg: unknown) => void } -): Promise { - try { - schema.parse(accs); - } catch (e) { - formatZodError(accs, e); - } - return true; -} - /** * Validates Multiple access control conditions schema * @param { MultipleAccessControlConditions } accs @@ -47,7 +21,13 @@ async function validateSchema( export const validateAccessControlConditions = async ( accs: MultipleAccessControlConditions ): Promise => { - return validateSchema(accs, MultipleAccessControlConditionsSchema); + applySchemaWithValidation( + 'validateAccessControlConditions', + accs, + MultipleAccessControlConditionsSchema + ); + + return true; }; /** @@ -57,7 +37,13 @@ export const validateAccessControlConditions = async ( export const validateAccessControlConditionsSchema = async ( accs: AccessControlConditions ): Promise => { - return validateSchema(accs, EvmBasicConditionsSchema); + applySchemaWithValidation( + 'validateAccessControlConditionsSchema', + accs, + EvmBasicConditionsSchema + ); + + return true; }; /** @@ -67,7 +53,13 @@ export const validateAccessControlConditionsSchema = async ( export const validateEVMContractConditionsSchema = async ( accs: EvmContractConditions ): Promise => { - return validateSchema(accs, EvmContractConditionsSchema); + applySchemaWithValidation( + 'validateEVMContractConditionsSchema', + accs, + EvmContractConditionsSchema + ); + + return true; }; /** @@ -77,7 +69,13 @@ export const validateEVMContractConditionsSchema = async ( export const validateSolRpcConditionsSchema = async ( accs: SolRpcConditions ): Promise => { - return validateSchema(accs, SolRpcConditionsSchema); + applySchemaWithValidation( + 'validateSolRpcConditionsSchema', + accs, + SolRpcConditionsSchema + ); + + return true; }; /** @@ -87,5 +85,11 @@ export const validateSolRpcConditionsSchema = async ( export const validateUnifiedAccessControlConditionsSchema = async ( accs: UnifiedAccessControlConditions ): Promise => { - return validateSchema(accs, UnifiedConditionsSchema); + applySchemaWithValidation( + 'validateUnifiedAccessControlConditionsSchema', + accs, + UnifiedConditionsSchema + ); + + return true; }; diff --git a/packages/auth-browser/README.md b/packages/auth-browser/README.md deleted file mode 100644 index 4daea68c8c..0000000000 --- a/packages/auth-browser/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# Quick Start - -This submodule provides functionalities from various modules within the Lit SDK, enabling users to authenticate in the browser and connect to different blockchain networks (Ethereum, Cosmos, and Solana) with convenience, while also providing a function to disconnect from the Ethereum network. - -### node.js / browser - -``` -yarn add @lit-protocol/auth-browser -``` - -## Generate an authSig with long expiration - -``` -const expiration = new Date(Date.now() + 1000 * 60 * 60 * 99999).toISOString(); - -const authSig = LitJsSdk_authBrowser.checkAndSignAuthMessage({chain: 'ethereum', expiration: expiration}); - -``` diff --git a/packages/auth-browser/jest.config.ts b/packages/auth-browser/jest.config.ts deleted file mode 100644 index d194f8fec2..0000000000 --- a/packages/auth-browser/jest.config.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* eslint-disable */ -export default { - displayName: 'auth-browser', - preset: '../../jest.preset.js', - transform: { - '^.+\\.[tj]s$': [ - 'babel-jest', - { - cwd: '/Users/anson/Projects/js-sdk-master/packages/auth-browser', - }, - ], - }, - transformIgnorePatterns: ['node_modules/(?!(@walletconnect)/)'], - moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: '../../coverage/packages/auth-browser', - setupFilesAfterEnv: ['../../jest.setup.js'], -}; diff --git a/packages/auth-browser/package.json b/packages/auth-browser/package.json deleted file mode 100644 index 26e3ae2a49..0000000000 --- a/packages/auth-browser/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@lit-protocol/auth-browser", - "license": "MIT", - "homepage": "https://github.com/Lit-Protocol/js-sdk", - "repository": { - "type": "git", - "url": "https://github.com/LIT-Protocol/js-sdk" - }, - "keywords": [ - "library" - ], - "bugs": { - "url": "https://github.com/LIT-Protocol/js-sdk/issues" - }, - "type": "commonjs", - "publishConfig": { - "access": "public", - "directory": "../../dist/packages/auth-browser" - }, - "gitHead": "0d7334c2c55f448e91fe32f29edc5db8f5e09e4b", - "peerDependencies": { - "@walletconnect/ethereum-provider": "2.9.2", - "@walletconnect/modal": "2.6.1", - "siwe": "^2.0.5", - "tweetnacl": "^1.0.3", - "tweetnacl-util": "^0.13.3", - "util": "^0.12.4", - "web-vitals": "^3.0.4", - "@lit-protocol/contracts": "^0.0.74" - }, - "tags": [ - "browser" - ], - "version": "8.0.0-alpha.0", - "main": "./dist/src/index.js", - "typings": "./dist/src/index.d.ts" -} diff --git a/packages/auth-browser/src/index.ts b/packages/auth-browser/src/index.ts deleted file mode 100644 index 281927d608..0000000000 --- a/packages/auth-browser/src/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export * from './lib/auth-browser'; - -// -- all the chains you can connect to -export * as ethConnect from './lib/chains/eth'; -export { disconnectWeb3 } from './lib/chains/eth'; diff --git a/packages/auth-browser/src/lib/auth-browser.ts b/packages/auth-browser/src/lib/auth-browser.ts deleted file mode 100644 index 75be28f20c..0000000000 --- a/packages/auth-browser/src/lib/auth-browser.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { - ALL_LIT_CHAINS, - UnsupportedChainException, - VMTYPE, -} from '@lit-protocol/constants'; -import { AuthCallbackParams, AuthSig } from '@lit-protocol/types'; - -import { checkAndSignEVMAuthMessage } from './chains/eth'; - -/** - * !! NOTE !! - * This function is purely used for crafting the authSig for access control conditions & decryption. For SessionSigs, you can pass the `authSig` as `jsParams` - * or Eth Wallet Auth Method for `signSessionKey` and claiming, but you won't be able to use this to add resource ability requests in the SIWE message. Instead, you should provide your own signer to the authNeededCallback parameter for the getSessionSigs method. - * - * Check for an existing cryptographic authentication signature and create one of it does not exist. This is used to prove ownership of a given crypto wallet address to the Lit nodes. The result is stored in LocalStorage so the user doesn't have to sign every time they perform an operation. - * - * @param { AuthCallbackParams } - * - * @returns { AuthSig } The AuthSig created or retrieved - */ -export const checkAndSignAuthMessage = ({ - chain, - resources, - switchChain, - expiration, - uri, - walletConnectProjectId, - nonce, -}: AuthCallbackParams): Promise => { - const chainInfo = ALL_LIT_CHAINS[chain]; - - // -- validate: if chain info not found - if (!chainInfo) { - throw new UnsupportedChainException( - { - info: { - chain, - }, - }, - `Unsupported chain selected. Please select one of: %s`, - Object.keys(ALL_LIT_CHAINS) - ); - } - - if (!expiration) { - // set default of 1 week - expiration = new Date(Date.now() + 1000 * 60 * 60 * 24 * 7).toISOString(); - } - - // -- check and sign auth message based on chain - if (chainInfo.vmType === VMTYPE.EVM) { - return checkAndSignEVMAuthMessage({ - chain, - resources, - switchChain, - expiration, - uri, - walletConnectProjectId, - nonce, - }); - } - - // Else, throw an error - throw new UnsupportedChainException( - { - info: { - chain, - }, - }, - `vmType not found for this chain: %s. This should not happen. Unsupported chain selected. Please select one of: %s`, - chain, - Object.keys(ALL_LIT_CHAINS) - ); -}; diff --git a/packages/auth-browser/tsconfig.spec.json b/packages/auth-browser/tsconfig.spec.json deleted file mode 100644 index 855f604fb0..0000000000 --- a/packages/auth-browser/tsconfig.spec.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "module": "commonjs", - "types": ["jest", "node"] - }, - "include": ["jest.config.ts", "**/*.test.ts", "**/*.spec.ts", "**/*.d.ts"], - "exclude": ["node_modules", "tmp"] -} diff --git a/packages/auth-helpers/src/lib/generate-auth-sig.ts b/packages/auth-helpers/src/lib/generate-auth-sig.ts index 49692f89c8..4cb56c3a5e 100644 --- a/packages/auth-helpers/src/lib/generate-auth-sig.ts +++ b/packages/auth-helpers/src/lib/generate-auth-sig.ts @@ -1,6 +1,8 @@ -import { AuthSig, SignerLike } from '@lit-protocol/types'; import { ethers } from 'ethers'; +import { InvalidArgumentException } from '@lit-protocol/constants'; +import { AuthSig, SignerLike } from '@lit-protocol/types'; + /** * Generate an AuthSig object using the signer. * @@ -26,7 +28,16 @@ export const generateAuthSig = async ({ algo?: 'ed25519'; }): Promise => { if (!signer?.signMessage) { - throw new Error('signer does not have a signMessage method'); + throw new InvalidArgumentException( + { + info: { + signer, + address, + algo, + }, + }, + 'signer does not have a signMessage method' + ); } const signature = await signer.signMessage(toSign); @@ -41,7 +52,16 @@ export const generateAuthSig = async ({ // If address is still not available, throw an error if (!address) { - throw new Error('address is required'); + throw new InvalidArgumentException( + { + info: { + signer, + address, + algo, + }, + }, + 'address is required' + ); } return { diff --git a/packages/auth-helpers/src/lib/models.ts b/packages/auth-helpers/src/lib/models.ts index 6a59af56b0..22b63326c9 100644 --- a/packages/auth-helpers/src/lib/models.ts +++ b/packages/auth-helpers/src/lib/models.ts @@ -1,13 +1,3 @@ -import { LIT_ABILITY_VALUES } from '@lit-protocol/constants'; -import { ILitResource } from '@lit-protocol/types'; - -// This is here to prevent circular dependency issue -export interface AuthSig { - sig: any; - derivedVia: string; - signedMessage: string; - address: string; -} export type PlainJSON = | boolean | number @@ -18,18 +8,3 @@ export type AttenuationsObject = { [key: string]: { [key: string]: Array }; }; export type CID = string; - -/** - * A LIT resource ability is a combination of a LIT resource and a LIT ability. - * It specifies which LIT specific ability is being requested to be performed - * on the specified LIT resource. - * - * @description This object does NOT guarantee compatibility between the - * specified LIT resource and the specified LIT ability, and will be validated by - * the LIT-internal systems. - */ -export type LitResourceAbilityRequest = { - resource: ILitResource; - ability: LIT_ABILITY_VALUES; - data?: any; -}; diff --git a/packages/auth-helpers/src/lib/recap/recap-session-capability-object.spec.ts b/packages/auth-helpers/src/lib/recap/recap-session-capability-object.spec.ts index b2884d1fc9..c506a21825 100644 --- a/packages/auth-helpers/src/lib/recap/recap-session-capability-object.spec.ts +++ b/packages/auth-helpers/src/lib/recap/recap-session-capability-object.spec.ts @@ -1,14 +1,16 @@ import { SiweMessage } from 'siwe'; + import { LIT_ABILITY, LIT_RESOURCE_PREFIX, LIT_NAMESPACE, LIT_RECAP_ABILITY, } from '@lit-protocol/constants'; + import { LitAccessControlConditionResource } from '../resources'; import { RecapSessionCapabilityObject } from './recap-session-capability-object'; -const isClass = (v: any) => { +const isClass = (v: unknown) => { return typeof v === 'function' && /^\s*class\s+/.test(v.toString()); }; diff --git a/packages/auth-helpers/src/lib/recap/recap-session-capability-object.ts b/packages/auth-helpers/src/lib/recap/recap-session-capability-object.ts index e9956278e7..847dc69d67 100644 --- a/packages/auth-helpers/src/lib/recap/recap-session-capability-object.ts +++ b/packages/auth-helpers/src/lib/recap/recap-session-capability-object.ts @@ -1,24 +1,27 @@ +import { SiweMessage } from 'siwe'; +import { Recap } from 'siwe-recap'; + import { InvalidArgumentException, LIT_ABILITY_VALUES, } from '@lit-protocol/constants'; -import { ILitResource, ISessionCapabilityObject } from '@lit-protocol/types'; -import depd from 'depd'; -import { SiweMessage } from 'siwe'; -import { Recap } from 'siwe-recap'; -import { AttenuationsObject, CID as CIDString, PlainJSON } from '../models'; -import { sanitizeSiweMessage } from '../siwe/siwe-helper'; +import { + AttenuationsObject, + CID as CIDString, + DefinedJson, + ILitResource, + ISessionCapabilityObject, +} from '@lit-protocol/types'; + import { getRecapNamespaceAndAbility } from './utils'; +import { sanitizeSiweMessage } from '../siwe/siwe-helper'; -const deprecated = depd('lit-js-sdk:auth-recap:session-capability-object'); +export type Restriction = Record; export class RecapSessionCapabilityObject implements ISessionCapabilityObject { private _inner: Recap; - constructor( - att: AttenuationsObject = {}, - prf: Array | Array = [] - ) { + constructor(att: AttenuationsObject = {}, prf: CIDString[] | string[] = []) { this._inner = new Recap(att, prf); } @@ -26,7 +29,7 @@ export class RecapSessionCapabilityObject implements ISessionCapabilityObject { const recap = Recap.decode_urn(encoded); return new this( recap.attenuations, - recap.proofs.map((cid: any) => cid.toString()) + recap.proofs.map((cid) => cid.toString()) ); } @@ -34,7 +37,7 @@ export class RecapSessionCapabilityObject implements ISessionCapabilityObject { const recap = Recap.extract_and_verify(siwe); return new this( recap.attenuations, - recap.proofs.map((cid: any) => cid.toString()) + recap.proofs.map((cid) => cid.toString()) ); } @@ -42,8 +45,8 @@ export class RecapSessionCapabilityObject implements ISessionCapabilityObject { return this._inner.attenuations; } - get proofs(): Array { - return this._inner.proofs.map((cid: any) => cid.toString()); + get proofs(): CIDString[] { + return this._inner.proofs.map((cid) => cid.toString()); } get statement(): string { @@ -56,9 +59,9 @@ export class RecapSessionCapabilityObject implements ISessionCapabilityObject { addAttenuation( resource: string, - namespace: string = '*', - name: string = '*', - restriction: { [key: string]: PlainJSON } = {} + namespace = '*', + name = '*', + restriction: Restriction = {} ) { return this._inner.addAttenuation(resource, namespace, name, restriction); } @@ -75,7 +78,7 @@ export class RecapSessionCapabilityObject implements ISessionCapabilityObject { addCapabilityForResource( litResource: ILitResource, ability: LIT_ABILITY_VALUES, - data = {} + data: Restriction = {} ): void { // Validate Lit ability is compatible with the Lit resource. if (!litResource.isValidLitAbility(ability)) { diff --git a/packages/auth-helpers/src/lib/resources.ts b/packages/auth-helpers/src/lib/resources.ts index 1c729f4bab..a5a494a9ce 100644 --- a/packages/auth-helpers/src/lib/resources.ts +++ b/packages/auth-helpers/src/lib/resources.ts @@ -7,7 +7,6 @@ import { LIT_RESOURCE_PREFIX_VALUES, } from '@lit-protocol/constants'; import { AccessControlConditions, ILitResource } from '@lit-protocol/types'; -import { uint8arrayToString } from '@lit-protocol/uint8arrays'; import { formatPKPResource } from './utils'; abstract class LitResourceBase { @@ -73,9 +72,8 @@ export class LitAccessControlConditionResource } const hashedAccs = await hashAccessControlConditions(accs); - const hashedAccsStr = uint8arrayToString( - new Uint8Array(hashedAccs), - 'base16' + const hashedAccsStr = Buffer.from(new Uint8Array(hashedAccs)).toString( + 'hex' ); const resourceString = `${hashedAccsStr}/${dataToEncryptHash}`; diff --git a/packages/auth-helpers/src/lib/session-capability-object.ts b/packages/auth-helpers/src/lib/session-capability-object.ts index babdd3cc75..4e766e559c 100644 --- a/packages/auth-helpers/src/lib/session-capability-object.ts +++ b/packages/auth-helpers/src/lib/session-capability-object.ts @@ -1,5 +1,7 @@ import { SiweMessage } from 'siwe'; + import { ISessionCapabilityObject } from '@lit-protocol/types'; + import { AttenuationsObject, CID } from './models'; import { RecapSessionCapabilityObject } from './recap/recap-session-capability-object'; @@ -19,7 +21,7 @@ import { RecapSessionCapabilityObject } from './recap/recap-session-capability-o */ export function newSessionCapabilityObject( attenuations: AttenuationsObject = {}, - proof: Array = [] + proof: CID[] = [] ): ISessionCapabilityObject { return new RecapSessionCapabilityObject(attenuations, proof); } diff --git a/packages/auth-helpers/src/lib/siwe/create-siwe-message.ts b/packages/auth-helpers/src/lib/siwe/create-siwe-message.ts index 55c5f6525a..c50faace1d 100644 --- a/packages/auth-helpers/src/lib/siwe/create-siwe-message.ts +++ b/packages/auth-helpers/src/lib/siwe/create-siwe-message.ts @@ -1,5 +1,6 @@ import { SiweMessage } from 'siwe'; +import { InvalidArgumentException } from '@lit-protocol/constants'; import { BaseSiweMessage, CapacityDelegationFields, @@ -23,7 +24,14 @@ export const createSiweMessage = async ( ): Promise => { // -- validations if (!params.walletAddress) { - throw new Error('walletAddress is required'); + throw new InvalidArgumentException( + { + info: { + params, + }, + }, + 'walletAddress is required' + ); } const ONE_WEEK_FROM_NOW = new Date( @@ -62,11 +70,12 @@ export const createSiweMessage = async ( // resource: new LitRLIResource(ccParams.capacityTokenId ?? '*'), // ability: LIT_ABILITY.RateLimitIncreaseAuth, - // @ts-ignore - TODO: new resource to be used + // @ts-expect-error - TODO: new resource to be used resource: null, - // @ts-ignore - TODO: new ability to be used + // @ts-expect-error - TODO: new ability to be used ability: null, + // @ts-expect-error Complaining because of index signature in destination data: capabilities, }, ]; @@ -75,7 +84,14 @@ export const createSiweMessage = async ( // -- add recap resources if needed if (params.resources) { if (!params.litNodeClient) { - throw new Error('litNodeClient is required'); + throw new InvalidArgumentException( + { + info: { + params, + }, + }, + 'litNodeClient is required' + ); } siweMessage = await addRecapToSiweMessage({ @@ -112,7 +128,14 @@ export const createSiweMessageWithCapacityDelegation = async ( params: WithCapacityDelegation ) => { if (!params.litNodeClient) { - throw new Error('litNodeClient is required'); + throw new InvalidArgumentException( + { + info: { + params, + }, + }, + 'litNodeClient is required' + ); } return createSiweMessage({ diff --git a/packages/auth-helpers/src/lib/siwe/siwe-helper.ts b/packages/auth-helpers/src/lib/siwe/siwe-helper.ts index 8a4f63404c..918d2bc6d8 100644 --- a/packages/auth-helpers/src/lib/siwe/siwe-helper.ts +++ b/packages/auth-helpers/src/lib/siwe/siwe-helper.ts @@ -1,5 +1,9 @@ import { SiweMessage } from 'siwe'; +import { + InvalidArgumentException, + UnknownError, +} from '@lit-protocol/constants'; import { CapacityDelegationFields, CapacityDelegationRequest, @@ -8,6 +12,7 @@ import { ISessionCapabilityObject, LitResourceAbilityRequest, } from '@lit-protocol/types'; + import { RecapSessionCapabilityObject } from '../recap/recap-session-capability-object'; /** @@ -87,11 +92,27 @@ export const addRecapToSiweMessage = async ({ litNodeClient: ILitNodeClient; }) => { if (!resources || resources.length < 1) { - throw new Error('resources is required'); + throw new InvalidArgumentException( + { + info: { + resources, + siweMessage, + }, + }, + 'resources is required' + ); } if (!litNodeClient) { - throw new Error('litNodeClient is required'); + throw new InvalidArgumentException( + { + info: { + resources, + siweMessage, + }, + }, + 'litNodeClient is required' + ); } for (const request of resources) { @@ -102,7 +123,7 @@ export const addRecapToSiweMessage = async ({ recapObject.addCapabilityForResource( request.resource, request.ability, - request.data || null + request.data ); const verified = recapObject.verifyCapabilitiesForResource( @@ -111,7 +132,13 @@ export const addRecapToSiweMessage = async ({ ); if (!verified) { - throw new Error( + throw new UnknownError( + { + info: { + recapObject, + request, + }, + }, `Failed to verify capabilities for resource: "${request.resource}" and ability: "${request.ability}` ); } diff --git a/packages/auth-helpers/src/lib/utils.ts b/packages/auth-helpers/src/lib/utils.ts index da935f6a7c..f65f33faa6 100644 --- a/packages/auth-helpers/src/lib/utils.ts +++ b/packages/auth-helpers/src/lib/utils.ts @@ -1,3 +1,5 @@ +import { InvalidArgumentException } from '@lit-protocol/constants'; + /** * Formats the resource ID to a 32-byte hex string. * @@ -21,7 +23,14 @@ export function formatPKPResource(resource: string): string { // Throw an error if the resource length exceeds 64 characters if (fixedResource.length > 64) { - throw new Error('Resource ID exceeds 64 characters (32 bytes) in length.'); + throw new InvalidArgumentException( + { + info: { + resource, + }, + }, + 'Resource ID exceeds 64 characters (32 bytes) in length.' + ); } /** diff --git a/packages/auth-browser/.babelrc b/packages/auth/.babelrc similarity index 100% rename from packages/auth-browser/.babelrc rename to packages/auth/.babelrc diff --git a/packages/auth-browser/.eslintrc.json b/packages/auth/.eslintrc.json similarity index 100% rename from packages/auth-browser/.eslintrc.json rename to packages/auth/.eslintrc.json diff --git a/packages/auth/README.md b/packages/auth/README.md new file mode 100644 index 0000000000..395853033d --- /dev/null +++ b/packages/auth/README.md @@ -0,0 +1,15 @@ +# Quick Start + +This module provides management of auth methods that are used to control LIT PKPs, and authorization primitives. + +### AuthManager + +An AuthManager works with `authenticators` (migrated from: @lit-protocol/lit-auth-client) to generate auth material using various methods (see: authenticators documentation). + +The `AuthManager` then uses that auth material to create session credentials, and caches the resulting credentials for use with LIT network services. It also validates auth material and session material, and will attempt to get new auth material any time it detects that existing cached credentials have expired. + +### node.js / browser + +``` +yarn add @lit-protocol/auth +``` diff --git a/packages/lit-node-client-nodejs/jest.config.ts b/packages/auth/jest.config.ts similarity index 53% rename from packages/lit-node-client-nodejs/jest.config.ts rename to packages/auth/jest.config.ts index 5cd97335e7..144dabf42b 100644 --- a/packages/lit-node-client-nodejs/jest.config.ts +++ b/packages/auth/jest.config.ts @@ -1,6 +1,6 @@ /* eslint-disable */ export default { - displayName: 'lit-node-client-nodejs', + displayName: 'auth', preset: '../../jest.preset.js', globals: { 'ts-jest': { @@ -11,6 +11,11 @@ export default { '^.+\\.[t]s$': 'ts-jest', }, moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: '../../coverage/packages/lit-node-client-nodejs', + coverageDirectory: '../../coverage/packages/auth', + moduleNameMapper: { + '^ipfs-unixfs-importer': + 'node_modules/ipfs-unixfs-importer/dist/index.min.js', + '^blockstore-core': 'node_modules/blockstore-core/dist/index.min.js', + }, setupFilesAfterEnv: ['../../jest.setup.js'], }; diff --git a/packages/encryption/package.json b/packages/auth/package.json similarity index 82% rename from packages/encryption/package.json rename to packages/auth/package.json index 0c7e2461a7..8af8f27aa2 100644 --- a/packages/encryption/package.json +++ b/packages/auth/package.json @@ -1,5 +1,5 @@ { - "name": "@lit-protocol/encryption", + "name": "@lit-protocol/auth", "license": "MIT", "homepage": "https://github.com/Lit-Protocol/js-sdk", "repository": { @@ -15,12 +15,15 @@ "type": "commonjs", "publishConfig": { "access": "public", - "directory": "../../dist/packages/encryption" + "directory": "../../dist/packages/auth" }, "gitHead": "0d7334c2c55f448e91fe32f29edc5db8f5e09e4b", "tags": [ "universal" ], + "peerDependencies": { + "tslib": "^2.3.0" + }, "browser": { "crypto": false, "stream": false diff --git a/packages/misc/project.json b/packages/auth/project.json similarity index 57% rename from packages/misc/project.json rename to packages/auth/project.json index 30a9e012c5..e024850b05 100644 --- a/packages/misc/project.json +++ b/packages/auth/project.json @@ -1,17 +1,17 @@ { - "name": "misc", + "name": "auth", "$schema": "../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "packages/misc/src", + "sourceRoot": "packages/auth/src", "projectType": "library", "targets": { "build": { "executor": "@nx/js:tsc", "outputs": ["{options.outputPath}"], "options": { - "outputPath": "dist/packages/misc", - "main": "packages/misc/src/index.ts", - "tsConfig": "packages/misc/tsconfig.lib.json", - "assets": ["packages/misc/*.md"], + "outputPath": "dist/packages/auth", + "main": "packages/auth/src/index.ts", + "tsConfig": "packages/auth/tsconfig.lib.json", + "assets": ["packages/auth/*.md"], "updateBuildableProjectDepsInPackageJson": true } }, @@ -19,14 +19,14 @@ "executor": "@nx/linter:eslint", "outputs": ["{options.outputFile}"], "options": { - "lintFilePatterns": ["packages/misc/**/*.ts"] + "lintFilePatterns": ["packages/auth/**/*.ts"] } }, "test": { "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/misc"], + "outputs": ["{workspaceRoot}/coverage/packages/auth"], "options": { - "jestConfig": "packages/misc/jest.config.ts", + "jestConfig": "packages/auth/jest.config.ts", "passWithNoTests": true } } diff --git a/packages/auth/src/index.ts b/packages/auth/src/index.ts new file mode 100644 index 0000000000..d0942dafb3 --- /dev/null +++ b/packages/auth/src/index.ts @@ -0,0 +1,15 @@ +import { getAuthManager } from './lib/auth-manager'; +import * as authenticators from './lib/authenticators'; +import { LitRelay } from './lib/authenticators'; +import { localStorage } from './lib/storage'; + +import type { LitAuthStorageProvider } from './lib/storage/types'; +import type { LitAuthData } from './lib/types'; + +export type { LitAuthStorageProvider, LitAuthData }; + +export const storagePlugins = { localStorage }; + +export { authenticators, getAuthManager, LitRelay }; + +export { getAuthIdByAuthMethod } from './lib/authenticators/utils'; diff --git a/packages/auth/src/lib/auth-manager.ts b/packages/auth/src/lib/auth-manager.ts new file mode 100644 index 0000000000..8ce1d540d0 --- /dev/null +++ b/packages/auth/src/lib/auth-manager.ts @@ -0,0 +1,31 @@ +import { generateSessionKeyPair } from '@lit-protocol/crypto'; + +import type { LitAuthStorageProvider } from './storage/types'; +import type { LitAuthData } from './types'; + +interface LitAuthManagerConfig { + storage: LitAuthStorageProvider; +} + +async function tryGetCachedAuthData() { + // Use `storage` to see if there is cached auth data + // If error thrown trying to get it, error to caller or ?? +} + +async function tryGetAuthMethodFromAuthenticator() { + // Use authenticator `getAuthMethod()` method to get a new auth method +} + +function validateAuthData(authData: LitAuthData) { + // Validate auth data is not expired, and is well-formed +} + +async function signSessionKey({ storage }: LitAuthManagerConfig) { + // Use LitNodeClient to signSessionKey with AuthData +} + +export function getAuthManager({ storage }: LitAuthManagerConfig) { + return { + getAuthContext() {}, + }; +} diff --git a/packages/lit-auth-client/src/lib/providers/AppleProvider.ts b/packages/auth/src/lib/authenticators/AppleAuthenticator.ts similarity index 95% rename from packages/lit-auth-client/src/lib/providers/AppleProvider.ts rename to packages/auth/src/lib/authenticators/AppleAuthenticator.ts index 3a6ef00e9a..661c17d637 100644 --- a/packages/lit-auth-client/src/lib/providers/AppleProvider.ts +++ b/packages/auth/src/lib/authenticators/AppleAuthenticator.ts @@ -1,24 +1,26 @@ -import { - AuthMethod, - BaseProviderOptions, - OAuthProviderOptions, -} from '@lit-protocol/types'; +import { ethers } from 'ethers'; +import * as jose from 'jose'; + import { AUTH_METHOD_TYPE, UnauthorizedException, UnknownError, } from '@lit-protocol/constants'; +import { + AuthMethod, + BaseProviderOptions, + OAuthProviderOptions, +} from '@lit-protocol/types'; + +import { BaseAuthenticator } from './BaseAuthenticator'; import { prepareLoginUrl, parseLoginParams, getStateParam, decode, -} from '../utils'; -import { BaseProvider } from './BaseProvider'; -import { ethers } from 'ethers'; -import * as jose from 'jose'; +} from './utils'; -export default class AppleProvider extends BaseProvider { +export class AppleAuthenticator extends BaseAuthenticator { /** * The redirect URI that Lit's login server should send the user back to */ @@ -140,7 +142,7 @@ export default class AppleProvider extends BaseProvider { * @returns {Promise} - Auth method id */ public async getAuthMethodId(authMethod: AuthMethod): Promise { - return AppleProvider.authMethodId(authMethod); + return AppleAuthenticator.authMethodId(authMethod); } public static async authMethodId(authMethod: AuthMethod): Promise { const tokenPayload = jose.decodeJwt(authMethod.accessToken); diff --git a/packages/lit-auth-client/src/lib/providers/BaseProvider.ts b/packages/auth/src/lib/authenticators/BaseAuthenticator.ts similarity index 97% rename from packages/lit-auth-client/src/lib/providers/BaseProvider.ts rename to packages/auth/src/lib/authenticators/BaseAuthenticator.ts index ce8cc5c313..659fefdfc9 100644 --- a/packages/lit-auth-client/src/lib/providers/BaseProvider.ts +++ b/packages/auth/src/lib/authenticators/BaseAuthenticator.ts @@ -1,11 +1,9 @@ -import depd from 'depd'; import { ethers } from 'ethers'; import { AUTH_METHOD_TYPE_VALUES, InvalidArgumentException, LitNodeClientNotReadyError, - ParamsMissingError, UnknownError, } from '@lit-protocol/constants'; import { LitContracts } from '@lit-protocol/contracts-sdk'; @@ -22,11 +20,9 @@ import { MintRequestBody, } from '@lit-protocol/types'; -import { validateMintRequestBody } from '../validators'; +import { validateMintRequestBody } from './validators'; -const deprecated = depd('lit-js-sdk:auth-browser:base-provider'); - -export abstract class BaseProvider { +export abstract class BaseAuthenticator { /** * Relay server to subsidize minting of PKPs */ @@ -65,7 +61,7 @@ export abstract class BaseProvider { */ abstract getAuthMethodId( authMethod: AuthMethod, - options?: any + options?: unknown ): Promise; /** diff --git a/packages/lit-auth-client/src/lib/providers/DiscordProvider.ts b/packages/auth/src/lib/authenticators/DiscordAuthenticator.ts similarity index 98% rename from packages/lit-auth-client/src/lib/providers/DiscordProvider.ts rename to packages/auth/src/lib/authenticators/DiscordAuthenticator.ts index 27911e9e20..9103893ad5 100644 --- a/packages/lit-auth-client/src/lib/providers/DiscordProvider.ts +++ b/packages/auth/src/lib/authenticators/DiscordAuthenticator.ts @@ -1,24 +1,26 @@ -import { - AuthMethod, - BaseProviderOptions, - OAuthProviderOptions, -} from '@lit-protocol/types'; +import { ethers } from 'ethers'; + import { AUTH_METHOD_TYPE, UnauthorizedException, UnknownError, } from '@lit-protocol/constants'; -import { BaseProvider } from './BaseProvider'; +import { + AuthMethod, + BaseProviderOptions, + OAuthProviderOptions, +} from '@lit-protocol/types'; + +import { BaseAuthenticator } from './BaseAuthenticator'; import { prepareLoginUrl, parseLoginParams, getStateParam, decode, LIT_LOGIN_GATEWAY, -} from '../utils'; -import { ethers } from 'ethers'; +} from './utils'; -export default class DiscordProvider extends BaseProvider { +export class DiscordAuthenticator extends BaseAuthenticator { /** * The redirect URI that Lit's login server should send the user back to */ diff --git a/packages/lit-auth-client/src/lib/providers/GoogleProvider.ts b/packages/auth/src/lib/authenticators/GoogleAuthenticator.ts similarity index 96% rename from packages/lit-auth-client/src/lib/providers/GoogleProvider.ts rename to packages/auth/src/lib/authenticators/GoogleAuthenticator.ts index 34f7b8ba2d..81bab7844f 100644 --- a/packages/lit-auth-client/src/lib/providers/GoogleProvider.ts +++ b/packages/auth/src/lib/authenticators/GoogleAuthenticator.ts @@ -12,16 +12,16 @@ import { OAuthProviderOptions, } from '@lit-protocol/types'; +import { BaseAuthenticator } from './BaseAuthenticator'; import { prepareLoginUrl, parseLoginParams, getStateParam, decode, LIT_LOGIN_GATEWAY, -} from '../utils'; -import { BaseProvider } from './BaseProvider'; +} from './utils'; -export default class GoogleProvider extends BaseProvider { +export class GoogleAuthenticator extends BaseAuthenticator { /** * The redirect URI that Lit's login server should send the user back to */ @@ -211,7 +211,7 @@ export default class GoogleProvider extends BaseProvider { * @returns {Promise} - Auth method id */ public async getAuthMethodId(authMethod: AuthMethod): Promise { - return GoogleProvider.authMethodId(authMethod); + return GoogleAuthenticator.authMethodId(authMethod); } public static async authMethodId(authMethod: AuthMethod): Promise { diff --git a/packages/lit-auth-client/src/lib/providers/WebAuthnProvider.ts b/packages/auth/src/lib/authenticators/WebAuthnAuthenticator.ts similarity index 81% rename from packages/lit-auth-client/src/lib/providers/WebAuthnProvider.ts rename to packages/auth/src/lib/authenticators/WebAuthnAuthenticator.ts index 3bdad5f431..957579b06f 100644 --- a/packages/lit-auth-client/src/lib/providers/WebAuthnProvider.ts +++ b/packages/auth/src/lib/authenticators/WebAuthnAuthenticator.ts @@ -1,26 +1,29 @@ import { - AuthMethod, - BaseProviderOptions, - MintRequestBody, - WebAuthnProviderOptions, -} from '@lit-protocol/types'; + PublicKeyCredentialCreationOptionsJSON, + UserVerificationRequirement, + RegistrationResponseJSON, +} from '@simplewebauthn/typescript-types'; +import base64url from 'base64url'; +import { ethers } from 'ethers'; + import { AUTH_METHOD_TYPE, + InvalidArgumentException, RemovedFunctionError, UnknownError, WrongParamFormat, } from '@lit-protocol/constants'; -import { ethers } from 'ethers'; import { - PublicKeyCredentialCreationOptionsJSON, - UserVerificationRequirement, -} from '@simplewebauthn/typescript-types'; -import base64url from 'base64url'; -import { getRPIdFromOrigin, parseAuthenticatorData } from '../utils'; -import { BaseProvider } from './BaseProvider'; -import { RegistrationResponseJSON } from '@simplewebauthn/typescript-types'; + AuthMethod, + BaseProviderOptions, + MintRequestBody, + WebAuthnProviderOptions, +} from '@lit-protocol/types'; -export default class WebAuthnProvider extends BaseProvider { +import { BaseAuthenticator } from './BaseAuthenticator'; +import { getRPIdFromOrigin, parseAuthenticatorData } from './utils'; + +export class WebAuthnAuthenticator extends BaseAuthenticator { /** * Name of relying party. Defaults to "lit" */ @@ -68,7 +71,7 @@ export default class WebAuthnProvider extends BaseProvider { // Get auth method pub key const authMethodPubkey = - WebAuthnProvider.getPublicKeyFromRegistration(attResp); + WebAuthnAuthenticator.getPublicKeyFromRegistration(attResp); // Format args for relay server const defaultArgs = { @@ -176,7 +179,7 @@ export default class WebAuthnProvider extends BaseProvider { * @returns {Promise} - Auth method id */ public async getAuthMethodId(authMethod: AuthMethod): Promise { - return WebAuthnProvider.authMethodId(authMethod, this.rpName); + return WebAuthnAuthenticator.authMethodId(authMethod, this.rpName); } public static async authMethodId( @@ -228,8 +231,8 @@ export default class WebAuthnProvider extends BaseProvider { // Parse the buffer to reconstruct the object // Buffer is COSE formatted, utilities decode the buffer into json, and extract the public key information - const authenticationResponse: any = - parseAuthenticatorData(attestationBuffer); + const authenticationResponse = parseAuthenticatorData(attestationBuffer); + assertAuthenticationResponse(authenticationResponse); // Public key in cose format to register the auth method const publicKeyCoseBuffer: Buffer = authenticationResponse @@ -251,3 +254,39 @@ export default class WebAuthnProvider extends BaseProvider { return publicKey; } } + +function assertAuthenticationResponse( + authenticationResponse: unknown +): asserts authenticationResponse is { + attestedCredentialData: { + credentialPublicKey: Buffer; + }; +} { + /* eslint-disable @typescript-eslint/no-explicit-any */ + if ( + typeof authenticationResponse !== 'object' || + authenticationResponse === null || + !('attestedCredentialData' in authenticationResponse) || + typeof (authenticationResponse as any).attestedCredentialData !== + 'object' || + (authenticationResponse as any).attestedCredentialData === null || + !( + 'credentialPublicKey' in + (authenticationResponse as any).attestedCredentialData + ) || + !( + (authenticationResponse as any).attestedCredentialData + .credentialPublicKey instanceof Buffer + ) + ) { + throw new InvalidArgumentException( + { + info: { + authenticationResponse, + }, + }, + 'authenticationResponse does not match the expected structure: { attestedCredentialData: { credentialPublicKey: Buffer } }' + ); + } + /* eslint-enable @typescript-eslint/no-explicit-any */ +} diff --git a/packages/auth/src/lib/authenticators/index.ts b/packages/auth/src/lib/authenticators/index.ts new file mode 100644 index 0000000000..546fc49925 --- /dev/null +++ b/packages/auth/src/lib/authenticators/index.ts @@ -0,0 +1,29 @@ +import { LitRelay } from '../relay'; +import { AppleAuthenticator } from './AppleAuthenticator'; +import { DiscordAuthenticator } from './DiscordAuthenticator'; +import { GoogleAuthenticator } from './GoogleAuthenticator'; +import { MetamaskAuthenticator } from './metamask'; +import { + StytchOtpAuthenticator, + StytchAuthFactorOtpAuthenticator, +} from './stytch'; +import { + isSignInRedirect, + getProviderFromUrl, + getAuthIdByAuthMethod, +} from './utils'; +import { WebAuthnAuthenticator } from './WebAuthnAuthenticator'; + +export { + AppleAuthenticator, + DiscordAuthenticator, + MetamaskAuthenticator, + GoogleAuthenticator, + StytchAuthFactorOtpAuthenticator, + StytchOtpAuthenticator, + WebAuthnAuthenticator, + isSignInRedirect, + getProviderFromUrl, + getAuthIdByAuthMethod, + LitRelay, +}; diff --git a/packages/lit-auth-client/src/lib/providers/EthWalletProvider.ts b/packages/auth/src/lib/authenticators/metamask/MetamaskAuthenticator.ts similarity index 88% rename from packages/lit-auth-client/src/lib/providers/EthWalletProvider.ts rename to packages/auth/src/lib/authenticators/metamask/MetamaskAuthenticator.ts index c1d1afd3d4..46afd01b1c 100644 --- a/packages/lit-auth-client/src/lib/providers/EthWalletProvider.ts +++ b/packages/auth/src/lib/authenticators/metamask/MetamaskAuthenticator.ts @@ -8,11 +8,8 @@ import { LitEVMChainKeys, WrongParamFormat, } from '@lit-protocol/constants'; -import { - LitNodeClient, - checkAndSignAuthMessage, -} from '@lit-protocol/lit-node-client'; -import { log } from '@lit-protocol/misc'; +import { LitNodeClient } from '@lit-protocol/lit-node-client'; +import { getChildLogger } from '@lit-protocol/logger'; import { AuthMethod, AuthSig, @@ -21,14 +18,18 @@ import { EthWalletAuthenticateOptions, } from '@lit-protocol/types'; -import { BaseProvider } from './BaseProvider'; +import { BaseAuthenticator } from '../BaseAuthenticator'; +import { checkAndSignEVMAuthMessage } from './eth'; interface DomainAndOrigin { domain?: string; origin?: string; } -export default class EthWalletProvider extends BaseProvider { +export class MetamaskAuthenticator extends BaseAuthenticator { + private static readonly _logger = getChildLogger({ + module: 'MetamaskAuthenticator', + }); /** * The domain from which the signing request is made */ @@ -41,7 +42,8 @@ export default class EthWalletProvider extends BaseProvider { constructor(options: EthWalletProviderOptions & BaseProviderOptions) { super(options); - const { domain, origin } = EthWalletProvider.getDomainAndOrigin(options); + const { domain, origin } = + MetamaskAuthenticator.getDomainAndOrigin(options); this.domain = domain; this.origin = origin; } @@ -52,7 +54,7 @@ export default class EthWalletProvider extends BaseProvider { domain = options.domain || window.location.hostname; origin = options.origin || window.location.origin; } catch (e) { - log( + MetamaskAuthenticator._logger.error( '⚠️ Error getting "domain" and "origin" from window object, defaulting to "localhost" and "http://localhost"' ); domain = options.domain || 'localhost'; @@ -85,7 +87,7 @@ export default class EthWalletProvider extends BaseProvider { ); } - return EthWalletProvider.authenticate({ + return MetamaskAuthenticator.authenticate({ signer: options, address: options.address, chain: options.chain, @@ -109,7 +111,7 @@ export default class EthWalletProvider extends BaseProvider { * @param {string} [options.origin] - Origin from which the signing request is made * @returns {Promise} - Auth method object containing the auth signature * @static - * @memberof EthWalletProvider + * @memberof MetamaskAuthenticator * * @example * ```typescript @@ -170,7 +172,7 @@ export default class EthWalletProvider extends BaseProvider { expiration || new Date(Date.now() + 1000 * 60 * 60 * 24).toISOString(); const { domain: resolvedDomain, origin: resolvedOrigin } = - EthWalletProvider.getDomainAndOrigin({ domain, origin }); + MetamaskAuthenticator.getDomainAndOrigin({ domain, origin }); // Prepare Sign in with Ethereum message const preparedMessage: Partial = { @@ -196,7 +198,7 @@ export default class EthWalletProvider extends BaseProvider { address: address, }; } else { - authSig = await checkAndSignAuthMessage({ + authSig = await checkAndSignEVMAuthMessage({ chain, nonce: await litNodeClient.getLatestBlockhash(), }); @@ -218,7 +220,7 @@ export default class EthWalletProvider extends BaseProvider { * @returns {Promise} - Auth method id */ public async getAuthMethodId(authMethod: AuthMethod): Promise { - return EthWalletProvider.authMethodId(authMethod); + return MetamaskAuthenticator.authMethodId(authMethod); } public static async authMethodId(authMethod: AuthMethod): Promise { diff --git a/packages/auth-browser/src/lib/connect-modal/modal.ts b/packages/auth/src/lib/authenticators/metamask/connect-modal/modal.ts similarity index 99% rename from packages/auth-browser/src/lib/connect-modal/modal.ts rename to packages/auth/src/lib/authenticators/metamask/connect-modal/modal.ts index 8d23d1691a..0d3bf49f59 100644 --- a/packages/auth-browser/src/lib/connect-modal/modal.ts +++ b/packages/auth/src/lib/authenticators/metamask/connect-modal/modal.ts @@ -1,3 +1,4 @@ +/* eslint-disable */ // @ts-nocheck // node_modules/micromodal/dist/micromodal.es.js import { NoWalletException } from '@lit-protocol/constants'; diff --git a/packages/auth-browser/src/lib/chains/eth.ts b/packages/auth/src/lib/authenticators/metamask/eth.ts similarity index 71% rename from packages/auth-browser/src/lib/chains/eth.ts rename to packages/auth/src/lib/authenticators/metamask/eth.ts index 32393f09e5..79dceffc3b 100644 --- a/packages/auth-browser/src/lib/chains/eth.ts +++ b/packages/auth/src/lib/authenticators/metamask/eth.ts @@ -1,55 +1,43 @@ import { Buffer as BufferPolyfill } from 'buffer'; -import depd from 'depd'; import { hexlify } from '@ethersproject/bytes'; import { JsonRpcSigner, Web3Provider } from '@ethersproject/providers'; import { toUtf8Bytes } from '@ethersproject/strings'; - -// import WalletConnectProvider from '@walletconnect/ethereum-provider'; import { verifyMessage } from '@ethersproject/wallet'; import { EthereumProvider, default as WalletConnectProvider, } from '@walletconnect/ethereum-provider'; +import depd from 'depd'; import { ethers } from 'ethers'; import { getAddress } from 'ethers/lib/utils'; import { SiweMessage } from 'siwe'; -// @ts-ignore: If importing 'nacl' directly, the built files will use .default instead -import * as nacl from 'tweetnacl'; -import * as naclUtil from 'tweetnacl-util'; - -// @ts-ignore: If importing 'nacl' directly, the built files will use .default instead import { ConstantValues, ConstantKeys, - EITHER_TYPE, - ELeft, - ERight, - IEither, + Environment, InvalidSignatureError, LIT_CHAINS, LOCAL_STORAGE_KEYS, - LocalStorageItemNotFoundException, UnknownError, UnsupportedChainException, WrongNetworkException, WrongParamFormat, LIT_CHAINS_KEYS, } from '@lit-protocol/constants'; +import { validateSessionSig } from '@lit-protocol/lit-node-client'; +import { getChildLogger, logger } from '@lit-protocol/logger'; import { - isBrowser, - isNode, - log, - numberToHex, - validateSessionSig, -} from '@lit-protocol/misc'; -import { getStorageItem } from '@lit-protocol/misc-browser'; + getStorageItem, + setStorageItem, + removeStorageItem, +} from '@lit-protocol/misc-browser'; import { AuthCallbackParams, AuthSig } from '@lit-protocol/types'; -import LitConnectModal from '../connect-modal/modal'; +import LitConnectModal from './connect-modal/modal'; -const deprecated = depd('lit-js-sdk:auth-browser:index'); +const deprecated = depd('lit-js-sdk:auth:metamask-authenticator:index'); if (globalThis && typeof globalThis.Buffer === 'undefined') { globalThis.Buffer = BufferPolyfill; @@ -168,35 +156,27 @@ export const chainHexIdToChainName = (chainHexId: string): void | string => { * Get chain id of the current network * @param { string } chain * @param { Web3Provider } web3 - * @returns { Promise } + * @returns { Promise } */ export const getChainId = async ( chain: string, web3: Web3Provider -): Promise> => { - let resultOrError: IEither; - +): Promise => { try { - const resp = await web3.getNetwork(); - resultOrError = ERight(resp.chainId); + const network = await web3.getNetwork(); + return network.chainId; } catch (e) { - // couldn't get chainId. throw the incorrect network error - log('getNetwork threw an exception', e); - - resultOrError = ELeft( - new WrongNetworkException( - { - info: { - chain, - }, + throw new WrongNetworkException( + { + info: { + chain, }, - `Incorrect network selected. Please switch to the %s network in your wallet and try again.`, - chain - ) + cause: e, + }, + `Incorrect network selected. Please switch to the %s network in your wallet and try again.`, + chain ); } - - return resultOrError; }; /** @@ -225,7 +205,10 @@ export function isSignedMessageExpired(signedMessage: string) { * * @returns { boolean } */ -export const getMustResign = (authSig: AuthSig, resources: any): boolean => { +export const getMustResign = ( + authSig: AuthSig, + resources: unknown +): boolean => { let mustResign!: boolean; // if it's not expired, then we don't need to resign @@ -235,23 +218,30 @@ export const getMustResign = (authSig: AuthSig, resources: any): boolean => { try { const parsedSiwe = new SiweMessage(authSig.signedMessage); - log('parsedSiwe.resources', parsedSiwe.resources); + logger.info({ + msg: 'parsedSiwe.resources', + resources: parsedSiwe.resources, + }); if (JSON.stringify(parsedSiwe.resources) !== JSON.stringify(resources)) { - log( + logger.info( 'signing auth message because resources differ from the resources in the auth sig' ); mustResign = true; } if (parsedSiwe.address !== getAddress(parsedSiwe.address)) { - log( - 'signing auth message because parsedSig.address is not equal to the same address but checksummed. This usually means the user had a non-checksummed address saved and so they need to re-sign.' + logger.info( + 'signing auth message because parsedSig.address is not equal to the same address but checksummed. This usually means the user had a non-checksummed address saved and so they need to re-sign.' ); mustResign = true; } - } catch (e) { - log('error parsing siwe sig. making the user sign again: ', e); + } catch (error) { + logger.error({ + function: 'getMustResign', + msg: 'error parsing siwe sig. Making the user sign again', + error, + }); mustResign = true; } @@ -298,8 +288,8 @@ export const connectWeb3 = async ({ walletConnectProjectId, }: ConnectWeb3): Promise => { // -- check if it's nodejs - if (isNode()) { - log('connectWeb3 is not supported in nodejs.'); + if (Environment.isNode) { + logger.info('connectWeb3 is not supported in nodejs.'); return { web3: null, account: null }; } @@ -322,20 +312,20 @@ export const connectWeb3 = async ({ }, }; - if (isBrowser()) { + if (Environment.isBrowser) { litWCProvider = wcProvider; } } - log('getting provider via lit connect modal'); + logger.info('getting provider via lit connect modal'); const dialog = new LitConnectModal({ providerOptions }); const provider = await dialog.getWalletProvider(); - log('got provider'); + logger.info('got provider'); - // @ts-ignore + // @ts-expect-error provider is not typed const web3 = new Web3Provider(provider); // trigger metamask popup @@ -343,19 +333,19 @@ export const connectWeb3 = async ({ deprecated( '@deprecated soon to be removed. - trying to enable provider. this will trigger the metamask popup.' ); - // @ts-ignore + // @ts-expect-error provider is not typed await provider.enable(); - } catch (e) { - log( - "error enabling provider but swallowed it because it's not important. most wallets use a different function now to enable the wallet so you can ignore this error, because those other methods will be tried.", - e - ); + } catch (error) { + logger.info({ + msg: "error enabling provider but swallowed it because it's not important. Most wallets use a different function now to enable the wallet so you can ignore this error, those other methods will be tried.", + error, + }); } - log('listing accounts'); + logger.info('listing accounts'); const accounts = await web3.listAccounts(); - log('accounts', accounts); + logger.info({ msg: 'accounts', accounts }); const account = ethers.utils.getAddress(accounts[0]); return { web3, account }; @@ -364,23 +354,22 @@ export const connectWeb3 = async ({ /** * @browserOnly * Delete any saved AuthSigs from local storage. Takes no params and returns - * nothing. This will also clear out the WalletConnect cache in local storage. + * nothing. This will also clear out the WalletConnect cache in localstorage. * We often run this function as a result of the user pressing a "Logout" button. * * @return { void } */ export const disconnectWeb3 = (): void => { - if (isNode()) { - log('disconnectWeb3 is not supported in nodejs.'); + if (Environment.isNode) { + logger.info('disconnectWeb3 is not supported in nodejs.'); return; } - // @ts-ignore - if (isBrowser() && litWCProvider) { + if (Environment.isBrowser && litWCProvider) { try { litWCProvider.disconnect(); } catch (err) { - log( + logger.info( 'Attempted to disconnect global WalletConnectProvider for lit-connect-modal', err ); @@ -389,11 +378,8 @@ export const disconnectWeb3 = (): void => { const storage = LOCAL_STORAGE_KEYS; - localStorage.removeItem(storage.AUTH_SIGNATURE); - localStorage.removeItem(storage.AUTH_SOL_SIGNATURE); - localStorage.removeItem(storage.AUTH_COSMOS_SIGNATURE); - localStorage.removeItem(storage.WEB3_PROVIDER); - localStorage.removeItem(storage.WALLET_SIGNATURE); + removeStorageItem(storage.AUTH_SIGNATURE); + removeStorageItem(storage.WALLET_SIGNATURE); }; /** @@ -412,10 +398,11 @@ export const checkAndSignEVMAuthMessage = async ({ walletConnectProjectId, nonce, }: AuthCallbackParams): Promise => { - // -- check if it's nodejs - if (isNode()) { - log( - 'checkAndSignEVMAuthMessage is not supported in nodejs. You can create a SIWE on your own using the SIWE package.' + const logger = getChildLogger({ function: 'checkAndSignEVMAuthMessage' }); + // -- check if it's Node.js + if (Environment.isNode) { + logger.info( + 'checkAndSignEVMAuthMessage is not supported in nodejs. You can create a SIWE on your own using the SIWE package.' ); return { sig: '', @@ -426,7 +413,7 @@ export const checkAndSignEVMAuthMessage = async ({ } // --- scoped methods --- - const _throwIncorrectNetworkError = (error: any) => { + const _throwIncorrectNetworkError = (error: any): never => { if (error.code === WALLET_ERROR.NO_SUCH_METHOD) { throw new WrongNetworkException( { @@ -450,45 +437,28 @@ export const checkAndSignEVMAuthMessage = async ({ walletConnectProjectId, }); - log(`got web3 and account: ${account}`); + logger.info(`got web3 and account: ${account}`); // -- 2. prepare all required variables - const currentChainIdOrError = await getChainId(chain, web3); + let currentChainId = await getChainId(chain, web3); const selectedChainId: number = selectedChain.chainId; - const selectedChainIdHex: string = numberToHex(selectedChainId); - let authSigOrError = getStorageItem(LOCAL_STORAGE_KEYS.AUTH_SIGNATURE); - - log('currentChainIdOrError:', currentChainIdOrError); - log('selectedChainId:', selectedChainId); - log('selectedChainIdHex:', selectedChainIdHex); - log('authSigOrError:', authSigOrError); - - // -- 3. check all variables before executing business logic - if (currentChainIdOrError.type === EITHER_TYPE.ERROR) { - throw new UnknownError( - { - info: { - chainId: chain, - }, - cause: currentChainIdOrError.result, - }, - 'Unknown error when getting chain id' - ); - } - - log('chainId from web3', currentChainIdOrError); - log( - `checkAndSignAuthMessage with chainId ${currentChainIdOrError} and chain set to ${chain} and selectedChain is `, - selectedChain - ); + const selectedChainIdHex: string = `0x${selectedChainId.toString(16)}`; + + logger.info({ msg: 'currentChainId', currentChainId }); + logger.info({ msg: 'selectedChainId', selectedChainId }); + logger.info({ msg: 'selectedChainIdHex', selectedChainIdHex }); + logger.info({ + msg: `checkAndSignAuthMessage with chainId ${currentChainId} and chain set to ${chain} and selectedChain is `, + selectedChain, + }); // -- 4. case: (current chain id is NOT equal to selected chain) AND is set to switch chain - if (currentChainIdOrError.result !== selectedChainId && switchChain) { + if (currentChainId !== selectedChainId && switchChain) { const provider = web3.provider as any; // -- (case) if able to switch chain id try { - log('trying to switch to chainId', selectedChainIdHex); + logger.info({ msg: 'trying to switch to chainId', selectedChainIdHex }); await provider.request({ method: 'wallet_switchEthereumChain', @@ -497,7 +467,7 @@ export const checkAndSignEVMAuthMessage = async ({ // -- (case) if unable to switch chain } catch (switchError: any) { - log('error switching to chainId', switchError); + logger.error({ msg: 'error switching to chainId', switchError }); // -- (error case) if ( @@ -522,7 +492,7 @@ export const checkAndSignEVMAuthMessage = async ({ method: 'wallet_addEthereumChain', params: data, }); - } catch (addError: any) { + } catch (addError) { _throwIncorrectNetworkError(addError); } } else { @@ -531,17 +501,27 @@ export const checkAndSignEVMAuthMessage = async ({ } // we may have switched the chain to the selected chain. set the chainId accordingly - currentChainIdOrError.result = selectedChain.chainId; + currentChainId = selectedChain.chainId; } + let authSig: AuthSig | undefined; // -- 5. case: Lit auth signature is NOT in the local storage - log('checking if sig is in local storage'); - - if (authSigOrError.type === EITHER_TYPE.ERROR) { - log('signing auth message because sig is not in local storage'); + try { + logger.info('checking if sig is in local storage'); + const authSigString = getStorageItem(LOCAL_STORAGE_KEYS.AUTH_SIGNATURE); + authSig = JSON.parse(authSigString); + } catch (error) { + logger.warn({ + msg: 'Could not get sig from local storage', + error, + }); + } + if (!authSig) { try { - const authSig = await _signAndGetAuth({ + logger.info('signing auth message because sig is not in local storage'); + + authSig = await _signAndGetAuth({ web3, account, chainId: selectedChain.chainId, @@ -550,12 +530,7 @@ export const checkAndSignEVMAuthMessage = async ({ uri, nonce, }); - - authSigOrError = { - type: EITHER_TYPE.SUCCESS, - result: JSON.stringify(authSig), - }; - } catch (e: any) { + } catch (e) { throw new UnknownError( { info: { @@ -571,20 +546,14 @@ export const checkAndSignEVMAuthMessage = async ({ 'Could not get authenticated message' ); } - - // Log new authSig - log('5. authSigOrError:', authSigOrError); } // -- 6. case: Lit auth signature IS in the local storage - const authSigString: string = authSigOrError.result; - let authSig = JSON.parse(authSigString); - - log('6. authSig:', authSig); + logger.info({ msg: 'authSig', authSig }); // -- 7. case: when we are NOT on the right wallet address if (account.toLowerCase() !== authSig.address.toLowerCase()) { - log( + logger.info( 'signing auth message because account is not the same as the address in the auth sig' ); authSig = await _signAndGetAuth({ @@ -596,7 +565,7 @@ export const checkAndSignEVMAuthMessage = async ({ uri, nonce, }); - log('7. authSig:', authSig); + logger.info({ msg: 'authSig', authSig }); // -- 8. case: we are on the right wallet, but need to check the resources of the sig and re-sign if they don't match } else { @@ -613,7 +582,7 @@ export const checkAndSignEVMAuthMessage = async ({ nonce, }); } - log('8. mustResign:', mustResign); + logger.info({ msg: 'mustResign', mustResign }); } // -- 9. finally, if the authSig is expired, re-sign @@ -622,10 +591,10 @@ export const checkAndSignEVMAuthMessage = async ({ if (isSignedMessageExpired(authSig.signedMessage) || !checkAuthSig.isValid) { if (!checkAuthSig.isValid) { - log(`Invalid AuthSig: ${checkAuthSig.errors.join(', ')}`); + logger.info(`Invalid AuthSig: ${checkAuthSig.errors.join(', ')}`); } - log('9. authSig expired!, resigning..'); + logger.info('authSig expired!, resigning..'); authSig = await _signAndGetAuth({ web3, @@ -664,23 +633,8 @@ const _signAndGetAuth = async ({ nonce, }); - const authSigOrError = getStorageItem(LOCAL_STORAGE_KEYS.AUTH_SIGNATURE); - - if (authSigOrError.type === 'ERROR') { - throw new LocalStorageItemNotFoundException( - { - info: { - storageKey: LOCAL_STORAGE_KEYS.AUTH_SIGNATURE, - }, - }, - 'Failed to get authSig from local storage' - ); - } - - const authSig: AuthSig = - typeof authSigOrError.result === 'string' - ? JSON.parse(authSigOrError.result) - : authSigOrError.result; + const authSigString = getStorageItem(LOCAL_STORAGE_KEYS.AUTH_SIGNATURE); + const authSig: AuthSig = JSON.parse(authSigString); return authSig; }; @@ -703,8 +657,8 @@ export const signAndSaveAuthMessage = async ({ nonce, }: signAndSaveAuthParams): Promise => { // check if it's nodejs - if (isNode()) { - log('checkAndSignEVMAuthMessage is not supported in nodejs.'); + if (Environment.isNode) { + logger.info('checkAndSignEVMAuthMessage is not supported in nodejs.'); return { sig: '', derivedVia: '', @@ -752,25 +706,10 @@ export const signAndSaveAuthMessage = async ({ }; // -- 4. store auth and a keypair in localstorage for communication with sgx - if (isBrowser()) { - localStorage.setItem( - LOCAL_STORAGE_KEYS.AUTH_SIGNATURE, - JSON.stringify(authSig) - ); - } - const commsKeyPair = nacl.box.keyPair(); - - if (isBrowser()) { - localStorage.setItem( - LOCAL_STORAGE_KEYS.KEY_PAIR, - JSON.stringify({ - publicKey: naclUtil.encodeBase64(commsKeyPair.publicKey), - secretKey: naclUtil.encodeBase64(commsKeyPair.secretKey), - }) - ); + if (Environment.isBrowser) { + setStorageItem(LOCAL_STORAGE_KEYS.AUTH_SIGNATURE, JSON.stringify(authSig)); } - log(`generated and saved ${LOCAL_STORAGE_KEYS.KEY_PAIR}`); return authSig; }; @@ -787,9 +726,9 @@ export const signMessage = async ({ web3, account, }: SignMessageParams): Promise => { - // check if it's nodejs - if (isNode()) { - log('signMessage is not supported in nodejs.'); + // check if it's Node.js + if (Environment.isNode) { + logger.info('signMessage is not supported in nodejs.'); return { signature: '', address: '', @@ -798,22 +737,22 @@ export const signMessage = async ({ // -- validate if (!web3 || !account) { - log(`web3: ${web3} OR ${account} not found. Connecting web3..`); + logger.info(`web3: ${web3} OR ${account} not found. Connecting web3..`); const res = await connectWeb3({ chainId: 1 }); web3 = res.web3; account = res.account; } - log('pausing...'); + logger.info('pausing...'); await new Promise((resolve) => setTimeout(resolve, 500)); - log('signing with ', account); + logger.info({ msg: 'signing with ', account }); const signature = await signMessageAsync(web3.getSigner(), account, body); const address = verifyMessage(body, signature).toLowerCase(); - log('Signature: ', signature); - log('recovered address: ', address); + logger.info({ msg: 'Signature', signature }); + logger.info({ msg: 'recovered address', address }); if (address.toLowerCase() !== account.toLowerCase()) { const msg = `ruh roh, the user signed with a different address (${address}) then they're using with web3 (${account}). This will lead to confusion.`; @@ -850,8 +789,8 @@ export const signMessageAsync = async ( message: string ): Promise => { // check if it's nodejs - if (isNode()) { - log('signMessageAsync is not supported in nodejs.'); + if (Environment.isNode) { + logger.warn('signMessageAsync is not supported in nodejs.'); return null; } @@ -859,23 +798,25 @@ export const signMessageAsync = async ( if (signer instanceof JsonRpcSigner) { try { - log('Signing with personal_sign'); + logger.info('Signing with personal_sign'); const signature = await signer.provider.send('personal_sign', [ hexlify(messageBytes), address.toLowerCase(), ]); return signature; - } catch (e: any) { - log( - 'Signing with personal_sign failed, trying signMessage as a fallback' - ); - if (e.message.includes('personal_sign')) { + } catch (error: any) { + logger.warn({ + function: 'signMessageAsync', + msg: 'Signing with personal_sign failed, trying signMessage as a fallback', + error, + }); + if (error.message.includes('personal_sign')) { return await signer.signMessage(messageBytes); } - throw e; + throw error; } } else { - log('signing with signMessage'); + logger.info('signing with signMessage'); return await signer.signMessage(messageBytes); } }; diff --git a/packages/auth/src/lib/authenticators/metamask/index.ts b/packages/auth/src/lib/authenticators/metamask/index.ts new file mode 100644 index 0000000000..d42f9e5022 --- /dev/null +++ b/packages/auth/src/lib/authenticators/metamask/index.ts @@ -0,0 +1,3 @@ +import { MetamaskAuthenticator } from './MetamaskAuthenticator'; + +export { MetamaskAuthenticator }; diff --git a/packages/auth-browser/src/lib/lit-connect-modal.d.ts b/packages/auth/src/lib/authenticators/metamask/lit-connect-modal.d.ts similarity index 100% rename from packages/auth-browser/src/lib/lit-connect-modal.d.ts rename to packages/auth/src/lib/authenticators/metamask/lit-connect-modal.d.ts diff --git a/packages/lit-auth-client/src/lib/providers/StytchAuthFactorOtp.ts b/packages/auth/src/lib/authenticators/stytch/StytchAuthFactorOtpAuthenticator.ts similarity index 89% rename from packages/lit-auth-client/src/lib/providers/StytchAuthFactorOtp.ts rename to packages/auth/src/lib/authenticators/stytch/StytchAuthFactorOtpAuthenticator.ts index 7f4dc25751..8e35a6467a 100644 --- a/packages/lit-auth-client/src/lib/providers/StytchAuthFactorOtp.ts +++ b/packages/auth/src/lib/authenticators/stytch/StytchAuthFactorOtpAuthenticator.ts @@ -12,19 +12,18 @@ import { StytchToken, } from '@lit-protocol/types'; -import { BaseProvider } from './BaseProvider'; +import { BaseAuthenticator } from '../BaseAuthenticator'; import { FactorParser, emailOtpAuthFactorParser, smsOtpAuthFactorParser, totpAuthFactorParser, whatsAppOtpAuthFactorParser, -} from './StytchAuthFactors'; +} from './parsers'; -export default class StytchAuthFactorOtpProvider< +export class StytchAuthFactorOtpAuthenticator< T extends FactorParser -> extends BaseProvider { - private _params: StytchOtpProviderOptions; +> extends BaseAuthenticator { private _factor: T; private static _provider: string = 'https://stytch.com/session'; @@ -34,7 +33,6 @@ export default class StytchAuthFactorOtpProvider< factor: T ) { super(params); - this._params = config; this._factor = factor; } @@ -66,13 +64,16 @@ export default class StytchAuthFactorOtpProvider< } const parsedToken: StytchToken = - StytchAuthFactorOtpProvider._parseJWT(accessToken); - const factorParser = StytchAuthFactorOtpProvider._resolveAuthFactor( + StytchAuthFactorOtpAuthenticator._parseJWT(accessToken); + const factorParser = StytchAuthFactorOtpAuthenticator._resolveAuthFactor( this._factor ); try { - factorParser.parser(parsedToken, StytchAuthFactorOtpProvider._provider); + factorParser.parser( + parsedToken, + StytchAuthFactorOtpAuthenticator._provider + ); } catch (e) { reject(e); } @@ -93,7 +94,7 @@ export default class StytchAuthFactorOtpProvider< * @returns {Promise} - Auth method id */ public async getAuthMethodId(authMethod: AuthMethod): Promise { - return StytchAuthFactorOtpProvider.authMethodId(authMethod); + return StytchAuthFactorOtpAuthenticator.authMethodId(authMethod); } /** @@ -109,7 +110,7 @@ export default class StytchAuthFactorOtpProvider< return new Promise((resolve, reject) => { const accessToken = authMethod.accessToken; const parsedToken: StytchToken = - StytchAuthFactorOtpProvider._parseJWT(accessToken); + StytchAuthFactorOtpAuthenticator._parseJWT(accessToken); let factor: FactorParser = 'email'; switch (authMethod.authMethodType) { case AUTH_METHOD_TYPE.StytchEmailFactorOtp: @@ -144,7 +145,7 @@ export default class StytchAuthFactorOtpProvider< } private static _resolveAuthFactor(factor: FactorParser): { - parser: Function; + parser: (parsedToken: StytchToken, provider: string) => string; authMethodType: AUTH_METHOD_TYPE_VALUES; } { switch (factor) { diff --git a/packages/lit-auth-client/src/lib/providers/StytchOtpProvider.ts b/packages/auth/src/lib/authenticators/stytch/StytchOtpAuthenticator.ts similarity index 90% rename from packages/lit-auth-client/src/lib/providers/StytchOtpProvider.ts rename to packages/auth/src/lib/authenticators/stytch/StytchOtpAuthenticator.ts index ee69ae999a..6eab48436f 100644 --- a/packages/lit-auth-client/src/lib/providers/StytchOtpProvider.ts +++ b/packages/auth/src/lib/authenticators/stytch/StytchOtpAuthenticator.ts @@ -9,9 +9,9 @@ import { StytchOtpProviderOptions, } from '@lit-protocol/types'; -import { BaseProvider } from './BaseProvider'; +import { BaseAuthenticator } from '../BaseAuthenticator'; -export class StytchOtpProvider extends BaseProvider { +export class StytchOtpAuthenticator extends BaseAuthenticator { private _params: StytchOtpProviderOptions; private _provider: string = 'https://stytch.com/session'; @@ -48,7 +48,8 @@ export class StytchOtpProvider extends BaseProvider { ); } - const parsedToken: StytchToken = StytchOtpProvider._parseJWT(accessToken); + const parsedToken: StytchToken = + StytchOtpAuthenticator._parseJWT(accessToken); const audience = (parsedToken['aud'] as string[])[0]; if (audience != this._params.appId) { reject(new Error('Parsed application id does not match parameters')); @@ -92,11 +93,11 @@ export class StytchOtpProvider extends BaseProvider { * @returns {Promise} - Auth method id */ public async getAuthMethodId(authMethod: AuthMethod): Promise { - return StytchOtpProvider.authMethodId(authMethod); + return StytchOtpAuthenticator.authMethodId(authMethod); } public static async authMethodId(authMethod: AuthMethod): Promise { - const tokenBody = StytchOtpProvider._parseJWT(authMethod.accessToken); + const tokenBody = StytchOtpAuthenticator._parseJWT(authMethod.accessToken); const userId = tokenBody['sub'] as string; const orgId = (tokenBody['aud'] as string[])[0]; const authMethodId = ethers.utils.keccak256( @@ -124,7 +125,6 @@ export class StytchOtpProvider extends BaseProvider { } const body = Buffer.from(parts[1], 'base64'); const parsedBody: StytchToken = JSON.parse(body.toString('ascii')); - console.log('JWT body: ', parsedBody); return parsedBody; } } diff --git a/packages/auth/src/lib/authenticators/stytch/index.ts b/packages/auth/src/lib/authenticators/stytch/index.ts new file mode 100644 index 0000000000..e3a3005202 --- /dev/null +++ b/packages/auth/src/lib/authenticators/stytch/index.ts @@ -0,0 +1,4 @@ +import { StytchAuthFactorOtpAuthenticator } from './StytchAuthFactorOtpAuthenticator'; +import { StytchOtpAuthenticator } from './StytchOtpAuthenticator'; + +export { StytchAuthFactorOtpAuthenticator, StytchOtpAuthenticator }; diff --git a/packages/lit-auth-client/src/lib/providers/StytchAuthFactors.ts b/packages/auth/src/lib/authenticators/stytch/parsers.ts similarity index 68% rename from packages/lit-auth-client/src/lib/providers/StytchAuthFactors.ts rename to packages/auth/src/lib/authenticators/stytch/parsers.ts index f734ce4f2d..eb9149882d 100644 --- a/packages/lit-auth-client/src/lib/providers/StytchAuthFactors.ts +++ b/packages/auth/src/lib/authenticators/stytch/parsers.ts @@ -1,6 +1,14 @@ +import { ethers } from 'ethers'; + import { WrongParamFormat } from '@lit-protocol/constants'; import { StytchToken } from '@lit-protocol/types'; -import { ethers } from 'ethers'; + +import type { + AuthenticationFactor, + AuthenticatorAppFactor, + EmailFactor, + PhoneNumberFactor, +} from 'stytch'; export type FactorParser = 'email' | 'sms' | 'whatsApp' | 'totp'; @@ -9,10 +17,9 @@ export const emailOtpAuthFactorParser = ( provider: string ): string => { const session = parsedToken[provider]; - const authFactors: any[] = session['authentication_factors']; - let authFactor = authFactors.find((value, _index, _obj) => { - if (value.email_factor) return value; - }); + const authFactors: AuthenticationFactor[] = session['authentication_factors']; + + const authFactor = authFactors.find((value) => !!value.email_factor); if (!authFactor) { throw new WrongParamFormat( @@ -25,6 +32,9 @@ export const emailOtpAuthFactorParser = ( 'Could not find email authentication info in session' ); } + + const emailFactor = authFactor.email_factor as EmailFactor; + const audience = (parsedToken['aud'] as string[])[0]; if (!audience) { throw new WrongParamFormat( @@ -38,7 +48,7 @@ export const emailOtpAuthFactorParser = ( ); } - const userId = authFactor.email_factor.email_address; + const userId = emailFactor.email_address; const authMethodId = ethers.utils.keccak256( ethers.utils.toUtf8Bytes( `${userId.toLowerCase()}:${audience.toLowerCase()}` @@ -53,10 +63,8 @@ export const smsOtpAuthFactorParser = ( provider: string ): string => { const session = parsedToken[provider]; - const authFactors: any[] = session['authentication_factors']; - let authFactor = authFactors.find((value, _index, _obj) => { - if (value.phone_number_factor) return value; - }); + const authFactors: AuthenticationFactor[] = session['authentication_factors']; + const authFactor = authFactors.find((value) => !!value.phone_number_factor); if (!authFactor) { throw new WrongParamFormat( @@ -66,9 +74,12 @@ export const smsOtpAuthFactorParser = ( provider, }, }, - 'Could not find email authentication info in session' + 'Could not find phone authentication info in session' ); } + + const phoneNumberFactor = authFactor.phone_number_factor as PhoneNumberFactor; + const audience = (parsedToken['aud'] as string[])[0]; if (!audience) { throw new WrongParamFormat( @@ -82,7 +93,7 @@ export const smsOtpAuthFactorParser = ( ); } - const userId = authFactor.phone_number_factor.phone_number; + const userId = phoneNumberFactor.phone_number; const authMethodId = ethers.utils.keccak256( ethers.utils.toUtf8Bytes( `${userId.toLowerCase()}:${audience.toLowerCase()}` @@ -97,10 +108,8 @@ export const whatsAppOtpAuthFactorParser = ( provider: string ): string => { const session = parsedToken[provider]; - const authFactors: any[] = session['authentication_factors']; - let authFactor = authFactors.find((value, _index, _obj) => { - if (value.phone_number_factor) return value; - }); + const authFactors: AuthenticationFactor[] = session['authentication_factors']; + const authFactor = authFactors.find((value) => !!value.phone_number_factor); if (!authFactor) { throw new WrongParamFormat( @@ -110,9 +119,12 @@ export const whatsAppOtpAuthFactorParser = ( provider, }, }, - 'Could not find email authentication info in session' + 'Could not find phone authentication info in session' ); } + + const phoneNumberFactor = authFactor.phone_number_factor as PhoneNumberFactor; + const audience = (parsedToken['aud'] as string[])[0]; if (!audience) { throw new WrongParamFormat( @@ -126,7 +138,7 @@ export const whatsAppOtpAuthFactorParser = ( ); } - const userId = authFactor.phone_number_factor.phone_number; + const userId = phoneNumberFactor.phone_number; const authMethodId = ethers.utils.keccak256( ethers.utils.toUtf8Bytes( `${userId.toLowerCase()}:${audience.toLowerCase()}` @@ -141,10 +153,10 @@ export const totpAuthFactorParser = ( provider: string ): string => { const session = parsedToken[provider]; - const authFactors: any[] = session['authentication_factors']; - let authFactor = authFactors.find((value, _index, _obj) => { - if (value.phone_number_factor) return value; - }); + const authFactors: AuthenticationFactor[] = session['authentication_factors']; + const authFactor = authFactors.find( + (value) => !!value.authenticator_app_factor + ); if (!authFactor) { throw new WrongParamFormat( @@ -154,9 +166,13 @@ export const totpAuthFactorParser = ( provider, }, }, - 'Could not find email authentication info in session' + 'Could not find authenticator app authentication info in session' ); } + + const authenticatorAppFactor = + authFactor.authenticator_app_factor as AuthenticatorAppFactor; + const audience = (parsedToken['aud'] as string[])[0]; if (!audience) { throw new WrongParamFormat( @@ -170,7 +186,7 @@ export const totpAuthFactorParser = ( ); } - const userId = authFactor.authenticator_app_factor.totp_id; + const userId = authenticatorAppFactor.totp_id; const authMethodId = ethers.utils.keccak256( ethers.utils.toUtf8Bytes( `${userId.toLowerCase()}:${audience.toLowerCase()}` diff --git a/packages/lit-auth-client/src/lib/utils.ts b/packages/auth/src/lib/authenticators/utils.ts similarity index 87% rename from packages/lit-auth-client/src/lib/utils.ts rename to packages/auth/src/lib/authenticators/utils.ts index 636986a974..78c912f9d6 100644 --- a/packages/lit-auth-client/src/lib/utils.ts +++ b/packages/auth/src/lib/authenticators/utils.ts @@ -5,30 +5,18 @@ import { InvalidArgumentException, UnknownError, } from '@lit-protocol/constants'; -import { getLoggerbyId } from '@lit-protocol/misc'; import { AuthMethod, LoginUrlParams } from '@lit-protocol/types'; -import DiscordProvider from './providers/DiscordProvider'; -import EthWalletProvider from './providers/EthWalletProvider'; -import GoogleProvider from './providers/GoogleProvider'; -import StytchAuthFactorOtpProvider from './providers/StytchAuthFactorOtp'; -import { StytchOtpProvider } from './providers/StytchOtpProvider'; -import WebAuthnProvider from './providers/WebAuthnProvider'; +import { DiscordAuthenticator } from './DiscordAuthenticator'; +import { GoogleAuthenticator } from './GoogleAuthenticator'; +import { MetamaskAuthenticator } from './metamask/MetamaskAuthenticator'; +import { StytchAuthFactorOtpAuthenticator } from './stytch/StytchAuthFactorOtpAuthenticator'; +import { StytchOtpAuthenticator } from './stytch/StytchOtpAuthenticator'; +import { WebAuthnAuthenticator } from './WebAuthnAuthenticator'; export const STATE_PARAM_KEY = 'lit-state-param'; export const LIT_LOGIN_GATEWAY = 'https://login.litgateway.com'; -/** - * Check if OAuth provider is supported - * - * @param provider {string} - Auth provider name - * - * @returns {boolean} - True if provider is supported - */ -export function isSocialLoginSupported(provider: string): boolean { - return ['google', 'discord'].includes(provider); -} - /** * Create login url using the parameters provided as arguments when initializing the client * @@ -79,11 +67,11 @@ function getLoginRoute(provider: string): string { /** * Create query params string from given object * - * @param params {any} - Object of query params + * @param params {Record} - Object of query params * * @returns {string} - Query string */ -function createQueryParams(params: any): string { +function createQueryParams(params: Record): string { // Strip undefined values from params const filteredParams = Object.keys(params) .filter((k) => typeof params[k] !== 'undefined') @@ -335,11 +323,6 @@ export function unparse(buf: any) { ); } -export function log(...args: any) { - const logger = getLoggerbyId('auth-client'); - logger.debug(...args); -} - /** * Retrieves the authentication ID based on the provided authentication method. * @@ -353,28 +336,27 @@ export async function getAuthIdByAuthMethod( switch (authMethod.authMethodType) { case AUTH_METHOD_TYPE.EthWallet: - authId = await EthWalletProvider.authMethodId(authMethod); + authId = await MetamaskAuthenticator.authMethodId(authMethod); break; case AUTH_METHOD_TYPE.Discord: - authId = await DiscordProvider.authMethodId(authMethod); + authId = await DiscordAuthenticator.authMethodId(authMethod); break; case AUTH_METHOD_TYPE.WebAuthn: - authId = await WebAuthnProvider.authMethodId(authMethod); + authId = await WebAuthnAuthenticator.authMethodId(authMethod); break; case AUTH_METHOD_TYPE.GoogleJwt: - authId = await GoogleProvider.authMethodId(authMethod); + authId = await GoogleAuthenticator.authMethodId(authMethod); break; case AUTH_METHOD_TYPE.StytchOtp: - authId = await StytchOtpProvider.authMethodId(authMethod); + authId = await StytchOtpAuthenticator.authMethodId(authMethod); break; case AUTH_METHOD_TYPE.StytchEmailFactorOtp: case AUTH_METHOD_TYPE.StytchSmsFactorOtp: case AUTH_METHOD_TYPE.StytchTotpFactorOtp: case AUTH_METHOD_TYPE.StytchWhatsAppFactorOtp: - authId = await StytchAuthFactorOtpProvider.authMethodId(authMethod); + authId = await StytchAuthFactorOtpAuthenticator.authMethodId(authMethod); break; default: - log(`unsupported AuthMethodType: ${authMethod.authMethodType}`); throw new InvalidArgumentException( { info: { diff --git a/packages/lit-auth-client/src/lib/validators.spec.ts b/packages/auth/src/lib/authenticators/validators.spec.ts similarity index 86% rename from packages/lit-auth-client/src/lib/validators.spec.ts rename to packages/auth/src/lib/authenticators/validators.spec.ts index 91d1419fbb..f821db1bc1 100644 --- a/packages/lit-auth-client/src/lib/validators.spec.ts +++ b/packages/auth/src/lib/authenticators/validators.spec.ts @@ -1,8 +1,16 @@ +const errorMock = jest.fn(); + +jest.mock('pino', () => { + return { + pino: jest.fn(() => ({ + error: errorMock, + })), + }; +}); + import { validateMintRequestBody } from './validators'; describe('validateMintRequestBody', () => { - const mockConsoleError = jest.spyOn(console, 'error').mockImplementation(); - afterEach(() => { jest.clearAllMocks(); }); @@ -22,13 +30,13 @@ describe('validateMintRequestBody', () => { sendPkpToItself: true, }; expect(validateMintRequestBody(customArgs)).toBe(true); - expect(mockConsoleError).not.toHaveBeenCalled(); + expect(errorMock).not.toHaveBeenCalled(); }); it('should pass validation when no fields are provided', () => { const customArgs = {}; expect(validateMintRequestBody(customArgs)).toBe(true); - expect(mockConsoleError).not.toHaveBeenCalled(); + expect(errorMock).not.toHaveBeenCalled(); }); it('should pass validation when some fields are provided and correct', () => { @@ -37,7 +45,7 @@ describe('validateMintRequestBody', () => { permittedAuthMethodPubkeys: ['pubkey123'], }; expect(validateMintRequestBody(customArgs)).toBe(true); - expect(mockConsoleError).not.toHaveBeenCalled(); + expect(errorMock).not.toHaveBeenCalled(); }); it('should fail validation and log error for incorrect keyType', () => { @@ -45,7 +53,7 @@ describe('validateMintRequestBody', () => { keyType: '2', // should be a number }; expect(validateMintRequestBody(customArgs as any)).toBe(false); - expect(mockConsoleError).toHaveBeenCalledWith( + expect(errorMock).toHaveBeenCalledWith( expect.stringContaining('Invalid type for keyType') ); }); @@ -55,7 +63,7 @@ describe('validateMintRequestBody', () => { permittedAuthMethodTypes: ['1'], // should be an array of numbers }; expect(validateMintRequestBody(customArgs as any)).toBe(false); - expect(mockConsoleError).toHaveBeenCalledWith( + expect(errorMock).toHaveBeenCalledWith( expect.stringContaining('Invalid type for permittedAuthMethodTypes') ); }); @@ -65,7 +73,7 @@ describe('validateMintRequestBody', () => { permittedAuthMethodIds: [123], // should be an array of strings }; expect(validateMintRequestBody(customArgs as any)).toBe(false); - expect(mockConsoleError).toHaveBeenCalledWith( + expect(errorMock).toHaveBeenCalledWith( expect.stringContaining('Invalid type for permittedAuthMethodIds') ); }); @@ -75,7 +83,7 @@ describe('validateMintRequestBody', () => { permittedAuthMethodPubkeys: [123], // should be an array of strings }; expect(validateMintRequestBody(customArgs as any)).toBe(false); - expect(mockConsoleError).toHaveBeenCalledWith( + expect(errorMock).toHaveBeenCalledWith( expect.stringContaining('Invalid type for permittedAuthMethodPubkeys') ); }); @@ -91,7 +99,7 @@ describe('validateMintRequestBody', () => { addPkpEthAddressAsPermittedAddress: 'true', // should be a boolean }; expect(validateMintRequestBody(customArgs as any)).toBe(false); - expect(mockConsoleError).toHaveBeenCalledWith( + expect(errorMock).toHaveBeenCalledWith( expect.stringContaining( 'Invalid type for addPkpEthAddressAsPermittedAddress' ) @@ -103,7 +111,7 @@ describe('validateMintRequestBody', () => { sendPkpToItself: 'true', // should be a boolean }; expect(validateMintRequestBody(customArgs as any)).toBe(false); - expect(mockConsoleError).toHaveBeenCalledWith( + expect(errorMock).toHaveBeenCalledWith( expect.stringContaining('Invalid type for sendPkpToItself') ); }); @@ -113,7 +121,7 @@ describe('validateMintRequestBody', () => { extraneousKey: 'unexpected', // This key is not defined in MintRequestBody }; expect(validateMintRequestBody(customArgs as any)).toBe(false); - expect(mockConsoleError).toHaveBeenCalledWith( + expect(errorMock).toHaveBeenCalledWith( expect.stringContaining('Invalid key found: extraneousKey') ); }); diff --git a/packages/lit-auth-client/src/lib/validators.ts b/packages/auth/src/lib/authenticators/validators.ts similarity index 86% rename from packages/lit-auth-client/src/lib/validators.ts rename to packages/auth/src/lib/authenticators/validators.ts index d7cb27dc2a..f9ccf59147 100644 --- a/packages/lit-auth-client/src/lib/validators.ts +++ b/packages/auth/src/lib/authenticators/validators.ts @@ -1,8 +1,10 @@ +import { getChildLogger } from '@lit-protocol/logger'; import { MintRequestBody } from '@lit-protocol/types'; export const validateMintRequestBody = ( customArgs: Partial ): boolean => { + const logger = getChildLogger({ function: 'validateMintRequestBody' }); let isValid = true; const validKeys = [ 'keyType', @@ -17,7 +19,7 @@ export const validateMintRequestBody = ( // Check for any extraneous keys for (const key of Object.keys(customArgs)) { if (!validKeys.includes(key)) { - console.error( + logger.error( `Invalid key found: ${key}. This key is not allowed. Valid keys are: ${validKeys.join( ', ' )}` @@ -30,7 +32,7 @@ export const validateMintRequestBody = ( customArgs.keyType !== undefined && typeof customArgs.keyType !== 'number' ) { - console.error('Invalid type for keyType: expected a number.'); + logger.error('Invalid type for keyType: expected a number.'); isValid = false; } @@ -41,7 +43,7 @@ export const validateMintRequestBody = ( (type) => typeof type === 'number' )) ) { - console.error( + logger.error( 'Invalid type for permittedAuthMethodTypes: expected an array of numbers.' ); isValid = false; @@ -52,7 +54,7 @@ export const validateMintRequestBody = ( (!Array.isArray(customArgs.permittedAuthMethodIds) || !customArgs.permittedAuthMethodIds.every((id) => typeof id === 'string')) ) { - console.error( + logger.error( 'Invalid type for permittedAuthMethodIds: expected an array of strings.' ); isValid = false; @@ -65,7 +67,7 @@ export const validateMintRequestBody = ( (pubkey) => typeof pubkey === 'string' )) ) { - console.error( + logger.error( 'Invalid type for permittedAuthMethodPubkeys: expected an array of strings.' ); isValid = false; @@ -79,8 +81,8 @@ export const validateMintRequestBody = ( Array.isArray(scope) && scope.every((s) => typeof s === 'number') )) ) { - console.error( - 'Invalid type for permittedAuthMethodScopes: expected an array of arrays of numberr.' + logger.error( + 'Invalid type for permittedAuthMethodScopes: expected an array of arrays of number.' ); isValid = false; } @@ -89,7 +91,7 @@ export const validateMintRequestBody = ( customArgs.addPkpEthAddressAsPermittedAddress !== undefined && typeof customArgs.addPkpEthAddressAsPermittedAddress !== 'boolean' ) { - console.error( + logger.error( 'Invalid type for addPkpEthAddressAsPermittedAddress: expected a boolean.' ); isValid = false; @@ -99,7 +101,7 @@ export const validateMintRequestBody = ( customArgs.sendPkpToItself !== undefined && typeof customArgs.sendPkpToItself !== 'boolean' ) { - console.error('Invalid type for sendPkpToItself: expected a boolean.'); + logger.error('Invalid type for sendPkpToItself: expected a boolean.'); isValid = false; } diff --git a/packages/lit-auth-client/src/lib/relay.ts b/packages/auth/src/lib/relay.ts similarity index 75% rename from packages/lit-auth-client/src/lib/relay.ts rename to packages/auth/src/lib/relay.ts index cf0951aaca..a55e53a0c6 100644 --- a/packages/lit-auth-client/src/lib/relay.ts +++ b/packages/auth/src/lib/relay.ts @@ -9,6 +9,7 @@ import { InvalidParamType, NetworkError, } from '@lit-protocol/constants'; +import { getChildLogger } from '@lit-protocol/logger'; import { AuthMethod, MintRequestBody, @@ -19,13 +20,16 @@ import { LitRelayConfig, } from '@lit-protocol/types'; -import WebAuthnProvider from './providers/WebAuthnProvider'; -import { getAuthIdByAuthMethod, log } from './utils'; +import { getAuthIdByAuthMethod } from './authenticators/utils'; +import { WebAuthnAuthenticator } from './authenticators/WebAuthnAuthenticator'; /** * Class that communicates with Lit relay server */ export class LitRelay implements IRelay { + private _logger = getChildLogger({ + module: 'LitRelay', + }); /** URL for Lit's relay server */ static getRelayUrl(litNetwork: LIT_NETWORK_VALUES): string { const relayerUrl = RELAYER_URL_BY_NETWORK[litNetwork]; @@ -71,7 +75,10 @@ export class LitRelay implements IRelay { this.relayUrl = config.relayUrl || LitRelay.getRelayUrl(LIT_NETWORK.NagaDev); this.relayApiKey = config.relayApiKey || ''; - log("Lit's relay server URL:", this.relayUrl); + this._logger.info({ + msg: "Lit's relay server URL", + relayUrl: this.relayUrl, + }); } /** @@ -92,12 +99,25 @@ export class LitRelay implements IRelay { }); if (response.status < 200 || response.status >= 400) { - log('Something wrong with the API call', await response.json()); - const err = new Error('Unable to mint PKP through relay server'); - throw err; + const responseBody = await response.json(); + this._logger.info({ + msg: 'Something wrong with the API call', + responseBody, + }); + throw new NetworkError( + { + info: { + route: `${this.relayUrl}${this.mintRoute}`, + responseStatus: response.status, + responseStatusText: response.statusText, + responseBody, + }, + }, + 'Unable to mint PKP through relay server' + ); } else { const resBody = await response.json(); - log('Successfully initiated minting PKP with relayer'); + this._logger.info('Successfully initiated minting PKP with relayer'); return resBody; } } @@ -169,7 +189,7 @@ export class LitRelay implements IRelay { permittedAuthMethodIds.push(id); if (authMethod.authMethodType === AUTH_METHOD_TYPE.WebAuthn) { permittedAuthMethodPubkeys.push( - WebAuthnProvider.getPublicKeyFromRegistration( + WebAuthnAuthenticator.getPublicKeyFromRegistration( JSON.parse(authMethod.accessToken) ) ); @@ -231,26 +251,47 @@ export class LitRelay implements IRelay { ); if (response.status < 200 || response.status >= 400) { - log('Something wrong with the API call', await response.json()); - const err = new Error( + const responseBody = await response.json(); + this._logger.info({ + msg: 'Something wrong with the API call', + responseBody, + }); + throw new NetworkError( + { + info: { + route: `${this.relayUrl}/auth/status/${requestId}`, + responseStatus: response.status, + responseStatusText: response.statusText, + responseBody, + }, + }, `Unable to poll the status of this mint PKP transaction: ${requestId}` ); - throw err; } const resBody = await response.json(); - log('Response OK', { body: resBody }); + this._logger.info({ msg: 'Response OK', resBody }); if (resBody.error) { // exit loop since error - log('Something wrong with the API call', { + this._logger.info({ + msg: 'Something wrong with the API call', error: resBody.error, }); - const err = new Error(resBody.error); - throw err; + throw new NetworkError( + { + info: { + route: `${this.relayUrl}/auth/status/${requestId}`, + responseStatus: response.status, + responseStatusText: response.statusText, + resBody, + }, + }, + resBody.error + ); } else if (resBody.status === 'Succeeded') { // exit loop since success - log('Successfully authed', { ...resBody }); + this._logger.info({ msg: 'Successfully authed', resBody }); return resBody; } @@ -259,9 +300,14 @@ export class LitRelay implements IRelay { } // at this point, polling ended and still no success, set failure status - // console.error(`Hmm this is taking longer than expected...`); - const err = new Error('Polling for mint PKP transaction status timed out'); - throw err; + throw new NetworkError( + { + info: { + route: `${this.relayUrl}/auth/status/${requestId}`, + }, + }, + 'Polling for mint PKP transaction status timed out' + ); } /** @@ -282,13 +328,25 @@ export class LitRelay implements IRelay { }); if (response.status < 200 || response.status >= 400) { - console.warn('Something wrong with the API call', await response.json()); - // console.log("Uh oh, something's not quite right."); - const err = new Error('Unable to fetch PKPs through relay server'); - throw err; + const resBody = await response.json(); + this._logger.warn({ + msg: 'Something wrong with the API call', + resBody, + }); + throw new NetworkError( + { + info: { + route: `${this.relayUrl}${this.fetchRoute}`, + responseStatus: response.status, + responseStatusText: response.statusText, + resBody, + }, + }, + 'Unable to fetch PKPs through relay server' + ); } else { const resBody = await response.json(); - console.log('Successfully fetched PKPs with relayer'); + this._logger.info('Successfully fetched PKPs with relayer'); return resBody; } } @@ -312,10 +370,16 @@ export class LitRelay implements IRelay { }, }); if (response.status < 200 || response.status >= 400) { - const err = new Error( + throw new NetworkError( + { + info: { + route: `${this.relayUrl}${this.fetchRoute}`, + responseStatus: response.status, + responseStatusText: response.statusText, + }, + }, `Unable to generate registration options: ${response}` ); - throw err; } const registrationOptions = await response.json(); return registrationOptions; diff --git a/packages/auth/src/lib/storage/index.ts b/packages/auth/src/lib/storage/index.ts new file mode 100644 index 0000000000..03dc996c44 --- /dev/null +++ b/packages/auth/src/lib/storage/index.ts @@ -0,0 +1,3 @@ +import { localStorage } from './localStorage'; + +export { localStorage }; diff --git a/packages/auth/src/lib/storage/localStorage.spec.ts b/packages/auth/src/lib/storage/localStorage.spec.ts new file mode 100644 index 0000000000..6b706a270d --- /dev/null +++ b/packages/auth/src/lib/storage/localStorage.spec.ts @@ -0,0 +1,131 @@ +import { localStorage as createLocalStorage } from './localStorage'; + +import type { LitAuthData } from '../types'; + +describe('localStorage', () => { + let mockLocalStorage: WindowLocalStorage['localStorage']; + + beforeEach(() => { + mockLocalStorage = (() => { + const store = new Map(); + return { + getItem: (key: string) => + store.has(key) ? store.get(key) ?? null : null, + setItem: (key: string, value: string) => store.set(key, value), + }; + })() as unknown as WindowLocalStorage['localStorage']; + }); + + const appName: string = 'testApp'; + const networkName: string = 'testNetwork'; + const pkpAddress: string = '0x123'; + const authData: LitAuthData = { + credential: 'abc123', + authMethod: 'EthWallet', + }; + + test('initializes correctly and validates localStorage', () => { + expect(() => + createLocalStorage({ + appName, + networkName, + localStorage: mockLocalStorage, + }) + ).not.toThrow(); + }); + + test('throws an error if localStorage is missing', () => { + expect(() => + // @ts-expect-error Stubbing localstorage for error checking + createLocalStorage({ appName, networkName, localStorage: null }) + ).toThrow('localStorage is not available in this environment'); + }); + + test('writes and reads to/from localStorage correctly', async () => { + mockLocalStorage.setItem( + `lit-auth:${appName}:${networkName}:${pkpAddress}`, + JSON.stringify(authData) + ); + const storage = createLocalStorage({ + appName, + networkName, + localStorage: mockLocalStorage, + }); + + await expect(storage.read({ pkpAddress })).resolves.toEqual(authData); + }); + + test('returns null when reading nonexistent data', async () => { + const storage = createLocalStorage({ + appName, + networkName, + localStorage: mockLocalStorage, + }); + const result = await storage.read({ pkpAddress }); + + expect(result).toBeNull(); + }); + + test('isolates data between different network names', async () => { + const storageNetworkA = createLocalStorage({ + appName, + networkName: 'networkA', + localStorage: mockLocalStorage, + }); + const storageNetworkB = createLocalStorage({ + appName, + networkName: 'networkB', + localStorage: mockLocalStorage, + }); + + const authDataNetworkA = { ...authData, credential: 'networkA' }; + const authDataNetworkB = { ...authData, credential: 'networkB' }; + + await storageNetworkA.write({ + pkpAddress, + authData: authDataNetworkA, + }); + + await expect(storageNetworkA.read({ pkpAddress })).resolves.toEqual( + authDataNetworkA + ); + await expect(storageNetworkB.read({ pkpAddress })).resolves.toBeNull(); + + await storageNetworkB.write({ + pkpAddress, + authData: authDataNetworkB, + }); + + await expect(storageNetworkA.read({ pkpAddress })).resolves.toEqual( + authDataNetworkA + ); + await expect(storageNetworkB.read({ pkpAddress })).resolves.toEqual( + authDataNetworkB + ); + }); + + test('isolates data between different app names', async () => { + const storageAppA = createLocalStorage({ + appName: 'appA', + networkName, + localStorage: mockLocalStorage, + }); + const storageAppB = createLocalStorage({ + appName: 'appB', + networkName, + localStorage: mockLocalStorage, + }); + + const authDataNetworkB = { ...authData, credential: 'networkB' }; + + await storageAppA.write({ pkpAddress, authData }); + await expect(storageAppA.read({ pkpAddress })).resolves.toEqual(authData); + await expect(storageAppB.read({ pkpAddress })).resolves.toBeNull(); + + await storageAppB.write({ pkpAddress, authData: authDataNetworkB }); + await expect(storageAppB.read({ pkpAddress })).resolves.toEqual( + authDataNetworkB + ); + await expect(storageAppA.read({ pkpAddress })).resolves.toEqual(authData); + }); +}); diff --git a/packages/auth/src/lib/storage/localStorage.ts b/packages/auth/src/lib/storage/localStorage.ts new file mode 100644 index 0000000000..c3fdf9a7f4 --- /dev/null +++ b/packages/auth/src/lib/storage/localStorage.ts @@ -0,0 +1,102 @@ +import type { LitAuthStorageProvider } from './types'; +import type { LitAuthData } from '../types'; + +const LOCALSTORAGE_LIT_AUTH_PREFIX = 'lit-auth'; + +interface LocalStorageConfig { + appName: string; + localStorage?: WindowLocalStorage['localStorage']; + networkName: string; +} + +function assertLocalstorageValid( + localStorage: unknown +): asserts localStorage is WindowLocalStorage['localStorage'] { + if (!localStorage) { + throw new Error('localStorage is not available in this environment'); + } + + if (typeof localStorage !== 'object') { + throw new Error('localStorage is not an object'); + } + + if ( + !('getItem' in localStorage) || + typeof localStorage.getItem !== 'function' + ) { + throw new Error('localStorage does not have `getItem` method'); + } + + if ( + !('setItem' in localStorage) || + typeof localStorage.setItem !== 'function' + ) { + throw new Error('localStorage does not have `setItem` method'); + } +} + +/** + * Builds a lookup key for localStorage based on the provided parameters. + * Ensures that all auth data loaded for a given PKP is for the expected LIT network + * in cases where the same environment may be used to communicate w/ multiple networks + * + * @param {object} params - The parameters required to build the lookup key. + * @param {string} params.appName - The name of the application; used to store different auth material for the same PKP on the same domain + * @param {string} params.networkName - The name of the network; used to store different auth material per LIT network + * @param {string} params.pkpAddress - The LIT PKP address. + * + * @returns {string} The generated lookup key for localStorage. + * + * @private + */ +function buildLookupKey({ + appName, + networkName, + pkpAddress, +}: { + appName: string; + networkName: string; + pkpAddress: string; +}): string { + return `${LOCALSTORAGE_LIT_AUTH_PREFIX}:${appName}:${networkName}:${pkpAddress}`; +} + +export function localStorage({ + appName, + networkName, + localStorage = globalThis.localStorage, +}: LocalStorageConfig): LitAuthStorageProvider { + assertLocalstorageValid(localStorage); + + return { + config: { appName, networkName, localStorage }, + + async write({ pkpAddress, authData }) { + localStorage.setItem( + buildLookupKey({ + appName, + networkName, + pkpAddress, + }), + JSON.stringify(authData) + ); + }, + + async read({ pkpAddress }): Promise { + const value = localStorage.getItem( + buildLookupKey({ + appName, + networkName, + pkpAddress, + }) + ); + + if (!value) { + // Empty string will be converted to null + return null; + } else { + return JSON.parse(value); + } + }, + }; +} diff --git a/packages/auth/src/lib/storage/types.ts b/packages/auth/src/lib/storage/types.ts new file mode 100644 index 0000000000..3905bb95fc --- /dev/null +++ b/packages/auth/src/lib/storage/types.ts @@ -0,0 +1,15 @@ +import type { LitAuthData } from '../types'; + +export interface LitAuthStorageProvider { + config: unknown; + + read( + params: T, + options?: unknown + ): Promise; + + write( + params: T, + options?: unknown + ): Promise; +} diff --git a/packages/auth/src/lib/types.ts b/packages/auth/src/lib/types.ts new file mode 100644 index 0000000000..b7347a9ef5 --- /dev/null +++ b/packages/auth/src/lib/types.ts @@ -0,0 +1,6 @@ +import { AUTH_METHOD_TYPE } from '@lit-protocol/constants'; + +export interface LitAuthData { + credential: string; + authMethod: keyof typeof AUTH_METHOD_TYPE; +} diff --git a/packages/encryption/tsconfig.json b/packages/auth/tsconfig.json similarity index 100% rename from packages/encryption/tsconfig.json rename to packages/auth/tsconfig.json diff --git a/packages/auth-browser/tsconfig.lib.json b/packages/auth/tsconfig.lib.json similarity index 100% rename from packages/auth-browser/tsconfig.lib.json rename to packages/auth/tsconfig.lib.json diff --git a/packages/auth/tsconfig.spec.json b/packages/auth/tsconfig.spec.json new file mode 100644 index 0000000000..df5eec354a --- /dev/null +++ b/packages/auth/tsconfig.spec.json @@ -0,0 +1,16 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "module": "ES2022", + "types": ["jest", "node"], + "allowJs": true + }, + "include": [ + "jest.config.ts", + "**/*.test.ts", + "**/*.spec.ts", + "**/*.d.ts", + "../auth-browser/src/lib/chains/lit-connect-modal.d.ts" + ] +} diff --git a/packages/constants/babel.config.json b/packages/constants/babel.config.json deleted file mode 100644 index 158083d278..0000000000 --- a/packages/constants/babel.config.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "presets": [ - [ - "@nx/web/babel", - { - "useBuiltIns": "usage" - } - ] - ] -} diff --git a/packages/constants/package.json b/packages/constants/package.json index 0b84b2c77c..7c357ec8c1 100644 --- a/packages/constants/package.json +++ b/packages/constants/package.json @@ -12,6 +12,7 @@ "bugs": { "url": "https://github.com/LIT-Protocol/js-sdk/issues" }, + "type": "commonjs", "publishConfig": { "access": "public", "directory": "../../dist/packages/constants" diff --git a/packages/constants/project.json b/packages/constants/project.json index c270222f93..715e72efb0 100644 --- a/packages/constants/project.json +++ b/packages/constants/project.json @@ -15,12 +15,6 @@ "updateBuildableProjectDepsInPackageJson": true } }, - "copyJSONFilesToDist": { - "executor": "nx:run-commands", - "options": { - "command": "mkdir -p dist/packages/constants/web/abis && cp ./packages/constants/src/lib/abis/ERC20.json dist/packages/constants/web/abis/ERC20.json && cp ./packages/constants/src/lib/abis/LIT.json dist/packages/constants/web/abis/LIT.json" - } - }, "generateDoc": { "executor": "nx:run-commands", "options": { @@ -34,7 +28,7 @@ "lintFilePatterns": ["packages/constants/**/*.ts"] } }, - "testPackage": { + "test": { "executor": "@nx/jest:jest", "outputs": ["{workspaceRoot}/coverage/packages/constants"], "options": { diff --git a/packages/constants/src/index.ts b/packages/constants/src/index.ts index 940d84a592..2c72a5d0d2 100644 --- a/packages/constants/src/index.ts +++ b/packages/constants/src/index.ts @@ -1,22 +1,14 @@ // ----------- Version ----------- export * from './lib/version'; +// ----------- Environment ----------- +export * from './lib/environment'; + // ----------- Constants ----------- export * from './lib/constants/constants'; export * from './lib/constants/mappers'; export * from './lib/constants/endpoints'; export * from './lib/constants/curves'; -// ----------- Interfaces ----------- -export * from './lib/interfaces/i-errors'; - // ----------- Errors ----------- export * from './lib/errors'; - -// ----------- Utils ----------- -export * from './lib/utils/utils'; - -// ----------- ABIs ----------- -import * as ABI_ERC20 from './lib/abis/ERC20.json'; - -export { ABI_ERC20 }; diff --git a/packages/constants/src/lib/abis/ERC20.json b/packages/constants/src/lib/abis/ERC20.json deleted file mode 100644 index b8347e7d73..0000000000 --- a/packages/constants/src/lib/abis/ERC20.json +++ /dev/null @@ -1,224 +0,0 @@ -{ - "abi": [ - { - "constant": true, - "inputs": [], - "name": "name", - "outputs": [ - { - "name": "", - "type": "string" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": false, - "inputs": [ - { - "name": "_spender", - "type": "address" - }, - { - "name": "_value", - "type": "uint256" - } - ], - "name": "approve", - "outputs": [ - { - "name": "", - "type": "bool" - } - ], - "payable": false, - "stateMutability": "nonpayable", - "type": "function" - }, - { - "constant": true, - "inputs": [], - "name": "totalSupply", - "outputs": [ - { - "name": "", - "type": "uint256" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": false, - "inputs": [ - { - "name": "_from", - "type": "address" - }, - { - "name": "_to", - "type": "address" - }, - { - "name": "_value", - "type": "uint256" - } - ], - "name": "transferFrom", - "outputs": [ - { - "name": "", - "type": "bool" - } - ], - "payable": false, - "stateMutability": "nonpayable", - "type": "function" - }, - { - "constant": true, - "inputs": [], - "name": "decimals", - "outputs": [ - { - "name": "", - "type": "uint8" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": true, - "inputs": [ - { - "name": "_owner", - "type": "address" - } - ], - "name": "balanceOf", - "outputs": [ - { - "name": "balance", - "type": "uint256" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": true, - "inputs": [], - "name": "symbol", - "outputs": [ - { - "name": "", - "type": "string" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": false, - "inputs": [ - { - "name": "_to", - "type": "address" - }, - { - "name": "_value", - "type": "uint256" - } - ], - "name": "transfer", - "outputs": [ - { - "name": "", - "type": "bool" - } - ], - "payable": false, - "stateMutability": "nonpayable", - "type": "function" - }, - { - "constant": true, - "inputs": [ - { - "name": "_owner", - "type": "address" - }, - { - "name": "_spender", - "type": "address" - } - ], - "name": "allowance", - "outputs": [ - { - "name": "", - "type": "uint256" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "payable": true, - "stateMutability": "payable", - "type": "fallback" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "name": "owner", - "type": "address" - }, - { - "indexed": true, - "name": "spender", - "type": "address" - }, - { - "indexed": false, - "name": "value", - "type": "uint256" - } - ], - "name": "Approval", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "name": "from", - "type": "address" - }, - { - "indexed": true, - "name": "to", - "type": "address" - }, - { - "indexed": false, - "name": "value", - "type": "uint256" - } - ], - "name": "Transfer", - "type": "event" - } - ] -} diff --git a/packages/constants/src/lib/constants/constants.spec.ts b/packages/constants/src/lib/constants/constants.spec.ts index 9f204caeaf..c0ea4154e5 100644 --- a/packages/constants/src/lib/constants/constants.spec.ts +++ b/packages/constants/src/lib/constants/constants.spec.ts @@ -1,91 +1,13 @@ -// @ts-nocheck -import { - LIT_CHAINS, - LIT_COSMOS_CHAINS, - LIT_SVM_CHAINS, - NETWORK_PUB_KEY, -} from './constants'; +import { LIT_CHAINS, NETWORK_PUB_KEY } from './constants'; describe('constants', () => { - const MUST_HAVE_EVM_CHAINS: Array = [ - 'ethereum', - 'polygon', - 'fantom', - 'xdai', - 'bsc', - 'arbitrum', - 'avalanche', - 'fuji', - 'harmony', - 'kovan', - 'mumbai', - 'goerli', - 'ropsten', - 'rinkeby', - 'cronos', - 'optimism', - 'celo', - 'aurora', - 'eluvio', - 'alfajores', - 'xdc', - 'evmos', - 'evmosTestnet', - 'bscTestnet', - 'baseGoerli', - ]; - - const MUST_HAVE_SOL_CHAINS = ['solana', 'solanaDevnet', 'solanaTestnet']; - const MUST_HAVE_COSMOS_CHAINS = [ - 'cosmos', - 'kyve', - 'evmosCosmos', - 'evmosCosmosTestnet', - ]; - const networkPubKey = '9971e835a1fe1a4d78e381eebbe0ddc84fde5119169db816900de796d10187f3c53d65c1202ac083d099a517f34a9b62'; - it(`LIT_CHAINS should have ${MUST_HAVE_EVM_CHAINS.toString()}`, () => { - let total = 0; - - MUST_HAVE_EVM_CHAINS.forEach((chain) => { - if (Object.keys(LIT_CHAINS).includes(chain)) { - total++; - } - }); - - expect(total).toEqual(Object.keys(LIT_CHAINS).length); - }); - it(`Network public key should be ${networkPubKey}`, () => { expect(NETWORK_PUB_KEY).toEqual(networkPubKey); }); - it(`LIT_SVM_CHAINS should have ${MUST_HAVE_SOL_CHAINS}`, () => { - let total = 0; - - MUST_HAVE_SOL_CHAINS.forEach((chain) => { - if (Object.keys(LIT_SVM_CHAINS).includes(chain)) { - total++; - } - }); - - expect(total).toEqual(Object.keys(LIT_SVM_CHAINS).length); - }); - - it(`LIT_COSMOS_CHAINS should have ${MUST_HAVE_COSMOS_CHAINS}`, () => { - let total = 0; - - MUST_HAVE_COSMOS_CHAINS.forEach((chain) => { - if (Object.keys(LIT_COSMOS_CHAINS).includes(chain)) { - total++; - } - }); - - expect(total).toEqual(Object.keys(LIT_COSMOS_CHAINS).length); - }); - const ethContract = '0xA54F7579fFb3F98bd8649fF02813F575f9b3d353'; it(`Ethereum contract address should be ${ethContract}`, () => { diff --git a/packages/constants/src/lib/constants/constants.ts b/packages/constants/src/lib/constants/constants.ts index b58efeeb53..1935d7d3f9 100644 --- a/packages/constants/src/lib/constants/constants.ts +++ b/packages/constants/src/lib/constants/constants.ts @@ -1257,11 +1257,8 @@ export const ALL_LIT_CHAINS = { * Local storage key constants */ export const LOCAL_STORAGE_KEYS = { - AUTH_COSMOS_SIGNATURE: 'lit-auth-cosmos-signature', AUTH_SIGNATURE: 'lit-auth-signature', - AUTH_SOL_SIGNATURE: 'lit-auth-sol-signature', WEB3_PROVIDER: 'lit-web3-provider', - KEY_PAIR: 'lit-comms-keypair', SESSION_KEY: 'lit-session-key', WALLET_SIGNATURE: 'lit-wallet-sig', } as const; @@ -1276,14 +1273,6 @@ export const LIT_NETWORKS: Record = { [LIT_NETWORK.Custom]: [], } as const; -// ========== Either Types ========== -export const EITHER_TYPE = { - ERROR: 'ERROR', - SUCCESS: 'SUCCESS', -} as const; -export type EITHER_TYPE_TYPE = ConstantKeys; -export type EITHER_TYPE_VALUES = ConstantValues; - // ========== Supported PKP Auth Method Types ========== export const AUTH_METHOD_TYPE = { EthWallet: 1, diff --git a/packages/constants/src/lib/constants/mappers.ts b/packages/constants/src/lib/constants/mappers.ts index faca47ae28..76ef68c7d9 100644 --- a/packages/constants/src/lib/constants/mappers.ts +++ b/packages/constants/src/lib/constants/mappers.ts @@ -1,4 +1,4 @@ -import { _nagaDev } from '@lit-protocol/contracts'; +import { nagaDev } from '@lit-protocol/contracts'; import { LIT_NETWORK, @@ -12,9 +12,9 @@ import { */ export const NETWORK_CONTEXT_BY_NETWORK: Record< LIT_NETWORK_VALUES, - typeof _nagaDev | undefined + typeof nagaDev | undefined > = { - [LIT_NETWORK.NagaDev]: _nagaDev, + [LIT_NETWORK.NagaDev]: nagaDev, [LIT_NETWORK.Custom]: undefined, } as const; diff --git a/packages/constants/src/lib/environment.ts b/packages/constants/src/lib/environment.ts new file mode 100644 index 0000000000..086b427f30 --- /dev/null +++ b/packages/constants/src/lib/environment.ts @@ -0,0 +1,9 @@ +export class Environment { + static get isNode(): boolean { + return typeof process?.versions?.node !== 'undefined'; + } + + static get isBrowser(): boolean { + return !Environment.isNode; + } +} diff --git a/packages/constants/src/lib/errors.ts b/packages/constants/src/lib/errors.ts index 1373866777..8f97de313b 100644 --- a/packages/constants/src/lib/errors.ts +++ b/packages/constants/src/lib/errors.ts @@ -208,6 +208,11 @@ export const LIT_ERROR: Record = { code: 'network_error', kind: LIT_ERROR_KIND.Unexpected, }, + LIT_NETWORK_ERROR: { + name: 'LitNetworkError', + code: 'lit_network_error', + kind: LIT_ERROR_KIND.Unexpected, + }, TRANSACTION_ERROR: { name: 'TransactionError', code: 'transaction_error', @@ -300,6 +305,7 @@ export { MultiError }; export const { AutomationError, + CurveTypeNotFoundError, InitError, InvalidAccessControlConditions, InvalidArgumentException, @@ -310,6 +316,7 @@ export const { InvalidParamType, InvalidSignatureError, InvalidUnifiedConditionType, + LitNetworkError, LitNodeClientBadConfigError, LitNodeClientNotReadyError, LocalStorageItemNotFoundException, @@ -337,5 +344,4 @@ export const { WasmInitError, WrongNetworkException, WrongParamFormat, - CurveTypeNotFoundError, } = errorClasses; diff --git a/packages/constants/src/lib/interfaces/i-errors.ts b/packages/constants/src/lib/interfaces/i-errors.ts deleted file mode 100644 index 11ccb44c10..0000000000 --- a/packages/constants/src/lib/interfaces/i-errors.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { EITHER_TYPE } from '../constants/constants'; -import { LitError } from '../errors'; - -/** - * A standardized way to return either error or success - */ -export type IEither = IEitherError | IEitherSuccess; - -export interface IEitherError { - type: typeof EITHER_TYPE.ERROR; - result: LitError; -} - -export interface IEitherSuccess { - type: typeof EITHER_TYPE.SUCCESS; - result: T; -} diff --git a/packages/constants/src/lib/utils/utils.spec.ts b/packages/constants/src/lib/utils/utils.spec.ts deleted file mode 100644 index 166b313f0d..0000000000 --- a/packages/constants/src/lib/utils/utils.spec.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { ELeft, ERight } from './utils'; -import { UnknownError } from '../errors'; - -describe('error handling utils ELeft/Right works', () => { - const unknownError = new UnknownError({}, 'ERROR'); - const res = ELeft(unknownError); - const res2 = ERight('ANSWER'); - - it('returns result on ELeft()', () => { - expect(res.result).toBe(unknownError); - }); - - it('returns type on ELeft()', () => { - expect(res.type).toBe('ERROR'); - }); - - it('returns result on ERight()', () => { - expect(res2.result).toBe('ANSWER'); - }); - - it('returns type on ERight()', () => { - expect(res2.type).toBe('SUCCESS'); - }); -}); diff --git a/packages/constants/src/lib/utils/utils.ts b/packages/constants/src/lib/utils/utils.ts deleted file mode 100644 index 48f98ab987..0000000000 --- a/packages/constants/src/lib/utils/utils.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { EITHER_TYPE } from '../constants/constants'; -import { LitError } from '../errors'; -import { IEitherSuccess, IEitherError } from '../interfaces/i-errors'; - -/** - * - * This method should be used when there's an expected error - * - * @param error is the error encountered - * @returns { IEither } - */ -export function ELeft(error: LitError): IEitherError { - return { - type: EITHER_TYPE.ERROR, - result: error, - }; -} - -/** - * - * This method should be used when there's an expected success outcome - * - * @param result is the successful return value - * @returns - */ -export function ERight(result: T): IEitherSuccess { - return { - type: EITHER_TYPE.SUCCESS, - result, - }; -} diff --git a/packages/constants/tsconfig.lib.json b/packages/constants/tsconfig.lib.json index a6b1ef35bc..7bfc80f73e 100644 --- a/packages/constants/tsconfig.lib.json +++ b/packages/constants/tsconfig.lib.json @@ -3,8 +3,8 @@ "compilerOptions": { "outDir": "../../dist/out-tsc", "declaration": true, - "types": [] + "types": ["node"] }, - "include": ["**/*.ts", "src/lib/utils/uint8arrays.ts"], + "include": ["**/*.ts"], "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"] } diff --git a/packages/contracts-sdk/src/lib/auth-utils.ts b/packages/contracts-sdk/src/lib/auth-utils.ts index 00893ddd83..5d8efe0a8f 100644 --- a/packages/contracts-sdk/src/lib/auth-utils.ts +++ b/packages/contracts-sdk/src/lib/auth-utils.ts @@ -245,7 +245,6 @@ function _parseJWT(jwt: string): StytchToken { } const body = Buffer.from(parts[1], 'base64'); const parsedBody: StytchToken = JSON.parse(body.toString('ascii')); - console.log('JWT body: ', parsedBody); return parsedBody; } diff --git a/packages/contracts-sdk/src/lib/contracts-sdk.ts b/packages/contracts-sdk/src/lib/contracts-sdk.ts index 5443df28f6..b1f6ab4915 100644 --- a/packages/contracts-sdk/src/lib/contracts-sdk.ts +++ b/packages/contracts-sdk/src/lib/contracts-sdk.ts @@ -11,11 +11,13 @@ import { computeAddress } from 'ethers/lib/utils'; import { AUTH_METHOD_SCOPE_VALUES, AUTH_METHOD_TYPE_VALUES, + Environment, HTTP, HTTP_BY_NETWORK, HTTPS, InitError, InvalidArgumentException, + LitNetworkError, LIT_NETWORK, LIT_NETWORK_VALUES, METAMASK_CHAIN_INFO_BY_NETWORK, @@ -23,10 +25,11 @@ import { ParamsMissingError, RPC_URL_BY_NETWORK, TransactionError, + UnsupportedMethodError, WrongNetworkException, } from '@lit-protocol/constants'; -import { Logger, LogManager } from '@lit-protocol/logger'; -import { derivedAddresses, isBrowser, isNode } from '@lit-protocol/misc'; +import { Logger, getChildLogger } from '@lit-protocol/logger'; +import { getStorageItem, setStorageItem } from '@lit-protocol/misc-browser'; import { ContractName, EpochInfo, @@ -42,6 +45,7 @@ import { } from '@lit-protocol/types'; import { getAuthIdByAuthMethod, stringToArrayify } from './auth-utils'; +import { derivedAddresses } from './helpers/addresses'; import { CIDParser, getBytes32FromMultihash, @@ -75,6 +79,7 @@ const GAS_LIMIT_ADJUSTMENT = ethers.BigNumber.from(100).add( // The class has a number of properties that represent the smart contract instances, such as accessControlConditionsContract, litTokenContract, pkpNftContract, etc. These smart contract instances are created by passing the contract address, ABI, and provider to the ethers.Contract constructor. // The class also has a utils object with helper functions for converting between hexadecimal and decimal representation of numbers, as well as functions for working with multihashes and timestamps. export class LitContracts { + private readonly _logger: Logger; // eslint-disable-next-line @typescript-eslint/no-explicit-any provider: ethers.providers.StaticJsonRpcProvider | any; rpc: string; @@ -103,8 +108,6 @@ export class LitContracts { 'PriceFeed', ]; - static logger: Logger = LogManager.Instance.get('contract-sdk'); - // make the constructor args optional constructor(args?: { // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -124,6 +127,10 @@ export class LitContracts { debug?: boolean; network?: LIT_NETWORKS_KEYS; }) { + this._logger = getChildLogger({ + module: 'LitContracts', + ...(args?.debug ? { level: 'debug' } : {}), + }); // this.provider = args?.provider; this.customContext = args?.customContext; this.rpc = args?.rpc; @@ -145,18 +152,6 @@ export class LitContracts { } } - /** - * Logs a message to the console. - * - * @param {any} [args] An optional value to log with the message. - */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - log = (...args: any[]) => { - if (this.debug) { - LitContracts.logger.debug(...args); - } - }; - connect = async () => { // ======================================= // SETTING UP PROVIDER @@ -168,9 +163,9 @@ export class LitContracts { let wallet; let SETUP_DONE = false; if (this.provider) { - this.log('Using provided provider'); - } else if (isBrowser() && !this.signer) { - this.log("----- We're in the browser! -----"); + this._logger.info('Using provided provider'); + } else if (Environment.isBrowser && !this.signer) { + this._logger.info("----- We're in the browser! -----"); const web3Provider = window.ethereum; @@ -219,8 +214,8 @@ export class LitContracts { // ---------------------------------------------- // (Node) Setting up Provider // ---------------------------------------------- - else if (isNode()) { - this.log("----- We're in node! -----"); + else if (Environment.isNode) { + this._logger.info("----- We're in node! -----"); this.provider = new ethers.providers.StaticJsonRpcProvider({ url: this.rpc, skipFetchSetup: true, @@ -231,7 +226,7 @@ export class LitContracts { // CUSTOM PRIVATE KEY // ====================================== if (this.privateKey) { - this.log('Using your own private key'); + this._logger.info('Using your own private key'); this.signer = new ethers.Wallet(this.privateKey, this.provider); this.provider = this.signer.provider; SETUP_DONE = true; @@ -244,61 +239,72 @@ export class LitContracts { (!this.privateKey && this.randomPrivateKey) || this.options?.storeOrUseStorageKey ) { - this.log('THIS.SIGNER:', this.signer); + this._logger.info({ msg: 'THIS.SIGNER', signer: this.signer }); const STORAGE_KEY = 'lit-contracts-sdk-private-key'; - this.log("Let's see if you have a private key in your local storage!"); + this._logger.info( + "Let's see if you have a private key in your local storage!" + ); // -- find private key in local storage let storagePrivateKey; try { - storagePrivateKey = localStorage.getItem(STORAGE_KEY); + storagePrivateKey = getStorageItem(STORAGE_KEY); } catch (e) { // swallow - // this.log('Not a problem.'); + // this.#logger.info('Not a problem.'); } // -- (NOT FOUND) no private key found if (!storagePrivateKey) { - this.log('Not a problem, we will generate a random private key'); + this._logger.info( + 'Not a problem, we will generate a random private key' + ); storagePrivateKey = ethers.utils.hexlify(ethers.utils.randomBytes(32)); } // -- (FOUND) private key found else { - this.log("Found your private key in local storage. Let's use it!"); + this._logger.info( + "Found your private key in local storage. Let's use it!" + ); } this.signer = new ethers.Wallet(storagePrivateKey, this.provider); - this.log('- Your private key:', storagePrivateKey); - this.log('- Your address:', await this.signer.getAddress()); - this.log('- this.signer:', this.signer); - this.log('- this.provider.getSigner():', this.provider.getSigner()); + this._logger.info({ + msg: '- Your address', + address: await this.signer.getAddress(), + }); + this._logger.info({ msg: '- this.signer', signer: this.signer }); + this._logger.info({ + msg: '- this.provider.getSigner()', + signer: this.provider.getSigner(), + }); // -- (OPTION) store private key in local storage if (this.options?.storeOrUseStorageKey) { - this.log( + this._logger.info( "You've set the option to store your private key in local storage." ); - localStorage.setItem(STORAGE_KEY, storagePrivateKey); + setStorageItem(STORAGE_KEY, storagePrivateKey); } } else { // ---------------------------------------- // Ask Metamask to sign // ---------------------------------------- - if (isBrowser() && wallet && !SETUP_DONE) { - // this.log('HERE????'); - this.log('this.signer:', this.signer); + if (Environment.isBrowser && wallet && !SETUP_DONE) { + // this.#logger.info('HERE????'); + this._logger.info({ msg: 'this.signer', signer: this.signer }); this.signer = wallet.getSigner(); } } if (this.signer !== undefined && this.signer !== null) { if ('litNodeClient' in this.signer && 'rpcProvider' in this.signer) { - this.log(` + this._logger.info(` // *********************************************************************************************** // THIS IS A PKP WALLET, USING IT AS A SIGNER AND ITS RPC PROVIDER AS PROVIDER // *********************************************************************************************** @@ -309,13 +315,21 @@ export class LitContracts { } } - this.log('Your Signer:', this.signer); - this.log('Your Provider:', this.provider?.connection); + this._logger.info({ msg: 'Your Signer', signer: this.signer }); + this._logger.info({ + msg: 'Your Provider', + provider: this.provider?.connection, + }); if (!this.provider) { - this.log('No provider found. Will try to use the one from the signer.'); + this._logger.info( + 'No provider found. Will try to use the one from the signer.' + ); this.provider = this.signer.provider; - this.log('Your Provider(from signer):', this.provider?.connection); + this._logger.info({ + msg: 'Your Provider(from signer)', + provider: this.provider?.connection, + }); } this.connected = true; @@ -807,12 +821,7 @@ export class LitContracts { HTTP; // Fallback to HTTP // Construct the URL - const url = `${protocol}${ip}:${port}`; - - // Log the constructed URL for debugging - LitContracts.logger.debug("Validator's URL:", url); - - return url; + return `${protocol}${ip}:${port}`; }); } @@ -868,11 +877,27 @@ export class LitContracts { const minNodeCountInt = ethers.BigNumber.from(minNodeCount).toNumber(); if (!minNodeCountInt) { - throw new Error('❌ Minimum validator count is not set'); + throw new LitNetworkError( + { + info: { + epochInfo, + activeValidators: activeUnkickedValidatorStructs.length, + minNodeCount: minNodeCountInt, + }, + }, + '❌ Minimum validator count is not set' + ); } if (activeUnkickedValidatorStructs.length < minNodeCountInt) { - throw new Error( + throw new LitNetworkError( + { + info: { + epochInfo, + activeValidators: activeUnkickedValidatorStructs.length, + minNodeCount: minNodeCountInt, + }, + }, `❌ Active validator set does not meet the consensus. Required: ${minNodeCountInt} but got: ${activeUnkickedValidatorStructs.length}` ); } @@ -1136,13 +1161,13 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope } const tokenId = events[0].topics[1]; - this.log('tokenId:', tokenId); + this._logger.info({ msg: 'tokenId', tokenId }); let tries = 0; const maxAttempts = 10; let publicKey = ''; while (tries < maxAttempts) { publicKey = await pkpNftContract['getPubkey'](tokenId); - this.log('pkp pub key: ', publicKey); + this._logger.info({ msg: 'pkp pub key', publicKey }); if (publicKey !== '0x') { break; } @@ -1377,7 +1402,6 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope * const CID = require('multiformats/cid') * const ipfsId = 'QmZKLGf3vgYsboM7WVUS9X56cJSdLzQVacNp841wmEDRkW' * const bytes32 = getBytes32FromMultihash(ipfsId, CID) - * console.log(bytes32) * * @returns {IPFSHash} */ @@ -1451,7 +1475,9 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope tokens.push(token); } catch (e) { - this.log(`[getTokensByAddress] Ended search on index: ${i}`); + this._logger.info( + `[getTokensByAddress] Ended search on index: ${i}` + ); break; } } @@ -1512,7 +1538,9 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope tokens.push(token); } catch (e) { - this.log(`[getTokensByAddress] Ended search on index: ${i}`); + this._logger.info( + `[getTokensByAddress] Ended search on index: ${i}` + ); break; } } @@ -1599,12 +1627,12 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope } if (this.isPKP) { - this.log( + this._logger.info( "This is a PKP wallet, so we'll use the PKP wallet to sign the tx" ); } - this.log('...signing and sending tx'); + this._logger.info('...signing and sending tx'); const sentTx = await this._callWithAdjustedOverrides( pkpNftContract, @@ -1613,22 +1641,22 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope { value: mintCost, ...param } ); - this.log('sentTx:', sentTx); + this._logger.info({ msg: 'sentTx', sentTx }); // eslint-disable-next-line @typescript-eslint/no-explicit-any const res: any = await sentTx.wait(); - this.log('res:', res); + this._logger.info({ msg: 'res', res }); const events = 'events' in res ? res.events : res.logs; const tokenIdFromEvent = events[0].topics[1]; - this.log('tokenIdFromEvent:', tokenIdFromEvent); + this._logger.info({ msg: 'tokenIdFromEvent', tokenIdFromEvent }); let tries = 0; const maxAttempts = 10; let publicKey = ''; while (tries < maxAttempts) { publicKey = await pkpNftContract['getPubkey'](tokenIdFromEvent); - this.log('pkp pub key: ', publicKey); + this._logger.info({ msg: 'pkp pub key', publicKey }); if (publicKey !== '0x') { break; } @@ -1638,7 +1666,7 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope }); } - this.log('public key from token id', publicKey); + this._logger.info({ msg: 'public key from token id', publicKey }); if (publicKey.startsWith('0x')) { publicKey = publicKey.slice(2); } @@ -1686,7 +1714,7 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope const tokenId = events[1].topics[1]; return { tx, res: txRec, tokenId }; } catch (e: unknown) { - this.log(`[claimAndMint] error: ${(e as Error).message}`); + this._logger.info(`[claimAndMint] error: ${(e as Error).message}`); throw new TransactionError( { info: { @@ -1791,7 +1819,10 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope ); } - this.log('[getPermittedAddresses] input:', tokenId); + this._logger.info({ + msg: '[getPermittedAddresses] input', + tokenId, + }); let addresses: string[] = []; @@ -1811,7 +1842,7 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope break; } } catch (e: unknown) { - this.log( + this._logger.info( `[getPermittedAddresses] error:`, (e as Error).message ); @@ -1878,10 +1909,10 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope break; } } catch (e: unknown) { - this.log( - `[getPermittedActions] error:`, - (e as Error).message - ); + this._logger.info({ + msg: `[getPermittedActions] error:`, + message: (e as Error).message, + }); tries++; } } @@ -1929,11 +1960,14 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope ); } - this.log('[isPermittedAction] input:', pkpId); - this.log('[isPermittedAction] input:', ipfsId); + this._logger.info({ msg: '[isPermittedAction] input', pkpId }); + this._logger.info({ msg: '[isPermittedAction] input', ipfsId }); const ipfsHash = this.utils.getBytesFromMultihash(ipfsId); - this.log('[isPermittedAction] converted:', ipfsHash); + this._logger.info({ + msg: '[isPermittedAction] converted', + ipfsHash, + }); const bool = await pkpPermissionsContract['isPermittedAction']( pkpId, @@ -1990,21 +2024,36 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope ); } - this.log('[addPermittedAction] input:', pkpId); + this._logger.info({ msg: '[addPermittedAction] input', pkpId }); const pubKey = await pubkeyRouterContract['getPubkey'](pkpId); - this.log('[addPermittedAction] converted:', pubKey); + this._logger.info({ + msg: '[addPermittedAction] converted', + pubKey, + }); const pubKeyHash = ethers.utils.keccak256(pubKey); - this.log('[addPermittedAction] converted:', pubKeyHash); + this._logger.info({ + msg: '[addPermittedAction] converted', + pubKeyHash, + }); const tokenId = ethers.BigNumber.from(pubKeyHash); - this.log('[addPermittedAction] converted:', tokenId); + this._logger.info({ + msg: '[addPermittedAction] converted', + tokenId, + }); - this.log('[addPermittedAction] input:', ipfsId); + this._logger.info({ + msg: '[addPermittedAction] input', + ipfsId, + }); const ipfsIdBytes = this.utils.getBytesFromMultihash(ipfsId); - this.log('[addPermittedAction] converted:', ipfsIdBytes); + this._logger.info({ + msg: '[addPermittedAction] converted', + ipfsIdBytes, + }); const tx = await this._callWithAdjustedOverrides( pkpPermissionsContract, @@ -2012,7 +2061,7 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope [tokenId, ipfsIdBytes, [1]] ); - this.log('[addPermittedAction] output:', tx); + this._logger.info({ msg: '[addPermittedAction] output', tx }); return tx; }, @@ -2057,10 +2106,14 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope ); } - this.log('[addPermittedAddress] input:', pkpId); - this.log('[addPermittedAddress] input:', ownerAddress); - - this.log('[addPermittedAddress] input:', pkpId); + this._logger.info({ + msg: '[addPermittedAddress] input', + pkpId, + }); + this._logger.info({ + msg: '[addPermittedAddress] input', + ownerAddress, + }); const tx = await this._callWithAdjustedOverrides( pkpPermissionsContract, @@ -2068,7 +2121,7 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope [pkpId, ownerAddress, [1]] ); - this.log('[addPermittedAddress] output:', tx); + this._logger.info({ msg: '[addPermittedAddress] output', tx }); return tx; }, @@ -2112,11 +2165,20 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope ); } - this.log('[revokePermittedAction] input:', pkpId); - this.log('[revokePermittedAction] input:', ipfsId); + this._logger.info({ + msg: '[revokePermittedAction] input', + pkpId, + }); + this._logger.info({ + msg: '[revokePermittedAction] input', + ipfsId, + }); const ipfsHash = this.utils.getBytesFromMultihash(ipfsId); - this.log('[revokePermittedAction] converted:', ipfsHash); + this._logger.info({ + msg: '[revokePermittedAction] converted', + ipfsHash, + }); const tx = await this._callWithAdjustedOverrides( pkpPermissionsContract, @@ -2124,7 +2186,7 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope [pkpId, ipfsHash] ); - this.log('[revokePermittedAction] output:', tx); + this._logger.info({ msg: '[revokePermittedAction] output', tx }); return tx; }, @@ -2144,9 +2206,9 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope * @return { Promise } */ // getIpfsIds: async (solidityIpfsId: string): Promise => { - // this.log('[getIpfsIds] input:', solidityIpfsId); + // this.#logger.info({ msg: '[getIpfsIds] input', solidityIpfsId }); // const ipfsId = this.utils.getMultihashFromBytes(solidityIpfsId); - // this.log('[getIpfsIds] output:', ipfsId); + // this.#logger.info({ msg: '[getIpfsIds] output', ipfsId }); // return ipfsId; // }, }, @@ -2230,7 +2292,14 @@ https://developer.litprotocol.com/v3/sdk/wallets/auth-methods/#auth-method-scope ): Promise> { // Check if the method exists on the contract if (!(method in contract.functions)) { - throw new Error( + throw new UnsupportedMethodError( + { + info: { + network: this.network, + contract, + method, + }, + }, `Method ${String(method)} does not exist on the contract` ); } diff --git a/packages/misc/src/lib/addresses.spec.ts b/packages/contracts-sdk/src/lib/helpers/addresses.spec.ts similarity index 100% rename from packages/misc/src/lib/addresses.spec.ts rename to packages/contracts-sdk/src/lib/helpers/addresses.spec.ts diff --git a/packages/misc/src/lib/addresses.ts b/packages/contracts-sdk/src/lib/helpers/addresses.ts similarity index 76% rename from packages/misc/src/lib/addresses.ts rename to packages/contracts-sdk/src/lib/helpers/addresses.ts index 12b6dea8de..34f778328c 100644 --- a/packages/misc/src/lib/addresses.ts +++ b/packages/contracts-sdk/src/lib/helpers/addresses.ts @@ -11,77 +11,11 @@ import { NoWalletException, ParamsMissingError, } from '@lit-protocol/constants'; +import { publicKeyCompress } from '@lit-protocol/crypto'; +import { logger } from '@lit-protocol/logger'; +import { getStorageItem, setStorageItem } from '@lit-protocol/misc-browser'; import { DerivedAddresses } from '@lit-protocol/types'; -/** - * Converts a public key between compressed and uncompressed formats. - * - * @param publicKey - Public key as a Buffer (33 bytes compressed or 65 bytes uncompressed) - * @param compressed - Boolean flag indicating whether the output should be compressed - * @returns Converted public key as a Buffer - */ -export function publicKeyConvert( - publicKey: Buffer, - compressed: boolean = true -): Buffer { - if (compressed) { - // Compress the public key (if it's not already compressed) - if (publicKey.length === 65 && publicKey[0] === 0x04) { - const x = publicKey.subarray(1, 33); - const y = publicKey.subarray(33, 65); - const prefix = y[y.length - 1] % 2 === 0 ? 0x02 : 0x03; - return Buffer.concat([Buffer.from([prefix]), x]); - } - } else { - // Decompress the public key - if ( - publicKey.length === 33 && - (publicKey[0] === 0x02 || publicKey[0] === 0x03) - ) { - const x = publicKey.subarray(1); - const y = decompressY(publicKey[0], x); - return Buffer.concat([Buffer.from([0x04]), x, y]); - } - } - // Return the original if no conversion is needed - return publicKey; -} - -/** - * Decompresses the y-coordinate of a compressed public key. - * - * @param prefix - The first byte of the compressed public key (0x02 or 0x03) - * @param x - The x-coordinate of the public key - * @returns The decompressed y-coordinate as a Buffer - */ -function decompressY(prefix: number, x: Buffer): Buffer { - const p = BigInt( - '0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F' - ); - const a = BigInt('0'); - const b = BigInt('7'); - - const xBigInt = BigInt('0x' + x.toString('hex')); - const rhs = (xBigInt ** 3n + a * xBigInt + b) % p; - const yBigInt = modSqrt(rhs, p); - - const isEven = yBigInt % 2n === 0n; - const y = isEven === (prefix === 0x02) ? yBigInt : p - yBigInt; - - return Buffer.from(y.toString(16).padStart(64, '0'), 'hex'); -} - -/** - * Computes the modular square root of a number. - * - * @param a - The number to find the square root of - * @param p - The modulus - * @returns The square root modulo p - */ -function modSqrt(a: bigint, p: bigint): bigint { - return a ** ((p + 1n) / 4n) % p; -} - /** * Derives a Bitcoin address (P2PKH) from a public key. * @@ -133,7 +67,7 @@ function deriveCosmosAddress( // If the Ethereum public key is uncompressed (130 characters), compress it if (pubKeyBuffer.length === 65 && pubKeyBuffer[0] === 0x04) { - pubKeyBuffer = Buffer.from(publicKeyConvert(pubKeyBuffer, true)); + pubKeyBuffer = Buffer.from(publicKeyCompress(pubKeyBuffer)); } // Hash the compressed public key with SHA-256 @@ -245,13 +179,17 @@ export const derivedAddresses = async ( const CACHE_KEY = 'lit-cached-pkps'; let cachedPkpJSON; try { - const cachedPkp = localStorage.getItem(CACHE_KEY); + const cachedPkp = getStorageItem(CACHE_KEY); if (cachedPkp) { cachedPkpJSON = JSON.parse(cachedPkp); publicKey = cachedPkpJSON[pkpTokenId]; } } catch (e) { - console.error(e); + logger.error({ + function: 'derivedAddresses', + msg: `Could not get ${CACHE_KEY} from storage. Continuing...`, + error: e, + }); } if (!publicKey) { @@ -287,18 +225,19 @@ export const derivedAddresses = async ( if (options.cacheContractCall) { // trying to store key value pair in local storage try { - const cachedPkp = localStorage.getItem(CACHE_KEY); - if (cachedPkp) { - const cachedPkpJSON = JSON.parse(cachedPkp); - cachedPkpJSON[pkpTokenId] = publicKey; - localStorage.setItem(CACHE_KEY, JSON.stringify(cachedPkpJSON)); - } else { - const cachedPkpJSON: Record = {}; - cachedPkpJSON[pkpTokenId] = publicKey; - localStorage.setItem(CACHE_KEY, JSON.stringify(cachedPkpJSON)); - } + const cachedPkp = getStorageItem(CACHE_KEY); + const cachedPkpJSON: Record = cachedPkp + ? JSON.parse(cachedPkp) + : {}; + + cachedPkpJSON[pkpTokenId] = publicKey; + setStorageItem(CACHE_KEY, JSON.stringify(cachedPkpJSON)); } catch (e) { - console.error(e); + logger.error({ + function: 'derivedAddresses', + msg: `Could not get ${CACHE_KEY} from storage. Continuing...`, + error: e, + }); } } } diff --git a/packages/contracts-sdk/src/lib/helpers/getBytes32FromMultihash.ts b/packages/contracts-sdk/src/lib/helpers/getBytes32FromMultihash.ts index 40e3ddc199..8ac5d63b56 100644 --- a/packages/contracts-sdk/src/lib/helpers/getBytes32FromMultihash.ts +++ b/packages/contracts-sdk/src/lib/helpers/getBytes32FromMultihash.ts @@ -30,9 +30,6 @@ export interface CIDParser { * const CID = require('multiformats/cid') * const ipfsId = 'QmZKLGf3vgYsboM7WVUS9X56cJSdLzQVacNp841wmEDRkW' * const {digest, hashFunction, size} = getBytes32FromMultihash(ipfsId, CID) - * console.log(digest) // string - * console.log(hashFunction) // number - * console.log(size) // number * * @returns {IPFSHash} */ diff --git a/packages/contracts-sdk/src/lib/price-feed-info-manager.ts b/packages/contracts-sdk/src/lib/price-feed-info-manager.ts index 9c3bf6c0da..39dd0ef07b 100644 --- a/packages/contracts-sdk/src/lib/price-feed-info-manager.ts +++ b/packages/contracts-sdk/src/lib/price-feed-info-manager.ts @@ -77,20 +77,6 @@ async function fetchPriceFeedInfo({ } }); - // console.log( - // 'getPriceFeedInfo()', - // util.inspect( - // { - // epochId, - // minNodeCount, - // networkPrices: { - // mapByAddress: networkPriceMap, - // }, - // }, - // { depth: 4 } - // ) - // ); - return { epochId, minNodeCount, diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 96715e1682..c6f1566eb9 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,2 +1,3 @@ +export * from './lib/helpers/endpoint-version'; +export * from './lib/helpers/most-common-value'; export * from './lib/lit-core'; -export * from './lib/endpoint-version'; diff --git a/packages/core/src/lib/endpoint-version.ts b/packages/core/src/lib/helpers/endpoint-version.ts similarity index 71% rename from packages/core/src/lib/endpoint-version.ts rename to packages/core/src/lib/helpers/endpoint-version.ts index 543d154eee..b2825bff85 100644 --- a/packages/core/src/lib/endpoint-version.ts +++ b/packages/core/src/lib/helpers/endpoint-version.ts @@ -1,3 +1,4 @@ +import { NetworkError } from '@lit-protocol/constants'; import { LitEndpoint } from '@lit-protocol/types'; /** @@ -17,7 +18,15 @@ export const composeLitUrl = (params: { try { new URL(params.url); } catch (error) { - throw new Error(`[composeLitUrl] Invalid URL: "${params.url}"`); + throw new NetworkError( + { + info: { + url: params.url, + endpoint: params.endpoint, + }, + }, + `[composeLitUrl] Invalid URL: "${params.url}"` + ); } const version = params.endpoint.version; diff --git a/packages/core/src/lib/helpers/most-common-value.test.ts b/packages/core/src/lib/helpers/most-common-value.test.ts new file mode 100644 index 0000000000..02df45ecf6 --- /dev/null +++ b/packages/core/src/lib/helpers/most-common-value.test.ts @@ -0,0 +1,19 @@ +import { mostCommonValue } from './most-common-value'; + +describe('mostCommonValue', () => { + it('should get the most common string in an array', () => { + const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 8]; + + const mostOccured = mostCommonValue(arr); + + expect(mostOccured).toBe(8); + }); + + it('should get the last element of the array if every element only appears once', () => { + const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]; + + const mostOccured = mostCommonValue(arr); + + expect(mostOccured).toBe(0); + }); +}); diff --git a/packages/core/src/lib/helpers/most-common-value.ts b/packages/core/src/lib/helpers/most-common-value.ts new file mode 100644 index 0000000000..060dab6878 --- /dev/null +++ b/packages/core/src/lib/helpers/most-common-value.ts @@ -0,0 +1,17 @@ +/** + * + * Find the element that occurs the most in an array + * + * @template T + * @param { T[] } arr + * @returns { T } the element that appeared the most + */ +export const mostCommonValue = (arr: T[]): T | undefined => { + return arr + .sort( + (a: T, b: T) => + arr.filter((v: T) => v === a).length - + arr.filter((v: T) => v === b).length + ) + .pop(); +}; diff --git a/packages/core/src/lib/lit-core.spec.ts b/packages/core/src/lib/lit-core.spec.ts index b44c455656..3b1082c16f 100644 --- a/packages/core/src/lib/lit-core.spec.ts +++ b/packages/core/src/lib/lit-core.spec.ts @@ -1,12 +1,22 @@ import { InvalidEthBlockhash } from '@lit-protocol/constants'; +const logMock = jest.fn(); + +jest.mock('pino', () => { + return { + pino: jest.fn(() => ({ + info: logMock, + error: logMock, + })), + }; +}); + import { LitCore } from './lit-core'; describe('LitCore', () => { let core: LitCore; describe('getLatestBlockhash', () => { - let originalFetch: typeof fetch; let originalDateNow: typeof Date.now; const mockBlockhashUrl = 'https://block-indexer-url.com/get_most_recent_valid_block'; @@ -16,12 +26,11 @@ describe('LitCore', () => { litNetwork: 'custom', }); core['_blockHashUrl'] = mockBlockhashUrl; - originalFetch = fetch; originalDateNow = Date.now; + Date.now = jest.fn().mockReturnValue(1000000); }); afterEach(() => { - global.fetch = originalFetch; Date.now = originalDateNow; jest.clearAllMocks(); }); diff --git a/packages/core/src/lib/lit-core.ts b/packages/core/src/lib/lit-core.ts index 7ee0479e72..ff7e003e3a 100644 --- a/packages/core/src/lib/lit-core.ts +++ b/packages/core/src/lib/lit-core.ts @@ -2,6 +2,7 @@ import { ethers } from 'ethers'; import { CENTRALISATION_BY_NETWORK, + Environment, HTTP, HTTPS, InitError, @@ -16,7 +17,6 @@ import { LIT_NETWORK, LIT_NETWORKS, LitNodeClientNotReadyError, - LOG_LEVEL, NetworkError, NodeError, RPC_URL_BY_NETWORK, @@ -27,18 +27,7 @@ import { } from '@lit-protocol/constants'; import { LitContracts } from '@lit-protocol/contracts-sdk'; import { checkSevSnpAttestation, computeHDPubKey } from '@lit-protocol/crypto'; -import { - bootstrapLogManager, - isBrowser, - isNode, - log, - logError, - logErrorWithRequestId, - logWithRequestId, - mostCommonString, - sendRequest, - setMiscLitConfig, -} from '@lit-protocol/misc'; +import { Logger, getChildLogger } from '@lit-protocol/logger'; import { AuthSig, BlockHashErrorResponse, @@ -53,7 +42,8 @@ import { SuccessNodePromises, } from '@lit-protocol/types'; -import { composeLitUrl } from './endpoint-version'; +import { composeLitUrl } from './helpers/endpoint-version'; +import { mostCommonValue } from './helpers/most-common-value'; import { CoreNodeConfig, EpochCache, @@ -118,6 +108,7 @@ export type LitNodeClientConfigWithDefaults = Required< }; export class LitCore { + private readonly _coreLogger: Logger; config: LitNodeClientConfigWithDefaults = { alertWhenUnauthorized: false, debug: true, @@ -178,12 +169,10 @@ export class LitCore { }; } - // -- set global variables - setMiscLitConfig(this.config); - bootstrapLogManager( - 'core', - this.config.debug ? LOG_LEVEL.DEBUG : LOG_LEVEL.OFF - ); + this._coreLogger = getChildLogger({ + module: 'LitCore', + ...(this.config.debug ? { level: 'debug' } : {}), + }); // -- configure local storage if not present // LitNodeClientNodejs is a base for LitNodeClient @@ -191,7 +180,7 @@ export class LitCore { // If the user sets a new storage provider we respect it over our default storage // If the user sets a new file path, we respect it over the default path. if (this.config.storageProvider?.provider) { - log( + this._coreLogger.info( 'localstorage api not found, injecting persistence instance found in config' ); // using Object defineProperty in order to set a property previously defined as readonly. @@ -200,25 +189,16 @@ export class LitCore { value: this.config.storageProvider?.provider, }); } else if ( - isNode() && + Environment.isNode && !globalThis.localStorage && !this.config.storageProvider?.provider ) { - log( + this._coreLogger.info( 'Looks like you are running in NodeJS and did not provide a storage provider, your sessions will not be cached' ); } } - // ========== Logger utilities ========== - getLogsForRequestId = (id: string): string[] => { - return globalThis.logManager.getLogsForId(id); - }; - - getRequestIds = (): Set => { - return globalThis.logManager.LoggerIds; - }; - /** * Retrieves the validator data including staking contract, epoch, minNodeCount, and bootstrapUrls. * @returns An object containing the validator data. @@ -262,10 +242,19 @@ export class LitCore { ); } - log('[_getValidatorData] epochInfo: ', epochInfo); - log('[_getValidatorData] minNodeCount: ', minNodeCount); - log('[_getValidatorData] Bootstrap urls: ', bootstrapUrls); - log('[_getValidatorData] stakingContract: ', stakingContract.address); + this._coreLogger.info({ msg: '[_getValidatorData] epochInfo', epochInfo }); + this._coreLogger.info({ + msg: '[_getValidatorData] minNodeCount', + minNodeCount, + }); + this._coreLogger.info({ + msg: '[_getValidatorData] Bootstrap urls', + bootstrapUrls, + }); + this._coreLogger.info({ + msg: '[_getValidatorData] stakingContract', + address: stakingContract.address, + }); return { stakingContract, @@ -289,7 +278,7 @@ export class LitCore { private async _handleStakingContractStateChange( state: STAKING_STATES_VALUES ) { - log(`New state detected: "${state}"`); + this._coreLogger.info(`New state detected: "${state}"`); const validatorData = await this._getValidatorData(); @@ -303,7 +292,7 @@ export class LitCore { if (CENTRALISATION_BY_NETWORK[this.config.litNetwork] !== 'centralised') { // We don't need to handle node urls changing on centralised networks, since their validator sets are static try { - log( + this._coreLogger.info( 'State found to be new validator set locked, checking validator set' ); const existingNodeUrls: string[] = [...this.config.bootstrapUrls]; @@ -322,11 +311,10 @@ export class LitCore { The sdk should be able to understand its current execution environment and wait on an active network request to the previous epoch's node set before changing over. */ - log( - 'Active validator sets changed, new validators ', + this._coreLogger.info({ + msg: 'Active validator sets changed, new validators. Check delta. Starting node connection', delta, - 'starting node connection' - ); + }); } await this.connect(); @@ -335,10 +323,10 @@ export class LitCore { // But for now, our every-30-second network sync will fix things in at most 30s from now. // this.ready = false; Should we assume core is invalid if we encountered errors refreshing from an epoch change? const { message = '' } = err as Error; - logError( - 'Error while attempting to reconnect to nodes after epoch transition:', - message - ); + this._coreLogger.error({ + msg: 'Error while attempting to reconnect to nodes after epoch transition', + message, + }); } } } @@ -359,10 +347,10 @@ export class LitCore { } if (this._stakingContract) { - log( - 'listening for state change on staking contract: ', - this._stakingContract.address - ); + this._coreLogger.info({ + msg: 'listening for state change on staking contract', + address: this._stakingContract.address, + }); // Stash a function instance, because its identity must be consistent for '.off()' usage to work later this._stakingContractListener = (state: STAKING_STATES_VALUES) => { @@ -400,8 +388,6 @@ export class LitCore { this.ready = false; this._stopListeningForNewEpoch(); - // this._stopNetworkPolling(); - setMiscLitConfig(undefined); } _stopListeningForNewEpoch() { @@ -490,7 +476,7 @@ export class LitCore { {} ); if (this.config.litNetwork === LIT_NETWORK.Custom) { - log('using custom contracts: ', logAddresses); + this._coreLogger.info({ msg: 'using custom contracts', logAddresses }); } } @@ -517,8 +503,11 @@ export class LitCore { this.ready = true; - log(`🔥 lit is ready. "litNodeClient" variable is ready to use globally.`); - log('current network config', { + this._coreLogger.info( + `🔥 lit is ready. "litNodeClient" variable is ready to use globally.` + ); + this._coreLogger.info({ + msg: 'current network config', networkPubkey: this.networkPubKey, networkPubKeySet: this.networkPubKeySet, hdRootPubkeys: this.hdRootPubkeys, @@ -527,7 +516,7 @@ export class LitCore { }); // browser only - if (isBrowser()) { + if (Environment.isBrowser) { document.dispatchEvent(new Event('lit-ready')); } } @@ -562,25 +551,30 @@ export class LitCore { // node, even though its keys may be "ERR". // Should we really track servers with ERR as keys? if ( - keys.serverPubKey === 'ERR' || - keys.subnetPubKey === 'ERR' || - keys.networkPubKey === 'ERR' || - keys.networkPubKeySet === 'ERR' + [ + keys.serverPubKey, + keys.subnetPubKey, + keys.networkPubKey, + keys.networkPubKeySet, + ].includes('ERR') ) { - logErrorWithRequestId( + this._coreLogger.error({ requestId, - 'Error connecting to node. Detected "ERR" in keys', + msg: 'Error connecting to node. Detected "ERR" in keys', url, - keys - ); + keys, + }); } - log(`Handshake with ${url} returned keys: `, keys); + this._coreLogger.info({ + msg: `Handshake with ${url} returned keys: `, + keys, + }); if (!keys.latestBlockhash) { - logErrorWithRequestId( + this._coreLogger.error({ requestId, - `Error getting latest blockhash from the node ${url}.` - ); + msg: `Error getting latest blockhash from the node ${url}.`, + }); } // We force SEV checks on some networks even if the caller attempts to construct the client with them disabled @@ -599,12 +593,12 @@ export class LitCore { } // actually verify the attestation by checking the signature against AMD certs - log('Checking attestation against amd certs...'); + this._coreLogger.info('Checking attestation against amd certs...'); try { // ensure we won't try to use a node with an invalid attestation response await checkSevSnpAttestation(attestation, challenge, url); - log(`Lit Node Attestation verified for ${url}`); + this._coreLogger.info(`Lit Node Attestation verified for ${url}`); // eslint-disable-next-line @typescript-eslint/no-explicit-any } catch (e: any) { throw new InvalidNodeAttestation( @@ -617,7 +611,7 @@ export class LitCore { ); } } else if (this.config.litNetwork === LIT_NETWORK.Custom) { - log( + this._coreLogger.info( `Node attestation SEV verification is disabled. You must explicitly set "checkNodeAttestation" to true when using 'custom' network` ); } @@ -654,12 +648,7 @@ export class LitCore { this.config.bootstrapUrls.length } nodes. Please check your network connection and try again. Note that you can control this timeout with the connectTimeout config option which takes milliseconds.`; - try { - throw new InitError({}, msg); - } catch (e) { - logErrorWithRequestId(requestId, e); - reject(e); - } + reject(new InitError({ info: { requestId } }, msg)); }, this.config.connectTimeout); }), Promise.all( @@ -690,17 +679,17 @@ export class LitCore { serverKeys: Record; requestId: string; }): CoreNodeConfig { - const latestBlockhash = mostCommonString( + const latestBlockhash = mostCommonValue( Object.values(serverKeys).map( (keysFromSingleNode) => keysFromSingleNode.latestBlockhash ) ); if (!latestBlockhash) { - logErrorWithRequestId( + this._coreLogger.error({ requestId, - 'Error getting latest blockhash from the nodes.' - ); + msg: 'Error getting latest blockhash from the nodes.', + }); throw new InvalidEthBlockhash( { @@ -715,22 +704,22 @@ export class LitCore { // pick the most common public keys for the subnet and network from the bunch, in case some evil node returned a bad key return { - subnetPubKey: mostCommonString( + subnetPubKey: mostCommonValue( Object.values(serverKeys).map( (keysFromSingleNode) => keysFromSingleNode.subnetPubKey ) )!, - networkPubKey: mostCommonString( + networkPubKey: mostCommonValue( Object.values(serverKeys).map( (keysFromSingleNode) => keysFromSingleNode.networkPubKey ) )!, - networkPubKeySet: mostCommonString( + networkPubKeySet: mostCommonValue( Object.values(serverKeys).map( (keysFromSingleNode) => keysFromSingleNode.networkPubKeySet ) )!, - hdRootPubkeys: mostCommonString( + hdRootPubkeys: mostCommonValue( Object.values(serverKeys).map( (keysFromSingleNode) => keysFromSingleNode.hdRootPubkeys ) @@ -760,7 +749,7 @@ export class LitCore { testResult, }; } catch (error) { - logError(`RPC URL failed: ${url}`); + this._coreLogger.error(`RPC URL failed: ${url}`); } } return null; @@ -781,15 +770,14 @@ export class LitCore { this.lastBlockHashRetrieved && currentTime - this.lastBlockHashRetrieved < blockHashValidityDuration ) { - log('Blockhash is still valid. No need to sync.'); + this._coreLogger.info('Blockhash is still valid. No need to sync.'); return; } - log( - 'Syncing state for new blockhash ', - 'current blockhash: ', - this.latestBlockhash - ); + this._coreLogger.info({ + msg: 'Syncing state for new blockhash', + currentBlockhash: this.latestBlockhash, + }); try { // This fetches from the lit propagation service so nodes will always have it @@ -821,18 +809,20 @@ export class LitCore { this.latestBlockhash = blockHashBody.blockhash; this.lastBlockHashRetrieved = parseInt(timestamp) * 1000; - log('Done syncing state new blockhash: ', this.latestBlockhash); + this._coreLogger.info({ + msg: 'Done syncing state new blockhash', + latestBlockhash: this.latestBlockhash, + }); } catch (error: unknown) { const err = error as BlockHashErrorResponse | Error; - logError( - 'Error while attempting to fetch new latestBlockhash:', - err instanceof Error ? err.message : err.messages, - 'Reason: ', - err instanceof Error ? err : err.reason - ); + this._coreLogger.error({ + msg: 'Error while attempting to fetch new latestBlockhash', + errorMessage: err instanceof Error ? err.message : err.messages, + reason: err instanceof Error ? err : err.reason, + }); - log( + this._coreLogger.info( 'Attempting to fetch blockhash manually using ethers with fallback RPC URLs...' ); const { testResult } = @@ -842,19 +832,23 @@ export class LitCore { )) || {}; if (!testResult || !testResult.hash) { - logError('All fallback RPC URLs failed. Unable to retrieve blockhash.'); + this._coreLogger.error( + 'All fallback RPC URLs failed. Unable to retrieve blockhash.' + ); return; } try { this.latestBlockhash = testResult.hash; this.lastBlockHashRetrieved = testResult.timestamp; - log( - 'Successfully retrieved blockhash manually: ', - this.latestBlockhash - ); + this._coreLogger.info({ + msg: 'Successfully retrieved blockhash manually', + latestBlockhash: this.latestBlockhash, + }); } catch (ethersError) { - logError('Failed to manually retrieve blockhash using ethers'); + this._coreLogger.error( + 'Failed to manually retrieve blockhash using ethers' + ); } } } @@ -898,7 +892,7 @@ export class LitCore { endpoint: LIT_ENDPOINT.HANDSHAKE, }); - log(`handshakeWithNode ${urlWithPath}`); + this._coreLogger.info(`handshakeWithNode ${urlWithPath}`); const data = { clientPublicKey: 'test', @@ -923,7 +917,7 @@ export class LitCore { } if (!epochInfo) { - log( + this._coreLogger.info( 'epochinfo not found. Not a problem, fetching current epoch state from staking contract' ); try { @@ -974,6 +968,41 @@ export class LitCore { } // ==================== SENDING COMMAND ==================== + private async _sendRequest( + url: string, + req: RequestInit, + requestId: string + ): Promise { + try { + const response = await fetch(url, req); + const isJson = response.headers + .get('content-type') + ?.includes('application/json'); + + const data = isJson ? await response.json() : null; + + if (!response.ok) { + // get error message from body or default to response status + const error = data || response.status; + return Promise.reject(error); + } + + return data; + } catch (e) { + throw new NetworkError( + { + info: { + url, + req, + requestId, + }, + cause: e, + }, + `Error sending request to ${url}` + ); + } + } + /** * * Send a command to nodes @@ -998,11 +1027,11 @@ export class LitCore { delete data.sessionSigs; } - logWithRequestId( + this._coreLogger.info({ requestId, - `sendCommandToNode with url ${url} and data`, - data - ); + msg: `sendCommandToNode with url ${url} and data`, + data, + }); const req: RequestInit = { method: 'POST', @@ -1016,7 +1045,7 @@ export class LitCore { body: JSON.stringify(data), }; - return sendRequest(url, req, requestId); + return this._sendRequest(url, req, requestId); }; /** @@ -1137,9 +1166,6 @@ export class LitCore { minNodeCount ); - // console.log(`successes: ${JSON.stringify(successes, null, 2)}`) - // console.log(`errors: ${JSON.stringify(errors, null, 2)}`) - // -- case: success (when success responses are more than minNodeCount) if (successes.length >= minNodeCount) { return { @@ -1168,13 +1194,13 @@ export class LitCore { // -- case: if we're here, then we did not succeed. time to handle and report errors. const mostCommonError = JSON.parse( // eslint-disable-next-line @typescript-eslint/no-explicit-any - mostCommonString(errors.map((r: any) => JSON.stringify(r)))! + mostCommonValue(errors.map((r: any) => JSON.stringify(r)))! ); - logErrorWithRequestId( - requestId || '', - `most common error: ${JSON.stringify(mostCommonError)}` - ); + this._coreLogger.error({ + requestId, + msg: `most common error: ${JSON.stringify(mostCommonError)}`, + }); return { success: false, @@ -1202,7 +1228,7 @@ export class LitCore { res.error.errorCode === 'not_authorized') && this.config.alertWhenUnauthorized ) { - log('You are not authorized to access this content'); + this._coreLogger.info('You are not authorized to access this content'); } throw new NodeError( @@ -1241,7 +1267,9 @@ export class LitCore { sigType: LIT_CURVE_VALUES = LIT_CURVE.EcdsaCaitSith ): Promise => { if (!this.hdRootPubkeys) { - logError('root public keys not found, have you connected to the nodes?'); + this._coreLogger.error( + 'root public keys not found, have you connected to the nodes?' + ); throw new LitNodeClientNotReadyError( {}, 'root public keys not found, have you connected to the nodes?' diff --git a/packages/core/src/lib/types.ts b/packages/core/src/lib/types.ts index b93544addb..bdd7413182 100644 --- a/packages/core/src/lib/types.ts +++ b/packages/core/src/lib/types.ts @@ -1,6 +1,7 @@ -import { NodeAttestation } from '@lit-protocol/types'; import { ethers } from 'ethers'; +import { NodeAttestation } from '@lit-protocol/types'; + export interface SendNodeCommand { url: string; // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/packages/crypto/src/lib/crypto.spec.ts b/packages/crypto/src/lib/crypto.spec.ts index 8dd8980e7a..2898dc7e33 100644 --- a/packages/crypto/src/lib/crypto.spec.ts +++ b/packages/crypto/src/lib/crypto.spec.ts @@ -1,9 +1,50 @@ -import * as ethers from 'ethers'; +import { ed25519 } from '@noble/curves/ed25519'; +import { ethers } from 'ethers'; import { joinSignature } from 'ethers/lib/utils'; import { SigShare } from '@lit-protocol/types'; -import { combineEcdsaShares } from './crypto'; +import { + combineEcdsaShares, + generateSessionKeyPair, + publicKeyCompress, +} from './crypto'; + +describe('generateSessionKeyPair', () => { + it('generates a valid key pair where secretKey contains the publicKey', () => { + const sessionKeyPair = generateSessionKeyPair(); + + const publicKeyBytes = ethers.utils.arrayify( + '0x' + sessionKeyPair.publicKey + ); + const secretKeyBytes = ethers.utils.arrayify( + '0x' + sessionKeyPair.secretKey + ); + + expect(secretKeyBytes.length).toBe(64); + expect(publicKeyBytes.length).toBe(32); + + const derivedPublicKeyFromSecret = secretKeyBytes.slice(32); + expect(derivedPublicKeyFromSecret).toEqual(publicKeyBytes); + }); + + it('derives public key from secret key', () => { + const sessionKeyPair = generateSessionKeyPair(); + + const publicKeyBytes = ethers.utils.arrayify( + '0x' + sessionKeyPair.publicKey + ); + const secretKeyBytes = ethers.utils.arrayify( + '0x' + sessionKeyPair.secretKey + ); + + const privateKeySeed = secretKeyBytes.slice(0, 32); + + const derivedPublicKey = ed25519.getPublicKey(privateKeySeed); + + expect(derivedPublicKey).toEqual(publicKeyBytes); + }); +}); describe('combine ECDSA Shares', () => { it('Should recombine ECDSA signature shares', async () => { @@ -67,3 +108,42 @@ describe('combine ECDSA Shares', () => { expect(recoveredAddr).toEqual(addr); }); }); + +describe('publicKeyCompress', () => { + const COMPRESSED_PUBLIC_KEY_HEX = + '03bc0a563a9ddaf097ef31c3e936dda312acdbe2504953f0ea4ecb94ee737237df'; + const COMPRESSED_PUBLIC_KEY = Buffer.from(COMPRESSED_PUBLIC_KEY_HEX, 'hex'); + + const UNCOMPRESSED_PUBLIC_KEY_HEX = + '04bc0a563a9ddaf097ef31c3e936dda312acdbe2504953f0ea4ecb94ee737237dfa2be4f2e38de7540ae64cf362b897d0f93567adc23ce0abc997c18edd269d73b'; + const UNCOMPRESSED_PUBLIC_KEY = Buffer.from( + UNCOMPRESSED_PUBLIC_KEY_HEX, + 'hex' + ); + + it('should return the same compressed key when already compressed', () => { + const result = publicKeyCompress(COMPRESSED_PUBLIC_KEY); + expect(result).toEqual(COMPRESSED_PUBLIC_KEY); + }); + + it('should compress an uncompressed public key correctly', () => { + const result = publicKeyCompress(UNCOMPRESSED_PUBLIC_KEY); + expect(result).toEqual(COMPRESSED_PUBLIC_KEY); + }); + + it('should throw an error for invalid key length', () => { + const invalidKey = Buffer.from('1234567890abcdef', 'hex'); // 8 bytes only + expect(() => publicKeyCompress(invalidKey)).toThrow( + 'Invalid public key length. Expected 33 (compressed) or 65 (uncompressed) bytes.' + ); + }); + + it('should throw an error if uncompressed key does not start with 0x04', () => { + // Create a 65-byte buffer with an invalid prefix (not 0x04) + const invalidUncompressed = Buffer.alloc(65, 0); + invalidUncompressed[0] = 0x05; + expect(() => publicKeyCompress(invalidUncompressed)).toThrow( + 'Invalid uncompressed public key format: does not start with 0x04.' + ); + }); +}); diff --git a/packages/crypto/src/lib/crypto.ts b/packages/crypto/src/lib/crypto.ts index 54c46e5f97..613cfabf4d 100644 --- a/packages/crypto/src/lib/crypto.ts +++ b/packages/crypto/src/lib/crypto.ts @@ -1,7 +1,9 @@ +import { ed25519 } from '@noble/curves/ed25519'; import { joinSignature, splitSignature } from 'ethers/lib/utils'; import { InvalidParamType, + InvalidSignatureError, LIT_CURVE, LIT_CURVE_VALUES, NetworkError, @@ -9,13 +11,9 @@ import { UnknownError, UnknownSignatureError, } from '@lit-protocol/constants'; -import { log } from '@lit-protocol/misc'; -import { nacl } from '@lit-protocol/nacl'; +import { logger } from '@lit-protocol/logger'; +import { getStorageItem, setStorageItem } from '@lit-protocol/misc-browser'; import { NodeAttestation, SessionKeyPair, SigShare } from '@lit-protocol/types'; -import { - uint8arrayFromString, - uint8arrayToString, -} from '@lit-protocol/uint8arrays'; import { blsCombine, blsDecrypt, @@ -144,7 +142,13 @@ export const combineSignatureShares = async ( const signature = await blsCombine(sigShares); if (signature.length !== 192) { - throw new Error( + throw new InvalidSignatureError( + { + info: { + signature, + shares, + }, + }, `Signature length is not 192. Got ${signature.length} instead.` ); } @@ -300,16 +304,63 @@ export const computeHDPubKey = async ( * @returns { SessionKeyPair } sessionKeyPair */ export const generateSessionKeyPair = (): SessionKeyPair => { - const keyPair = nacl.sign.keyPair(); + const privateKey = ed25519.utils.randomPrivateKey(); + const publicKey = ed25519.getPublicKey(privateKey); + const combinedSecretKey = new Uint8Array( + privateKey.length + publicKey.length + ); + combinedSecretKey.set(privateKey, 0); + combinedSecretKey.set(publicKey, privateKey.length); const sessionKeyPair: SessionKeyPair = { - publicKey: uint8arrayToString(keyPair.publicKey, 'base16'), - secretKey: uint8arrayToString(keyPair.secretKey, 'base16'), + publicKey: Buffer.from(publicKey).toString('hex'), + secretKey: Buffer.from(combinedSecretKey).toString('hex'), // TODO check if concatenated public key is needed }; return sessionKeyPair; }; +/** + * Converts a public key between compressed and uncompressed formats. + * + * @param publicKey - Public key as a Buffer (33 bytes compressed or 65 bytes uncompressed) + * @returns Converted public key as a Buffer + */ +export function publicKeyCompress(publicKey: Buffer): Buffer { + // Validate the public key length is either 33 (compressed) or 65 (uncompressed) + if (publicKey.length !== 33 && publicKey.length !== 65) { + throw new InvalidSignatureError( + { + info: { + publicKey, + }, + }, + 'Invalid public key length. Expected 33 (compressed) or 65 (uncompressed) bytes.' + ); + } + + // If the key is already compressed (33 bytes), return it unchanged. + if (publicKey.length === 33) { + return publicKey; + } + + if (publicKey[0] !== 0x04) { + throw new InvalidSignatureError( + { + info: { + publicKey, + }, + }, + 'Invalid uncompressed public key format: does not start with 0x04.' + ); + } + + const x = publicKey.subarray(1, 33); + const y = publicKey.subarray(33, 65); + const prefix = y[y.length - 1] % 2 === 0 ? 0x02 : 0x03; + return Buffer.concat([Buffer.from([prefix]), x]); +} + async function doDecrypt( ciphertextBase64: string, shares: BlsSignatureShareJsonString[] @@ -334,7 +385,7 @@ async function doDecrypt( async function getAmdCert(url: string): Promise { const proxyUrl = `${LIT_CORS_PROXY}/${url}`; - log( + logger.info( `[getAmdCert] Fetching AMD cert using proxy URL ${proxyUrl} to manage CORS restrictions and to avoid being rate limited by AMD.` ); @@ -356,18 +407,26 @@ async function getAmdCert(url: string): Promise { try { return await fetchAsUint8Array(proxyUrl); - } catch (e) { - log(`[getAmdCert] Failed to fetch AMD cert from proxy:`, e); + } catch (error) { + logger.error({ + function: 'getAmdCert', + msg: `Failed to fetch AMD cert from proxy`, + error, + }); } // Try direct fetch only if proxy fails - log('[getAmdCert] Attempting to fetch directly without proxy.'); + logger.info('Attempting to fetch directly without proxy.'); try { return await fetchAsUint8Array(url); - } catch (e) { - log('[getAmdCert] Direct fetch also failed:', e); - throw e; // Re-throw to signal that both methods failed + } catch (error) { + logger.error({ + function: 'getAmdCert', + msg: 'Direct fetch also failed', + error, + }); + throw error; // Re-throw to signal that both methods failed } } @@ -468,13 +527,13 @@ export const checkSevSnpAttestation = async ( const vcekUrl = await sevSnpGetVcekUrl(report); // use local storage if we have one available if (globalThis.localStorage) { - log('Using local storage for certificate caching'); - vcekCert = localStorage.getItem(vcekUrl); + logger.info('Using local storage for certificate caching'); + vcekCert = getStorageItem(vcekUrl); if (vcekCert) { - vcekCert = uint8arrayFromString(vcekCert, 'base64'); + vcekCert = Buffer.from(vcekCert, 'base64'); } else { vcekCert = await getAmdCert(vcekUrl); - localStorage.setItem(vcekUrl, uint8arrayToString(vcekCert, 'base64')); + setStorageItem(vcekUrl, Buffer.from(vcekCert).toString('base64')); } } else { const cache = (( @@ -500,8 +559,3 @@ export const checkSevSnpAttestation = async ( // pass base64 encoded report to wasm wrapper return sevSnpVerify(report, data, signatures, challenge, vcekCert); }; - -declare global { - // eslint-disable-next-line no-var, @typescript-eslint/no-explicit-any - var LitNodeClient: any; -} diff --git a/packages/encryption/README.md b/packages/encryption/README.md deleted file mode 100644 index 82cd8d9b38..0000000000 --- a/packages/encryption/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Quick Start - -This submodule provides encryption and decryption of contents (string, file, etc.) respectively using a symmetric key, with the encrypted content returned as a Blob and the symmetric key as a Uint8Array - -### node.js / browser - -``` -yarn add @lit-protocol/encryption -``` diff --git a/packages/encryption/jest.config.ts b/packages/encryption/jest.config.ts deleted file mode 100644 index 1787e58530..0000000000 --- a/packages/encryption/jest.config.ts +++ /dev/null @@ -1,16 +0,0 @@ -/* eslint-disable */ -export default { - displayName: 'encryption', - preset: '../../jest.preset.js', - globals: { - 'ts-jest': { - tsconfig: '/tsconfig.spec.json', - }, - }, - transform: { - '^.+\\.[t]s$': 'ts-jest', - }, - moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: '../../coverage/packages/encryption', - setupFilesAfterEnv: ['../../jest.setup.js'], -}; diff --git a/packages/encryption/project.json b/packages/encryption/project.json deleted file mode 100644 index cf2972ffee..0000000000 --- a/packages/encryption/project.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "encryption", - "$schema": "../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "packages/encryption/src", - "projectType": "library", - "targets": { - "build": { - "executor": "@nx/js:tsc", - "outputs": ["{options.outputPath}"], - "options": { - "outputPath": "dist/packages/encryption", - "main": "packages/encryption/src/index.ts", - "tsConfig": "packages/encryption/tsconfig.lib.json", - "assets": ["packages/encryption/*.md"], - "updateBuildableProjectDepsInPackageJson": true - } - }, - "lint": { - "executor": "@nx/linter:eslint", - "outputs": ["{options.outputFile}"], - "options": { - "lintFilePatterns": ["packages/encryption/**/*.ts"] - } - }, - "test": { - "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/encryption"], - "options": { - "jestConfig": "packages/encryption/jest.config.ts", - "passWithNoTests": true - } - }, - "testWatch": { - "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/encryption"], - "options": { - "jestConfig": "packages/encryption/jest.config.ts", - "passWithNoTests": true, - "watch": true - } - } - }, - "tags": [] -} diff --git a/packages/encryption/src/index.ts b/packages/encryption/src/index.ts deleted file mode 100644 index 23436519e2..0000000000 --- a/packages/encryption/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './lib/encryption'; diff --git a/packages/encryption/src/lib/encryption.spec.ts b/packages/encryption/src/lib/encryption.spec.ts deleted file mode 100644 index 978d752295..0000000000 --- a/packages/encryption/src/lib/encryption.spec.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { isValidBooleanExpression } from '@lit-protocol/misc'; -import { AccsDefaultParams } from '@lit-protocol/types'; - -const conditionA: AccsDefaultParams = { - contractAddress: '', - standardContractType: '', - chain: 'ethereum', - method: 'eth_getBalance', - parameters: [':userAddress', 'latest'], - returnValueTest: { - comparator: '>=', - value: '10000000000000', - }, -}; - -const conditionB: AccsDefaultParams = { - contractAddress: '0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2', - standardContractType: 'ERC20', - chain: 'ethereum', - method: 'balanceOf', - parameters: [':userAddress'], - returnValueTest: { - comparator: '>', - value: '0', - }, -}; - -const groupValid: any = [conditionA, { operator: 'or' }, conditionB]; - -const groupInvalid: any = [ - conditionA, - { operator: 'or' }, - conditionB, - { operator: 'and' }, -]; - -describe('encryption', () => { - it('should pass single access control condition', () => { - expect(isValidBooleanExpression([conditionA])).toBeTruthy(); - }); - it('should pass boolean access control condition', () => { - expect( - isValidBooleanExpression([conditionA, { operator: 'or' }, conditionB]) - ).toBeTruthy(); - }); - it('should fail trailing boolean operator', () => { - expect( - isValidBooleanExpression([ - conditionA, - { operator: 'or' }, - conditionB, - { operator: 'and' }, - ]) - ).toBeFalsy(); - }); - it('should fail consecutive boolean operators', () => { - expect( - isValidBooleanExpression([ - conditionA, - { operator: 'or' }, - { operator: 'and' }, - conditionB, - ]) - ).toBeFalsy(); - }); - it('should fail only boolean operator', () => { - expect(isValidBooleanExpression([{ operator: 'or' }])).toBeFalsy(); - }); - it('should fail consecutive boolean conditions', () => { - expect(isValidBooleanExpression([conditionA, conditionB])).toBeFalsy(); - }); - it('should pass boolean condition and group', () => { - expect( - isValidBooleanExpression([conditionA, { operator: 'or' }, groupValid]) - ).toBeTruthy(); - }); - it('should pass boolean group and condition', () => { - expect( - isValidBooleanExpression([groupValid, { operator: 'and' }, conditionA]) - ).toBeTruthy(); - }); - it('should pass boolean group and group', () => { - expect( - isValidBooleanExpression([groupValid, { operator: 'and' }, groupValid]) - ).toBeTruthy(); - }); - it('should pass group only', () => { - expect(isValidBooleanExpression([groupValid])).toBeTruthy(); - }); - it('should fail invalid group only', () => { - expect(isValidBooleanExpression([groupInvalid])).toBeFalsy(); - }); - it('should fail trailing boolean operator with group', () => { - expect( - isValidBooleanExpression([groupValid, { operator: 'and' }]) - ).toBeFalsy(); - }); - it('should fail consecutive boolean operators with group', () => { - expect( - isValidBooleanExpression([ - groupValid, - { operator: 'and' }, - { operator: 'or' }, - groupValid, - ]) - ).toBeFalsy(); - }); - it('should fail boolean with invalid group', () => { - expect( - isValidBooleanExpression([groupValid, { operator: 'and' }, groupInvalid]) - ).toBeFalsy(); - }); - it('should fail boolean with invalid group and valid condition', () => { - expect( - isValidBooleanExpression([groupInvalid, { operator: 'or' }, conditionB]) - ).toBeFalsy(); - }); - it('should pass boolean condition after group', () => { - expect( - isValidBooleanExpression([ - conditionB, - { operator: 'or' }, - groupValid, - { operator: 'and' }, - conditionA, - ]) - ).toBeTruthy(); - }); -}); diff --git a/packages/encryption/src/lib/encryption.ts b/packages/encryption/src/lib/encryption.ts deleted file mode 100644 index b7377f24b6..0000000000 --- a/packages/encryption/src/lib/encryption.ts +++ /dev/null @@ -1,413 +0,0 @@ -import { EITHER_TYPE, InvalidParamType } from '@lit-protocol/constants'; -import { safeParams } from '@lit-protocol/misc'; -import { - DecryptRequest, - EncryptFileRequest, - EncryptResponse, - EncryptUint8ArrayRequest, - EncryptStringRequest, - ILitNodeClient, - EncryptToJsonPayload, - EncryptToJsonProps, - DecryptFromJsonProps, -} from '@lit-protocol/types'; -import { - uint8arrayFromString, - uint8arrayToString, -} from '@lit-protocol/uint8arrays'; - -/** - * Encrypt a string or file using the LIT network public key and serialise all the metadata required to decrypt - * i.e. accessControlConditions, evmContractConditions, solRpcConditions, unifiedAccessControlConditions & chain to JSON - * - * Useful for encrypting/decrypting data in IPFS or other storage without compressing it in a file. - * - * @param params { EncryptToJsonProps } - The params required to encrypt either a file or string and serialise it to JSON - * - * @returns { Promise } - JSON serialised string of the encrypted data and associated metadata necessary to decrypt it later - * - */ -export const encryptToJson = async ( - params: EncryptToJsonProps -): Promise => { - const { - accessControlConditions, - evmContractConditions, - solRpcConditions, - unifiedAccessControlConditions, - chain, - string, - file, - litNodeClient, - } = params; - - // -- validate - const paramsIsSafe = safeParams({ - functionName: 'encryptToJson', - params, - }); - - if (paramsIsSafe.type === EITHER_TYPE.ERROR) - throw new InvalidParamType( - { - info: { - params, - function: 'encryptToJson', - }, - cause: paramsIsSafe.result, - }, - 'Invalid params' - ); - - if (string !== undefined) { - const { ciphertext, dataToEncryptHash } = await encryptString( - { - ...params, - dataToEncrypt: string, - }, - litNodeClient - ); - - return JSON.stringify({ - ciphertext, - dataToEncryptHash, - accessControlConditions, - evmContractConditions, - solRpcConditions, - unifiedAccessControlConditions, - chain, - dataType: 'string', - } as EncryptToJsonPayload); - } else if (file) { - const { ciphertext, dataToEncryptHash } = await encryptFile( - { ...params, file }, - litNodeClient - ); - - return JSON.stringify({ - ciphertext, - dataToEncryptHash, - accessControlConditions, - evmContractConditions, - solRpcConditions, - unifiedAccessControlConditions, - chain, - dataType: 'file', - } as EncryptToJsonPayload); - } else { - throw new InvalidParamType( - { - info: { - params, - }, - }, - 'You must provide either "file" or "string"' - ); - } -}; - -/** - * - * Decrypt & return a previously encrypted string (as a string) or file (as a Uint8Array) using the metadata included - * in the parsed JSON data - * - * @param params { DecryptFromJsonProps } - The params required to decrypt a parsed JSON blob containing appropriate metadata - * - * @returns { Promise } - The decrypted `string` or file (as a `Uint8Array`) depending on `dataType` property in the parsed JSON provided - * - */ -export async function decryptFromJson( - params: DecryptFromJsonProps -): Promise< - ReturnType | ReturnType -> { - const { authContext, parsedJsonData, litNodeClient } = params; - - // -- validate - const paramsIsSafe = safeParams({ - functionName: 'decryptFromJson', - params, - }); - - if (paramsIsSafe.type === EITHER_TYPE.ERROR) - throw new InvalidParamType( - { - info: { - params, - function: 'decryptFromJson', - }, - cause: paramsIsSafe.result, - }, - 'Invalid params' - ); - - if (parsedJsonData.dataType === 'string') { - return decryptToString( - { - accessControlConditions: parsedJsonData.accessControlConditions, - evmContractConditions: parsedJsonData.evmContractConditions, - solRpcConditions: parsedJsonData.solRpcConditions, - unifiedAccessControlConditions: - parsedJsonData.unifiedAccessControlConditions, - ciphertext: parsedJsonData.ciphertext, - dataToEncryptHash: parsedJsonData.dataToEncryptHash, - chain: parsedJsonData.chain, - authContext, - }, - litNodeClient - ); - } else if (parsedJsonData.dataType === 'file') { - return decryptToFile( - { - accessControlConditions: parsedJsonData.accessControlConditions, - evmContractConditions: parsedJsonData.evmContractConditions, - solRpcConditions: parsedJsonData.solRpcConditions, - unifiedAccessControlConditions: - parsedJsonData.unifiedAccessControlConditions, - ciphertext: parsedJsonData.ciphertext, - dataToEncryptHash: parsedJsonData.dataToEncryptHash, - chain: parsedJsonData.chain, - authContext, - }, - litNodeClient - ); - } else { - throw new InvalidParamType( - { - info: { - dataType: parsedJsonData.dataType, - params, - }, - }, - 'dataType of %s is not valid. Must be "string" or "file".', - parsedJsonData.dataType - ); - } -} - -// ---------- Local Helpers ---------- - -/** Encrypt a uint8array. This is used to encrypt any uint8array that is to be locked via the Lit Protocol. - * @param { EncryptUint8ArrayRequest } params - The params required to encrypt a uint8array - * @param params.dataToEncrypt - (optional) The uint8array to encrypt - * @param params.accessControlConditions - (optional) The access control conditions - * @param params.evmContractConditions - (optional) The EVM contract conditions - * @param params.solRpcConditions - (optional) The Solana RPC conditions - * @param params.unifiedAccessControlConditions - The unified access control conditions - * @param { ILitNodeClient } litNodeClient - The Lit Node Client - * - * @returns { Promise } - The encrypted uint8array and the hash of the data that was encrypted - */ -export const encryptUint8Array = async ( - params: EncryptUint8ArrayRequest, - litNodeClient: ILitNodeClient -): Promise => { - // -- validate - const paramsIsSafe = safeParams({ - functionName: 'encryptUint8Array', - params, - }); - - if (paramsIsSafe.type === EITHER_TYPE.ERROR) - throw new InvalidParamType( - { - info: { - params, - }, - }, - 'Invalid params' - ); - - return litNodeClient.encrypt({ - ...params, - }); -}; - -/** - * Decrypt a cyphertext into a Uint8Array that was encrypted with the encryptUint8Array function. - * - * @param { DecryptRequest } params - The params required to decrypt a string - * @param { ILitNodeClient } litNodeClient - The Lit Node Client - * - * @returns { Promise } - The decrypted `Uint8Array` - */ -export const decryptToUint8Array = async ( - params: DecryptRequest, - litNodeClient: ILitNodeClient -): Promise => { - // -- validate - const paramsIsSafe = safeParams({ - functionName: 'decrypt', - params, - }); - - if (paramsIsSafe.type === EITHER_TYPE.ERROR) - throw new InvalidParamType( - { - info: { - params, - function: 'decryptToUint8Array', - }, - cause: paramsIsSafe.result, - }, - 'Invalid params' - ); - - const { decryptedData } = await litNodeClient.decrypt(params); - - return decryptedData; -}; - -/** - * - * Encrypt a string. This is used to encrypt any string that is to be locked via the Lit Protocol. - * - * @param { EncryptStringRequest } params - The params required to encrypt a string - * @param params.dataToEncrypt - (optional) The string to encrypt - * @param params.accessControlConditions - (optional) The access control conditions - * @param params.evmContractConditions - (optional) The EVM contract conditions - * @param params.solRpcConditions - (optional) The Solana RPC conditions - * @param params.unifiedAccessControlConditions - The unified access control conditions - * @param { ILitNodeClient } litNodeClient - The Lit Node Client - * - * @returns { Promise } - The encrypted string and the hash of the string - */ -export const encryptString = async ( - params: EncryptStringRequest, - litNodeClient: ILitNodeClient -): Promise => { - // -- validate - const paramsIsSafe = safeParams({ - functionName: 'encryptString', - params, - }); - - if (paramsIsSafe.type === EITHER_TYPE.ERROR) - throw new InvalidParamType( - { - info: { - params, - function: 'encryptString', - }, - cause: paramsIsSafe.result, - }, - 'Invalid params' - ); - - return litNodeClient.encrypt({ - ...params, - dataToEncrypt: uint8arrayFromString(params.dataToEncrypt, 'utf8'), - }); -}; - -/** - * - * Decrypt ciphertext into a string that was encrypted with the encryptString function. - * - * @param { DecryptRequest } params - The params required to decrypt a string - * @param { ILitNodeClient } litNodeClient - The Lit Node Client - - * @returns { Promise } - The decrypted string - */ -export const decryptToString = async ( - params: DecryptRequest, - litNodeClient: ILitNodeClient -): Promise => { - // -- validate - const paramsIsSafe = safeParams({ - functionName: 'decrypt', - params, - }); - - if (paramsIsSafe.type === EITHER_TYPE.ERROR) - throw new InvalidParamType( - { - info: { - params, - function: 'decryptToString', - }, - cause: paramsIsSafe.result, - }, - 'Invalid params' - ); - - const { decryptedData } = await litNodeClient.decrypt(params); - - return uint8arrayToString(decryptedData, 'utf8'); -}; - -/** - * - * Encrypt a file without doing any compression or packing. This is useful for large files. A 1gb file can be encrypted in only 2 seconds, for example. - * - * @param { EncryptFileRequest } params - The params required to encrypt a file - * @param { ILitNodeClient } litNodeClient - The lit node client to use to encrypt the file - * - * @returns { Promise } - The encrypted file and the hash of the file - */ -export const encryptFile = async ( - params: EncryptFileRequest, - litNodeClient: ILitNodeClient -): Promise => { - // -- validate - const paramsIsSafe = safeParams({ - functionName: 'encryptFile', - params, - }); - - if (paramsIsSafe.type === EITHER_TYPE.ERROR) - throw new InvalidParamType( - { - info: { - params, - function: 'encryptFile', - }, - cause: paramsIsSafe.result, - }, - 'Invalid params' - ); - - // encrypt the file - const fileAsArrayBuffer = await params.file.arrayBuffer(); - - return litNodeClient.encrypt({ - ...params, - dataToEncrypt: new Uint8Array(fileAsArrayBuffer), - }); -}; - -/** - * - * Decrypt a file that was encrypted with the encryptFile function, without doing any uncompressing or unpacking. This is useful for large files. A 1gb file can be decrypted in only 1 second, for example. - * - * @param { DecryptRequest } params - The params required to decrypt a file - * @param { ILitNodeClient } litNodeClient - The lit node client to use to decrypt the file - * - * @returns { Promise } - The decrypted file - */ -export const decryptToFile = async ( - params: DecryptRequest, - litNodeClient: ILitNodeClient -): Promise => { - // -- validate - const paramsIsSafe = safeParams({ - functionName: 'decrypt', - params, - }); - - if (paramsIsSafe.type === EITHER_TYPE.ERROR) - throw new InvalidParamType( - { - info: { - params, - function: 'decryptToFile', - }, - cause: paramsIsSafe.result, - }, - 'Invalid params' - ); - - const { decryptedData } = await litNodeClient.decrypt(params); - - return decryptedData; -}; diff --git a/packages/event-listener/README.md b/packages/event-listener/README.md index a6a4ab0d2a..7bbb29e992 100644 --- a/packages/event-listener/README.md +++ b/packages/event-listener/README.md @@ -254,6 +254,27 @@ async function bridgeBaseSepoliaUSDCToEthereumSepolia() { const ethPrivateKey = '0xTHE_PKP_AUTHORIZED_SIGNER_PRIVATE_KEY'; const stateMachine = StateMachine.fromDefinition({ + // Extend the action respository with a custom action we will define later. For example to send notification to the machine owner on transactions + actionRepository: { + notify: class NotificationAction extends Action { + constructor({ + stateMachine, + customParam, + }: { + stateMachine: StateMachine; + customParam: string; + }) { + super({ + debug, + function: async () => { + const transferData = stateMachine.getFromContext('transfer'); + console.log('customParam', customParam); + console.log('transferData', transferData); + }, + }); + } + }, + }, privateKey: ethPrivateKey, // Used only for authorization here, minting was done previously context: { // We can prepopulate the context, for example setting the pkp here instead of using state.usePkp later @@ -371,6 +392,11 @@ async function bridgeBaseSepoliaUSDCToEthereumSepolia() { }, ], }, + { + // Our custom action to notify about the just executed transfer transaction + key: 'notify', + customParam: 'OUR_CUSTOM_PARAM', + }, ], // Going back to waitForFunds to suspend machine if we need more sepolia eth or sepolia USDC transitions: [{ toState: 'waitForFunds' }], diff --git a/packages/event-listener/src/index.ts b/packages/event-listener/src/index.ts index 12fc9ac474..a4dbd4f99e 100644 --- a/packages/event-listener/src/index.ts +++ b/packages/event-listener/src/index.ts @@ -1,3 +1,4 @@ +export * from './lib/actions'; export * from './lib/listeners'; export * from './lib/states'; export * from './lib/state-machine'; diff --git a/packages/event-listener/src/lib/actions/index.ts b/packages/event-listener/src/lib/actions/index.ts index 9291631380..bbe79ead1b 100644 --- a/packages/event-listener/src/lib/actions/index.ts +++ b/packages/event-listener/src/lib/actions/index.ts @@ -1,5 +1,18 @@ +import { LitActionAction } from './lit-action'; +import { LogContextAction } from './log-context'; +import { MintPkpAction } from './mint-pkp'; +import { TransactionAction } from './transaction'; +import { ActionConstructor } from '../types'; + export * from './action'; export * from './lit-action'; export * from './log-context'; export * from './mint-pkp'; export * from './transaction'; + +export const ACTION_REPOSITORY: Record = { + context: LogContextAction, + litAction: LitActionAction, + transaction: TransactionAction, + usePkp: MintPkpAction, +}; diff --git a/packages/event-listener/src/lib/actions/log-context.ts b/packages/event-listener/src/lib/actions/log-context.ts index 4196d3e74c..3f383bfd17 100644 --- a/packages/event-listener/src/lib/actions/log-context.ts +++ b/packages/event-listener/src/lib/actions/log-context.ts @@ -1,3 +1,5 @@ +import { logger } from '@lit-protocol/logger'; + import { Action } from './action'; import { StateMachine } from '../state-machine'; @@ -10,10 +12,10 @@ interface LogContextActionParams { export class LogContextAction extends Action { constructor(params: LogContextActionParams) { const logContextFunction = async () => { - console.log( - `State Machine context: `, - params.stateMachine.getFromContext(params.path) - ); + logger.info({ + msg: `State Machine context`, + context: params.stateMachine.getFromContext(params.path), + }); }; super({ diff --git a/packages/event-listener/src/lib/actions/mint-pkp.ts b/packages/event-listener/src/lib/actions/mint-pkp.ts index 353b6f79c7..5085a5169d 100644 --- a/packages/event-listener/src/lib/actions/mint-pkp.ts +++ b/packages/event-listener/src/lib/actions/mint-pkp.ts @@ -1,3 +1,5 @@ +import { logger } from '@lit-protocol/logger'; + import { Action } from './action'; import { StateMachine } from '../state-machine'; @@ -12,7 +14,7 @@ export class MintPkpAction extends Action { const mintingReceipt = await params.stateMachine.litContracts.pkpNftContractUtils.write.mint(); const pkp = mintingReceipt.pkp; - params.debug && console.log(`Minted PKP: ${pkp}`); + params.debug && logger.info(`Minted PKP: ${pkp}`); params.stateMachine.setToContext('activePkp', pkp); }; diff --git a/packages/event-listener/src/lib/listeners/fetch.ts b/packages/event-listener/src/lib/listeners/fetch.ts index 681cd94bba..74e1c1d06d 100644 --- a/packages/event-listener/src/lib/listeners/fetch.ts +++ b/packages/event-listener/src/lib/listeners/fetch.ts @@ -1,3 +1,5 @@ +import { logger } from '@lit-protocol/logger'; + import { Listener } from './listener'; interface FetchListenerConfig { @@ -32,7 +34,7 @@ export class FetchListener extends Listener { this.emit(value); } } catch (error) { - console.error('FetchListener error:', error); + logger.error({ msg: 'FetchListener error:', error }); } }, pollInterval); }, diff --git a/packages/event-listener/src/lib/litActions.ts b/packages/event-listener/src/lib/litActions.ts index 1f4d792b7b..805ddf7f7c 100644 --- a/packages/event-listener/src/lib/litActions.ts +++ b/packages/event-listener/src/lib/litActions.ts @@ -1,10 +1,12 @@ import { ethers } from 'ethers'; +import { authenticators } from '@lit-protocol/auth'; import { LitActionResource } from '@lit-protocol/auth-helpers'; -import { LIT_ABILITY, LIT_NETWORK } from '@lit-protocol/constants'; -import { EthWalletProvider } from '@lit-protocol/lit-auth-client'; +import { LIT_ABILITY } from '@lit-protocol/constants'; import { LitNodeClient } from '@lit-protocol/lit-node-client'; +const { MetamaskAuthenticator } = authenticators; + export const signWithLitActionCode = `(async () => { const signature = await Lit.Actions.signAndCombineEcdsa({ toSign, @@ -48,7 +50,7 @@ export async function executeLitAction({ pkpPublicKey, capabilityAuthSigs: [], authMethods: [ - await EthWalletProvider.authenticate({ + await MetamaskAuthenticator.authenticate({ signer: authSigner, litNodeClient: litNodeClient, expiration, diff --git a/packages/event-listener/src/lib/state-machine.ts b/packages/event-listener/src/lib/state-machine.ts index 049f75f19e..e91571ce7f 100644 --- a/packages/event-listener/src/lib/state-machine.ts +++ b/packages/event-listener/src/lib/state-machine.ts @@ -7,14 +7,9 @@ import { } from '@lit-protocol/constants'; import { LitContracts } from '@lit-protocol/contracts-sdk'; import { LitNodeClient } from '@lit-protocol/lit-node-client'; +import { logger } from '@lit-protocol/logger'; -import { - Action, - LitActionAction, - LogContextAction, - MintPkpAction, - TransactionAction, -} from './actions'; +import { Action, ACTION_REPOSITORY } from './actions'; import { MachineContext } from './context/machine-context'; import { ContractEventData, @@ -26,6 +21,7 @@ import { import { State, StateParams } from './states'; import { CheckFn, Transition } from './transitions'; import { + ActionConstructor, ActionDefinition, BaseStateMachineParams, ContextOrLiteral, @@ -59,6 +55,7 @@ export class StateMachine { public id: string; public status: MachineStatus = 'stopped'; + private readonly actionsRepository: Record; private states = new Map(); private transitions = new Map>(); private currentState?: State; @@ -73,6 +70,10 @@ export class StateMachine { ...params.context, }); + this.actionsRepository = { + ...ACTION_REPOSITORY, + ...params.actionRepository, + }; this.litNodeClient = params.litNodeClient; this.litContracts = params.litContracts; this.privateKey = params.privateKey; @@ -124,6 +125,10 @@ export class StateMachine { litContracts: litContractsInstance, privateKey, onError, + actionRepository: { + ...ACTION_REPOSITORY, + ...machineConfig.actionRepository, + }, }); const stateTransitions = [] as TransitionDefinition[]; @@ -382,17 +387,17 @@ export class StateMachine { // Aggregate (AND) all listener checks to a single function result transitionConfig.check = async (values) => { this.debug && - console.log( - `${transitionDefinition.fromState} -> ${transitionDefinition.toState} values`, - values - ); + logger.info({ + msg: `${transitionDefinition.fromState} -> ${transitionDefinition.toState} values`, + values, + }); return Promise.all(checks.map((check) => check(values))).then( (results) => { this.debug && - console.log( - `${transitionDefinition.fromState} -> ${transitionDefinition.toState} results`, - results - ); + logger.info({ + msg: `${transitionDefinition.fromState} -> ${transitionDefinition.toState} results`, + results, + }); return results.every((result) => result); } ); @@ -410,7 +415,7 @@ export class StateMachine { initialState: string, onStop?: voidAsyncFunction ): Promise { - this.debug && console.log('Starting state machine...'); + this.debug && logger.info('Starting state machine...'); await Promise.all([ this.litContracts.connect(), @@ -421,7 +426,7 @@ export class StateMachine { await this.enterState(initialState); this.status = 'running'; - this.debug && console.log('State machine started'); + this.debug && logger.info('State machine started'); } /** @@ -470,20 +475,21 @@ export class StateMachine { * Stops the state machine by exiting the current state and not moving to another one. */ public async stopMachine(): Promise { - this.debug && console.log('Stopping state machine...'); + this.debug && logger.info('Stopping state machine...'); this.status = 'stopped'; await this.exitCurrentState(); await this.onStopCallback?.(); - this.debug && console.log('State machine stopped'); + this.debug && logger.info('State machine stopped'); } /** * Stops listening on the current state's transitions and exits the current state. */ private async exitCurrentState(): Promise { - this.debug && console.log('exitCurrentState', this.currentState?.key); + this.debug && + logger.info({ msg: 'exitCurrentState', state: this.currentState?.key }); const currentTransitions = this.transitions.get(this.currentState?.key ?? '') ?? @@ -514,7 +520,7 @@ export class StateMachine { `State ${stateKey} not found` ); } - this.debug && console.log('enterState', state.key); + this.debug && logger.info({ msg: 'enterState', state: state.key }); await state.enter(); const nextTransitions = this.transitions.get(state.key) ?? new Map(); @@ -542,7 +548,7 @@ export class StateMachine { ); } if (this.currentState === nextState) { - console.warn( + logger.warn( `State ${stateKey} is already active. Skipping state change.` ); return; @@ -569,66 +575,18 @@ export class StateMachine { ): voidAsyncFunction { const actions = [] as Action[]; - actionDefinitions.forEach((action) => { - switch (action.key) { - case 'context': - if (typeof action.log?.path === 'string') { - actions.push( - new LogContextAction({ - debug: this.debug, - stateMachine: this, - path: action.log.path, - }) - ); - } - break; - case 'litAction': - actions.push( - new LitActionAction({ - debug: this.debug, - stateMachine: this, - ...action, - }) - ); - break; - case 'transaction': - actions.push( - new TransactionAction({ - debug: this.debug, - stateMachine: this, - ...action, - }) - ); - break; - case 'usePkp': - if ('pkp' in action) { - this.context.set( - 'activePkp', - this.resolveContextPathOrLiteral(action.pkp) - ); - } else if ('mint' in action) { - const mintPkpAction = new MintPkpAction({ - debug: this.debug, - stateMachine: this, - }); - actions.push(mintPkpAction); - } - if (this.debug) { - const activePkp = this.context.get('activePkp'); - console.log(`Machine configured to use pkp ${activePkp}`); - } - break; - default: - throw new AutomationError( - { - info: { - action, - }, - }, - `Unknown action. Check error info.` - ); + for (const action of actionDefinitions) { + const ActionCtor = this.actionsRepository[action.key]; + if (!ActionCtor) { + throw new AutomationError( + { info: { action } }, + `Action key "${action.key}" not found in action repository` + ); } - }); + actions.push( + new ActionCtor({ debug: this.debug, stateMachine: this, ...action }) + ); + } return async () => { await Promise.all(actions.map((action) => action.run())).catch((err) => { @@ -665,7 +623,7 @@ export class StateMachine { } // Throwing when stopping could hide above error - this.stopMachine().catch(console.error); + this.stopMachine().catch((error) => logger.error({ error })); } } diff --git a/packages/event-listener/src/lib/states/state.ts b/packages/event-listener/src/lib/states/state.ts index b9501c62ba..7b712d261b 100644 --- a/packages/event-listener/src/lib/states/state.ts +++ b/packages/event-listener/src/lib/states/state.ts @@ -1,3 +1,5 @@ +import { logger } from '@lit-protocol/logger'; + import { voidAsyncFunction } from '../types'; export interface BaseStateParams { @@ -29,7 +31,7 @@ export class State { * Executes the onEnter action for the state. */ async enter() { - this.debug && console.log(`enter ${this.key}`); + this.debug && logger.info(`enter ${this.key}`); await this.onEnter?.(); } @@ -37,7 +39,7 @@ export class State { * Executes the onExit action for the state. */ async exit() { - this.debug && console.log(`exit ${this.key}`); + this.debug && logger.info(`exit ${this.key}`); await this.onExit?.(); } } diff --git a/packages/event-listener/src/lib/transitions/transition.ts b/packages/event-listener/src/lib/transitions/transition.ts index 8b7ed60b1c..0306f5db67 100644 --- a/packages/event-listener/src/lib/transitions/transition.ts +++ b/packages/event-listener/src/lib/transitions/transition.ts @@ -1,3 +1,5 @@ +import { logger } from '@lit-protocol/logger'; + import { Listener } from '../listeners'; import { onError } from '../types'; @@ -75,13 +77,13 @@ export class Transition { */ async startListening() { try { - this.debug && console.log('startListening'); + this.debug && logger.info('startListening'); await Promise.all(this.listeners.map((listener) => listener.start())); if (!this.listeners.length) { // If the transition does not have any listeners it will never emit. Therefore, we "match" automatically on next event loop setTimeout(() => { - this.debug && console.log('Transition without listeners: auto match'); + this.debug && logger.info('Transition without listeners: auto match'); this.onMatch([]); }, 0); } @@ -99,7 +101,7 @@ export class Transition { */ async stopListening() { try { - this.debug && console.log('stopListening'); + this.debug && logger.info('stopListening'); this.queue.length = 0; // Flush the queue as there might be more value arrays to check await Promise.all(this.listeners.map((listener) => listener.stop())); } catch (e) { @@ -129,10 +131,10 @@ export class Transition { const isMatch = this.check ? await this.check(currentValues) : true; if (isMatch) { - this.debug && console.log('match', currentValues); + this.debug && logger.info({ msg: 'match', values: currentValues }); await this.onMatch?.(currentValues); } else { - this.debug && console.log('mismatch', currentValues); + this.debug && logger.info({ msg: 'mismatch', values: currentValues }); await this.onMismatch?.(currentValues); } } diff --git a/packages/event-listener/src/lib/types.ts b/packages/event-listener/src/lib/types.ts index 1ec982857c..cf19f2d8d0 100644 --- a/packages/event-listener/src/lib/types.ts +++ b/packages/event-listener/src/lib/types.ts @@ -3,6 +3,7 @@ import { ethers } from 'ethers'; import { LitContracts } from '@lit-protocol/contracts-sdk'; import { LitNodeClient } from '@lit-protocol/lit-node-client'; +import { Action, ActionParams } from './actions/action'; import { BaseTransitionParams } from './transitions'; export type Address = `0x${string}`; @@ -35,6 +36,13 @@ export interface UpdatesContext { } // Action Types +export type ActionConstructor = new (params: any) => Action; + +export interface RawActionDefinition { + key: string; + [actionProperty: string]: unknown; +} + export interface LitActionActionDefinition { key: 'litAction'; code?: ContextOrLiteral; @@ -96,6 +104,7 @@ export interface UseCapacityNFTActionDefinition { } export type ActionDefinition = + | RawActionDefinition | ContextActionDefinition | LitActionActionDefinition | MintCapacityNFTActionDefinition @@ -176,6 +185,7 @@ export interface TransitionParams // Machine Types export interface BaseStateMachineParams { + actionRepository?: Record; context?: Record; debug?: boolean; litContracts: LitContracts; diff --git a/packages/event-listener/src/lib/utils/chain.ts b/packages/event-listener/src/lib/utils/chain.ts index 91025ee327..dc94bd4a6c 100644 --- a/packages/event-listener/src/lib/utils/chain.ts +++ b/packages/event-listener/src/lib/utils/chain.ts @@ -1,18 +1,36 @@ import { ethers } from 'ethers'; -import { LIT_EVM_CHAINS } from '@lit-protocol/constants'; +import { + LIT_EVM_CHAINS, + InvalidArgumentException, + UnsupportedChainException, +} from '@lit-protocol/constants'; export function getEvmChain(evmChainId: ethers.BigNumberish) { const evmChainIdNumber = ethers.BigNumber.from(evmChainId).toNumber(); if (evmChainIdNumber === 0) { - throw new Error('EVM chainId cannot be 0'); + throw new InvalidArgumentException( + { + info: { + evmChainId: evmChainIdNumber, + }, + }, + 'EVM chainId cannot be 0' + ); } const chain = Object.values(LIT_EVM_CHAINS).find( (chain) => chain.chainId === evmChainIdNumber ); if (!chain) { - throw new Error(`EVM chain with chainId ${evmChainId} not found`); + throw new UnsupportedChainException( + { + info: { + evmChainId: evmChainIdNumber, + }, + }, + `EVM chain with chainId ${evmChainId} not found` + ); } return chain; diff --git a/packages/event-listener/src/lib/utils/erc20.ts b/packages/event-listener/src/lib/utils/erc20.ts index b1e04abd37..61bc76ab05 100644 --- a/packages/event-listener/src/lib/utils/erc20.ts +++ b/packages/event-listener/src/lib/utils/erc20.ts @@ -1,5 +1,7 @@ import { ethers } from 'ethers'; +import { InvalidArgumentException } from '@lit-protocol/constants'; + import { Address, BalanceTransitionDefinition } from '../types'; export const ERC20ABI = [ @@ -69,7 +71,15 @@ export function getBalanceTransitionCheck( case '>': return addressBalance.gt(targetAmount); default: - throw new Error(`Unrecognized comparator ${comparator}`); + throw new InvalidArgumentException( + { + info: { + comparator, + balance, + }, + }, + `Unrecognized comparator ${comparator}` + ); } }; diff --git a/packages/lit-auth-client/README.md b/packages/lit-auth-client/README.md deleted file mode 100644 index 989eb9c448..0000000000 --- a/packages/lit-auth-client/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# lit-auth-client - -`lit-auth-client` makes it easy to manage PKP authentication with Lit Protocol. This library offers convenient classes for social logins, Ethereum wallet sign-ins, and minting and fetching of PKPs linked to auth methods. - -## 📜 API Reference - -Check out the [API reference](https://docs.lit-js-sdk-v2.litprotocol.com/modules/lit_auth_client_src.html). - -## 📦 Installation - -Get started by installing the package: - -```bash -yarn add @lit-protocol/lit-auth-client -``` - -## 🙌 Contributing - -This library was generated with [Nx](https://nx.dev). - -### Building - -Run `nx build lit-auth-client` to build the library. - -### Running unit tests - -Run `nx test lit-auth-client` to execute the unit tests via [Jest](https://jestjs.io). diff --git a/packages/lit-auth-client/jest.config.ts b/packages/lit-auth-client/jest.config.ts deleted file mode 100644 index 61d4b7df55..0000000000 --- a/packages/lit-auth-client/jest.config.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* eslint-disable */ -export default { - displayName: 'lit-auth-client', - preset: '../../jest.preset.js', - globals: { - 'ts-jest': { - tsconfig: '/tsconfig.spec.json', - }, - }, - transform: { - '^.+\\.[tj]s$': 'ts-jest', - }, - moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: '../../coverage/packages/lit-auth-client', - transformIgnorePatterns: ['/node_modules/(?!(@simplewebauthn)/)'], - setupFilesAfterEnv: ['../../jest.setup.js'], -}; diff --git a/packages/lit-auth-client/package.json b/packages/lit-auth-client/package.json deleted file mode 100644 index 2bb9bca246..0000000000 --- a/packages/lit-auth-client/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "@lit-protocol/lit-auth-client", - "version": "8.0.0-alpha.0", - "type": "commonjs", - "license": "MIT", - "homepage": "https://github.com/Lit-Protocol/js-sdk", - "repository": { - "type": "git", - "url": "https://github.com/LIT-Protocol/js-sdk" - }, - "keywords": [ - "library" - ], - "bugs": { - "url": "https://github.com/LIT-Protocol/js-sdk/issues" - }, - "publishConfig": { - "access": "public", - "directory": "../../dist/packages/lit-auth-client" - }, - "browser": { - "crypto": false, - "stream": false - }, - "tags": [ - "vanilla" - ], - "peerDependencies": { - "@simplewebauthn/browser": "^7.2.0", - "@simplewebauthn/typescript-types": "^7.0.0" - }, - "main": "./dist/src/index.js", - "typings": "./dist/src/index.d.ts" -} diff --git a/packages/lit-auth-client/src/index.ts b/packages/lit-auth-client/src/index.ts deleted file mode 100644 index 344070a76f..0000000000 --- a/packages/lit-auth-client/src/index.ts +++ /dev/null @@ -1,29 +0,0 @@ -import AppleProvider from './lib/providers/AppleProvider'; -import { BaseProvider } from './lib/providers/BaseProvider'; -import DiscordProvider from './lib/providers/DiscordProvider'; -import EthWalletProvider from './lib/providers/EthWalletProvider'; -import GoogleProvider from './lib/providers/GoogleProvider'; -import StytchAuthFactorOtpProvider from './lib/providers/StytchAuthFactorOtp'; -import { StytchOtpProvider } from './lib/providers/StytchOtpProvider'; -import WebAuthnProvider from './lib/providers/WebAuthnProvider'; -import { LitRelay } from './lib/relay'; -import { - isSignInRedirect, - getProviderFromUrl, - getAuthIdByAuthMethod, -} from './lib/utils'; - -export { - AppleProvider, - BaseProvider, - DiscordProvider, - EthWalletProvider, - GoogleProvider, - LitRelay, - StytchOtpProvider, - StytchAuthFactorOtpProvider, - WebAuthnProvider, - isSignInRedirect, - getProviderFromUrl, - getAuthIdByAuthMethod, -}; diff --git a/packages/lit-auth-client/tsconfig.json b/packages/lit-auth-client/tsconfig.json deleted file mode 100644 index d87cb2e661..0000000000 --- a/packages/lit-auth-client/tsconfig.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "extends": "../../tsconfig.base.json", - "compilerOptions": { - "module": "commonjs", - "forceConsistentCasingInFileNames": true, - "strict": true, - "noImplicitOverride": true, - "noPropertyAccessFromIndexSignature": true, - "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true, - "allowJs": true - }, - "files": [], - "include": [], - "references": [ - { - "path": "./tsconfig.lib.json" - }, - { - "path": "./tsconfig.spec.json" - } - ] -} diff --git a/packages/lit-auth-client/tsconfig.spec.json b/packages/lit-auth-client/tsconfig.spec.json deleted file mode 100644 index 546f12877f..0000000000 --- a/packages/lit-auth-client/tsconfig.spec.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "module": "commonjs", - "types": ["jest", "node"] - }, - "include": ["jest.config.ts", "**/*.test.ts", "**/*.spec.ts", "**/*.d.ts"] -} diff --git a/packages/encryption/.babelrc b/packages/lit-client/.babelrc similarity index 100% rename from packages/encryption/.babelrc rename to packages/lit-client/.babelrc diff --git a/packages/encryption/.eslintrc.json b/packages/lit-client/.eslintrc.json similarity index 100% rename from packages/encryption/.eslintrc.json rename to packages/lit-client/.eslintrc.json diff --git a/packages/lit-client/README.md b/packages/lit-client/README.md new file mode 100644 index 0000000000..92a790c64b --- /dev/null +++ b/packages/lit-client/README.md @@ -0,0 +1,9 @@ +# Quick Start + +This package contains the main API layer for interacting with LIT network nodes and the LIT blockchain. + +### node.js / browser + +``` +yarn add @lit-protocol/lit-client +``` diff --git a/packages/lit-client/index.ts b/packages/lit-client/index.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/packages/nacl/jest.config.ts b/packages/lit-client/jest.config.ts similarity index 77% rename from packages/nacl/jest.config.ts rename to packages/lit-client/jest.config.ts index d97296773f..bdb4deef40 100644 --- a/packages/nacl/jest.config.ts +++ b/packages/lit-client/jest.config.ts @@ -1,6 +1,6 @@ /* eslint-disable */ export default { - displayName: 'nacl', + displayName: 'lit-client', preset: '../../jest.preset.js', globals: { 'ts-jest': { @@ -11,6 +11,6 @@ export default { '^.+\\.[t]s$': 'ts-jest', }, moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: '../../coverage/packages/nacl', + coverageDirectory: '../../coverage/packages/lit-client', setupFilesAfterEnv: ['../../jest.setup.js'], }; diff --git a/packages/misc/package.json b/packages/lit-client/package.json similarity index 86% rename from packages/misc/package.json rename to packages/lit-client/package.json index 8ae2d1f91d..15d280ed02 100644 --- a/packages/misc/package.json +++ b/packages/lit-client/package.json @@ -1,5 +1,5 @@ { - "name": "@lit-protocol/misc", + "name": "@lit-protocol/lit-client", "license": "MIT", "homepage": "https://github.com/Lit-Protocol/js-sdk", "repository": { @@ -15,7 +15,7 @@ "type": "commonjs", "publishConfig": { "access": "public", - "directory": "../../dist/packages/misc" + "directory": "../../dist/packages/lit-client" }, "gitHead": "0d7334c2c55f448e91fe32f29edc5db8f5e09e4b", "tags": [ diff --git a/packages/auth-browser/project.json b/packages/lit-client/project.json similarity index 53% rename from packages/auth-browser/project.json rename to packages/lit-client/project.json index bdd325457c..84b42582d7 100644 --- a/packages/auth-browser/project.json +++ b/packages/lit-client/project.json @@ -1,17 +1,17 @@ { - "name": "auth-browser", + "name": "lit-client", "$schema": "../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "packages/auth-browser/src", + "sourceRoot": "packages/lit-client/src", "projectType": "library", "targets": { "build": { "executor": "@nx/js:tsc", "outputs": ["{options.outputPath}"], "options": { - "outputPath": "dist/packages/auth-browser", - "main": "packages/auth-browser/src/index.ts", - "tsConfig": "packages/auth-browser/tsconfig.lib.json", - "assets": ["packages/auth-browser/*.md"], + "outputPath": "dist/packages/lit-client", + "main": "packages/lit-client/src/index.ts", + "tsConfig": "packages/lit-client/tsconfig.lib.json", + "assets": ["packages/lit-client/*.md"], "updateBuildableProjectDepsInPackageJson": true } }, @@ -19,14 +19,14 @@ "executor": "@nx/linter:eslint", "outputs": ["{options.outputFile}"], "options": { - "lintFilePatterns": ["packages/auth-browser/**/*.ts"] + "lintFilePatterns": ["packages/lit-client/**/*.ts"] } }, "test": { "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/auth-browser"], + "outputs": ["{workspaceRoot}/coverage/packages/lit-client"], "options": { - "jestConfig": "packages/auth-browser/jest.config.ts", + "jestConfig": "packages/lit-client/jest.config.ts", "passWithNoTests": true } } diff --git a/packages/lit-client/src/index.ts b/packages/lit-client/src/index.ts new file mode 100644 index 0000000000..8c78392744 --- /dev/null +++ b/packages/lit-client/src/index.ts @@ -0,0 +1,4 @@ +// Export our top-level consumer API and types for consumers of the entire lit-client package +// export `getLitClient({network, authManager, options? })` => { ...api } + +export {}; diff --git a/packages/lit-client/src/lib/api/index.ts b/packages/lit-client/src/lib/api/index.ts new file mode 100644 index 0000000000..7b507e8323 --- /dev/null +++ b/packages/lit-client/src/lib/api/index.ts @@ -0,0 +1,3 @@ +// This folder will contain the modules that 'glue together' chain and lit-node-specific behaviours into our top-level `lit-client` interface + +export {}; diff --git a/packages/lit-client/src/lib/chain/index.ts b/packages/lit-client/src/lib/chain/index.ts new file mode 100644 index 0000000000..014da51de2 --- /dev/null +++ b/packages/lit-client/src/lib/chain/index.ts @@ -0,0 +1,4 @@ +// Define behaviours / methods / modules that are entirely specific to interacting with LIT blockchains +// These should be 'thin' methods that basically orchestrate a provided `LitNetwork` + params, and use an internal `LitChainClient` to do what is required + +export {}; diff --git a/packages/lit-client/src/lib/index.ts b/packages/lit-client/src/lib/index.ts new file mode 100644 index 0000000000..d87c4bb696 --- /dev/null +++ b/packages/lit-client/src/lib/index.ts @@ -0,0 +1,3 @@ +import * as api from './api'; + +export { api }; diff --git a/packages/lit-client/src/lib/lit-nodes/index.ts b/packages/lit-client/src/lib/lit-nodes/index.ts new file mode 100644 index 0000000000..9168eae186 --- /dev/null +++ b/packages/lit-client/src/lib/lit-nodes/index.ts @@ -0,0 +1,4 @@ +// Define behaviours / methods / modules that are entirely specific to interacting with LIT nodes +// These should be 'thin' methods that basically orchestrate a provided `LitNetwork` + params, and use an internal `LitNodeClient` to do what is required + +export {}; diff --git a/packages/misc/tsconfig.json b/packages/lit-client/tsconfig.json similarity index 100% rename from packages/misc/tsconfig.json rename to packages/lit-client/tsconfig.json diff --git a/packages/encryption/tsconfig.lib.json b/packages/lit-client/tsconfig.lib.json similarity index 100% rename from packages/encryption/tsconfig.lib.json rename to packages/lit-client/tsconfig.lib.json diff --git a/packages/encryption/tsconfig.spec.json b/packages/lit-client/tsconfig.spec.json similarity index 100% rename from packages/encryption/tsconfig.spec.json rename to packages/lit-client/tsconfig.spec.json diff --git a/packages/lit-node-client-nodejs/.babelrc b/packages/lit-node-client-nodejs/.babelrc deleted file mode 100644 index 158083d278..0000000000 --- a/packages/lit-node-client-nodejs/.babelrc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "presets": [ - [ - "@nx/web/babel", - { - "useBuiltIns": "usage" - } - ] - ] -} diff --git a/packages/lit-node-client-nodejs/.eslintrc.json b/packages/lit-node-client-nodejs/.eslintrc.json deleted file mode 100644 index 9d9c0db55b..0000000000 --- a/packages/lit-node-client-nodejs/.eslintrc.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "extends": ["../../.eslintrc.json"], - "ignorePatterns": ["!**/*"], - "overrides": [ - { - "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], - "rules": {} - }, - { - "files": ["*.ts", "*.tsx"], - "rules": {} - }, - { - "files": ["*.js", "*.jsx"], - "rules": {} - } - ] -} diff --git a/packages/lit-node-client-nodejs/README.md b/packages/lit-node-client-nodejs/README.md deleted file mode 100644 index 4f20193e88..0000000000 --- a/packages/lit-node-client-nodejs/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Getting Started - -This `LitNodeClientNodeJs` is created solely to run on Node.js. - -The usual `checkAndSignAuthMessage` is not included in this package, so you need to add it manually to the constructor if you decide to use it on a browser, or with any custom auth callback. - -```js -import * as LitJsSdkNodeJs from '@lit-protocol/lit-node-client-nodejs'; -import { checkAndSignAuthMessage } from '@lit-protocol/auth-browser'; - -const client = new LitJsSdkNodeJs.LitNodeClientNodeJs({ - litNetwork: 'serrano', - defaultAuthCallback: checkAndSignAuthMessage, -}); - -await client.connect(); - -const authSig = await checkAndSignAuthMessage({ - chain: 'ethereum', -}); -``` diff --git a/packages/lit-node-client-nodejs/package.json b/packages/lit-node-client-nodejs/package.json deleted file mode 100644 index 55e550786a..0000000000 --- a/packages/lit-node-client-nodejs/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@lit-protocol/lit-node-client-nodejs", - "type": "commonjs", - "license": "MIT", - "homepage": "https://github.com/Lit-Protocol/js-sdk", - "repository": { - "type": "git", - "url": "https://github.com/LIT-Protocol/js-sdk" - }, - "keywords": [ - "library" - ], - "bugs": { - "url": "https://github.com/LIT-Protocol/js-sdk/issues" - }, - "publishConfig": { - "access": "public", - "directory": "../../dist/packages/lit-node-client-nodejs" - }, - "browser": { - "crypto": false, - "stream": false - }, - "tags": [ - "nodejs" - ], - "version": "8.0.0-alpha.0", - "main": "./dist/src/index.js", - "typings": "./dist/src/index.d.ts" -} diff --git a/packages/lit-node-client-nodejs/project.json b/packages/lit-node-client-nodejs/project.json deleted file mode 100644 index 1f41892a20..0000000000 --- a/packages/lit-node-client-nodejs/project.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "lit-node-client-nodejs", - "$schema": "../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "packages/lit-node-client-nodejs/src", - "projectType": "library", - "targets": { - "build": { - "executor": "@nx/js:tsc", - "outputs": ["{options.outputPath}"], - "options": { - "outputPath": "dist/packages/lit-node-client-nodejs", - "main": "packages/lit-node-client-nodejs/src/index.ts", - "tsConfig": "packages/lit-node-client-nodejs/tsconfig.lib.json", - "assets": ["packages/lit-node-client-nodejs/*.md"], - "updateBuildableProjectDepsInPackageJson": true - } - }, - "lint": { - "executor": "@nx/linter:eslint", - "outputs": ["{options.outputFile}"], - "options": { - "lintFilePatterns": ["packages/lit-node-client-nodejs/**/*.ts"] - } - }, - "test": { - "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/lit-node-client-nodejs"], - "options": { - "jestConfig": "packages/lit-node-client-nodejs/jest.config.ts", - "passWithNoTests": true - } - }, - "testWatch": { - "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/lit-node-client-nodejs"], - "options": { - "jestConfig": "packages/lit-node-client-nodejs/jest.config.ts", - "watch": true, - "passWithNoTests": true - } - } - }, - "tags": [] -} diff --git a/packages/lit-node-client-nodejs/src/index.ts b/packages/lit-node-client-nodejs/src/index.ts deleted file mode 100644 index bbe72272e2..0000000000 --- a/packages/lit-node-client-nodejs/src/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -import 'cross-fetch/dist/node-polyfill.js'; - -// ==================== Exports ==================== -export * from './lib/lit-node-client-nodejs'; - -export { - hashResourceIdForSigning, - humanizeAccessControlConditions, -} from '@lit-protocol/access-control-conditions'; - -export { - base64StringToBlob, - blobToBase64String, -} from '@lit-protocol/misc-browser'; - -export { - uint8arrayFromString, - uint8arrayToString, -} from '@lit-protocol/uint8arrays'; diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/encode-code.ts b/packages/lit-node-client-nodejs/src/lib/helpers/encode-code.ts deleted file mode 100644 index 1b45f2c58a..0000000000 --- a/packages/lit-node-client-nodejs/src/lib/helpers/encode-code.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { - uint8arrayFromString, - uint8arrayToString, -} from '@lit-protocol/uint8arrays'; - -/** - * Encodes the given code string into base64 format. - * - * @param code - The code string to be encoded. - * @returns The encoded code string in base64 format. - */ -export const encodeCode = (code: string) => { - const _uint8Array = uint8arrayFromString(code, 'utf8'); - const encodedJs = uint8arrayToString(_uint8Array, 'base64'); - - return encodedJs; -}; diff --git a/packages/lit-node-client-nodejs/src/lib/lit-node-client-nodejs.ts b/packages/lit-node-client-nodejs/src/lib/lit-node-client-nodejs.ts deleted file mode 100644 index 8270c1b1f5..0000000000 --- a/packages/lit-node-client-nodejs/src/lib/lit-node-client-nodejs.ts +++ /dev/null @@ -1,2125 +0,0 @@ -import { computeAddress } from '@ethersproject/transactions'; -import { ethers } from 'ethers'; -import { SiweMessage } from 'siwe'; - -import { - getFormattedAccessControlConditions, - getHashedAccessControlConditions, - validateAccessControlConditions, -} from '@lit-protocol/access-control-conditions'; -import { - createSiweMessage, - createSiweMessageWithCapacityDelegation, - createSiweMessageWithRecaps, - decode, - generateAuthSig, - generateSessionCapabilityObjectWithWildcards, - LitAccessControlConditionResource, - LitResourceAbilityRequest, -} from '@lit-protocol/auth-helpers'; -import { - AUTH_METHOD_TYPE, - EITHER_TYPE, - FALLBACK_IPFS_GATEWAYS, - GLOBAL_OVERWRITE_IPFS_CODE_BY_NETWORK, - InvalidArgumentException, - InvalidParamType, - InvalidSessionSigs, - InvalidSignatureError, - LIT_CURVE, - LIT_CURVE_TYPE, - LIT_ENDPOINT, - LitNodeClientNotReadyError, - LOCAL_STORAGE_KEYS, - ParamNullError, - ParamsMissingError, - PRODUCT_IDS, - SIWE_URI_PREFIX, - UnknownError, - UnsupportedMethodError, - WalletSignatureNotFoundError, -} from '@lit-protocol/constants'; -import { getNodePrices } from '@lit-protocol/contracts-sdk'; -import { composeLitUrl, LitCore } from '@lit-protocol/core'; -import { - combineSignatureShares, - encrypt, - generateSessionKeyPair, - verifyAndDecryptWithSignatureShares, - verifySignature, -} from '@lit-protocol/crypto'; -import { - defaultMintClaimCallback, - findMostCommonResponse, - formatSessionSigs, - hexPrefixed, - log, - logErrorWithRequestId, - logWithRequestId, - mostCommonString, - normalizeAndStringify, - removeHexPrefix, - safeParams, - validateSessionSigs, -} from '@lit-protocol/misc'; -import { - getStorageItem, - removeStorageItem, - setStorageItem, -} from '@lit-protocol/misc-browser'; -import { nacl } from '@lit-protocol/nacl'; -import { - AuthCallback, - AuthCallbackParams, - type AuthenticationContext, - AuthSig, - BlsResponseData, - CapacityCreditsReq, - CapacityCreditsRes, - ClaimKeyResponse, - ClaimProcessor, - ClaimRequest, - CustomNetwork, - DecryptRequest, - DecryptResponse, - EncryptionSignRequest, - EncryptResponse, - EncryptSdkParams, - ExecuteJsNoSigningResponse, - ExecuteJsResponse, - FormattedMultipleAccs, - GetWalletSigProps, - ILitNodeClient, - JsonExecutionRequest, - JsonExecutionSdkParams, - JsonPKPClaimKeyRequest, - JsonPkpSignRequest, - JsonPkpSignSdkParams, - JsonSignSessionKeyRequestV1, - JsonSignSessionKeyRequestV2, - LitNodeClientConfig, - NodeBlsSigningShare, - NodeCommandResponse, - NodeSet, - NodeShare, - PKPSignEndpointResponse, - RejectedNodePromises, - SessionKeyPair, - SessionSigningTemplate, - SessionSigsMap, - Signature, - SignSessionKeyProp, - SignSessionKeyResponse, - SigResponse, - SuccessNodePromises, -} from '@lit-protocol/types'; -import { AuthMethod } from '@lit-protocol/types'; -import { - uint8arrayFromString, - uint8arrayToString, -} from '@lit-protocol/uint8arrays'; - -import { encodeCode } from './helpers/encode-code'; -import { getBlsSignatures } from './helpers/get-bls-signatures'; -import { getClaims } from './helpers/get-claims'; -import { getClaimsList } from './helpers/get-claims-list'; -import { getExpiration } from './helpers/get-expiration'; -import { getMaxPricesForNodeProduct } from './helpers/get-max-prices-for-node-product'; -import { getSignatures } from './helpers/get-signatures'; -import { normalizeArray } from './helpers/normalize-array'; -import { normalizeJsParams } from './helpers/normalize-params'; -import { parseAsJsonOrString } from './helpers/parse-as-json-or-string'; -import { parsePkpSignResponse } from './helpers/parse-pkp-sign-response'; -import { processLitActionResponseStrategy } from './helpers/process-lit-action-response-strategy'; -import { removeDoubleQuotes } from './helpers/remove-double-quotes'; -import { blsSessionSigVerify } from './helpers/validate-bls-session-sig'; - -export class LitNodeClientNodeJs extends LitCore implements ILitNodeClient { - /** Tracks the total max price a user is willing to pay for each supported product type - * This must be distributed across all nodes; each node will get a percentage of this price - * - * If the user never sets a max price, it means 'unlimited' - */ - defaultMaxPriceByProduct: Record = { - DECRYPTION: BigInt(-1), - SIGN: BigInt(-1), - LIT_ACTION: BigInt(-1), - }; - - defaultAuthCallback?: (authSigParams: AuthCallbackParams) => Promise; - - // ========== Constructor ========== - constructor(args: LitNodeClientConfig | CustomNetwork) { - if (!args) { - throw new ParamsMissingError({}, 'must provide LitNodeClient parameters'); - } - - super(args); - - if (args !== undefined && args !== null && 'defaultAuthCallback' in args) { - this.defaultAuthCallback = args.defaultAuthCallback; - } - } - - setDefaultMaxPrice(product: keyof typeof PRODUCT_IDS, price: bigint) { - this.defaultMaxPriceByProduct[product] = price; - } - - private _getNodePrices() { - return getNodePrices({ - realmId: 1, - litNetwork: this.config.litNetwork, - networkContext: this.config.contractContext, - rpcUrl: this.config.rpcUrl, - nodeProtocol: this.config.nodeProtocol, - }); - } - // ========== Rate Limit NFT ========== - - // TODO: Add support for browser feature/lit-2321-js-sdk-add-browser-support-for-createCapacityDelegationAuthSig - createCapacityDelegationAuthSig = async ( - params: CapacityCreditsReq - ): Promise => { - // -- validate - if (!params.dAppOwnerWallet) { - throw new InvalidParamType( - { - info: { - params, - }, - }, - 'dAppOwnerWallet must exist' - ); - } - - // Useful log for debugging - if (!params.delegateeAddresses || params.delegateeAddresses.length === 0) { - log( - `[createCapacityDelegationAuthSig] 'delegateeAddresses' is an empty array. It means that no body can use it. However, if the 'delegateeAddresses' field is omitted, It means that the capability will not restrict access based on delegatee list, but it may still enforce other restrictions such as usage limits (uses) and specific NFT IDs (nft_id).` - ); - } - - // -- This is the owner address who holds the Capacity Credits NFT token and wants to delegate its - // usage to a list of delegatee addresses - const dAppOwnerWalletAddress = ethers.utils.getAddress( - await params.dAppOwnerWallet.getAddress() - ); - - // -- if it's not ready yet, then connect - if (!this.ready) { - await this.connect(); - } - - const siweMessage = await createSiweMessageWithCapacityDelegation({ - uri: SIWE_URI_PREFIX.DELEGATION, - litNodeClient: this, - walletAddress: dAppOwnerWalletAddress, - nonce: await this.getLatestBlockhash(), - expiration: params.expiration, - domain: params.domain, - statement: params.statement, - - // -- capacity delegation specific configuration - uses: params.uses, - delegateeAddresses: params.delegateeAddresses, - // paymentId: params.paymentId, // CHANGE: Not supported yet - }); - - const authSig = await generateAuthSig({ - signer: params.dAppOwnerWallet, - toSign: siweMessage, - }); - - return { capacityDelegationAuthSig: authSig }; - }; - - // ==================== SESSIONS ==================== - /** - * Try to get the session key in the local storage, - * if not, generates one. - * @return { SessionKeyPair } session key pair - */ - private _getSessionKey = (): SessionKeyPair => { - const storageKey = LOCAL_STORAGE_KEYS.SESSION_KEY; - const storedSessionKeyOrError = getStorageItem(storageKey); - - if ( - storedSessionKeyOrError.type === EITHER_TYPE.ERROR || - !storedSessionKeyOrError.result || - storedSessionKeyOrError.result === '' - ) { - console.warn( - `Storage key "${storageKey}" is missing. Not a problem. Continue...` - ); - - // Generate new one - const newSessionKey = generateSessionKeyPair(); - - // (TRY) to set to local storage - try { - localStorage.setItem(storageKey, JSON.stringify(newSessionKey)); - } catch (e) { - log( - `[getSessionKey] Localstorage not available.Not a problem. Continue...` - ); - } - - return newSessionKey; - } else { - return JSON.parse(storedSessionKeyOrError.result as string); - } - }; - - /** - * Get the signature from local storage, if not, generates one - */ - private _getWalletSig = async ({ - authNeededCallback, - chain, - sessionCapabilityObject, - switchChain, - expiration, - sessionKeyUri, - nonce, - resourceAbilityRequests, - litActionCode, - litActionIpfsId, - jsParams, - sessionKey, - }: GetWalletSigProps): Promise => { - let walletSig: AuthSig; - - const storageKey = LOCAL_STORAGE_KEYS.WALLET_SIGNATURE; - const storedWalletSigOrError = getStorageItem(storageKey); - - // browser: 2 > 2.1 > 3 - // nodejs: 1. > 1.1 - - // -- (TRY) to get it in the local storage - // -- IF NOT: Generates one - log(`getWalletSig - flow starts - storageKey: ${storageKey} - storedWalletSigOrError: ${JSON.stringify(storedWalletSigOrError)} - `); - - if ( - storedWalletSigOrError.type === EITHER_TYPE.ERROR || - !storedWalletSigOrError.result || - storedWalletSigOrError.result == '' - ) { - log('getWalletSig - flow 1'); - console.warn( - `Storage key "${storageKey}" is missing. Not a problem. Continue...` - ); - if (authNeededCallback) { - log('getWalletSig - flow 1.1'); - - const body = { - chain, - statement: sessionCapabilityObject?.statement, - resources: sessionCapabilityObject - ? [sessionCapabilityObject.encodeAsSiweResource()] - : undefined, - ...(switchChain && { switchChain }), - expiration, - uri: sessionKeyUri, - sessionKey: sessionKey, - nonce, - - // for recap - ...(resourceAbilityRequests && { resourceAbilityRequests }), - - // for lit action custom auth - ...(litActionCode && { litActionCode }), - ...(litActionIpfsId && { litActionIpfsId }), - ...(jsParams && { jsParams }), - }; - - log('callback body:', body); - - walletSig = await authNeededCallback(body); - } else { - log('getWalletSig - flow 1.2'); - if (!this.defaultAuthCallback) { - log('getWalletSig - flow 1.2.1'); - throw new ParamsMissingError( - {}, - 'No authNeededCallback nor default auth callback provided' - ); - } - - log('getWalletSig - flow 1.2.2'); - walletSig = await this.defaultAuthCallback({ - chain, - statement: sessionCapabilityObject.statement, - resources: sessionCapabilityObject - ? [sessionCapabilityObject.encodeAsSiweResource()] - : undefined, - switchChain, - expiration, - uri: sessionKeyUri, - nonce, - }); - } - - log('getWalletSig - flow 1.3'); - - // (TRY) to set walletSig to local storage - const storeNewWalletSigOrError = setStorageItem( - storageKey, - JSON.stringify(walletSig) - ); - if (storeNewWalletSigOrError.type === 'ERROR') { - log('getWalletSig - flow 1.4'); - console.warn( - `Unable to store walletSig in local storage. Not a problem. Continue...` - ); - } - } else { - log('getWalletSig - flow 2'); - try { - walletSig = JSON.parse(storedWalletSigOrError.result as string); - log('getWalletSig - flow 2.1'); - } catch (e) { - console.warn('Error parsing walletSig', e); - log('getWalletSig - flow 2.2'); - } - } - - log('getWalletSig - flow 3'); - return walletSig!; - }; - - private _authCallbackAndUpdateStorageItem = async ({ - authCallbackParams, - authCallback, - }: { - authCallbackParams: AuthCallbackParams; - authCallback?: AuthCallback; - }): Promise => { - let authSig: AuthSig; - - if (authCallback) { - authSig = await authCallback(authCallbackParams); - } else { - if (!this.defaultAuthCallback) { - throw new ParamsMissingError( - {}, - 'No authCallback nor default auth callback provided' - ); - } - authSig = await this.defaultAuthCallback(authCallbackParams); - } - - // (TRY) to set walletSig to local storage - const storeNewWalletSigOrError = setStorageItem( - LOCAL_STORAGE_KEYS.WALLET_SIGNATURE, - JSON.stringify(authSig) - ); - if (storeNewWalletSigOrError.type === EITHER_TYPE.SUCCESS) { - return authSig; - } - - // Setting local storage failed, try to remove the item key. - console.warn( - `Unable to store walletSig in local storage. Not a problem. Continuing to remove item key...` - ); - const removeWalletSigOrError = removeStorageItem( - LOCAL_STORAGE_KEYS.WALLET_SIGNATURE - ); - if (removeWalletSigOrError.type === EITHER_TYPE.ERROR) { - console.warn( - `Unable to remove walletSig in local storage. Not a problem. Continuing...` - ); - } - - return authSig; - }; - - /** - * - * Check if a session key needs to be resigned. These are the scenarios where a session key needs to be resigned: - * 1. The authSig.sig does not verify successfully against the authSig.signedMessage - * 2. The authSig.signedMessage.uri does not match the sessionKeyUri - * 3. The authSig.signedMessage does not contain at least one session capability object - * - */ - private _checkNeedToResignSessionKey = async ({ - authSig, - sessionKeyUri, - resourceAbilityRequests, - }: { - authSig: AuthSig; - sessionKeyUri: string; - resourceAbilityRequests: LitResourceAbilityRequest[]; - }): Promise => { - const authSigSiweMessage = new SiweMessage(authSig.signedMessage); - // We will either have `ed25519` or `LIT_BLS` as we have deviated from the specification of SIWE and use BLS signatures in some cases - // Here we need to check the `algo` of the SIWE to confirm we can validate the signature as if we attempt to validate the BLS signature here - // it will fail. If the algo is not defined we can assume that it was an EOA wallet signing the message so we can use SIWE. - if (authSig.algo === `ed25519` || authSig.algo === undefined) { - try { - await authSigSiweMessage.verify( - { signature: authSig.sig }, - { suppressExceptions: false } - ); - } catch (e) { - log(`Error while verifying BLS signature: `, e); - return true; - } - } else if (authSig.algo === `LIT_BLS`) { - try { - await blsSessionSigVerify( - verifySignature, - this.networkPubKey!, - authSig, - authSigSiweMessage - ); - } catch (e) { - log(`Error while verifying bls signature: `, e); - return true; - } - } else { - throw new InvalidSignatureError( - { - info: { - authSig, - resourceAbilityRequests, - sessionKeyUri, - }, - }, - 'Unsupported signature algo for session signature. Expected ed25519 or LIT_BLS received %s', - authSig.algo - ); - } - - // make sure the sig is for the correct session key - if (authSigSiweMessage.uri !== sessionKeyUri) { - log('Need retry because uri does not match'); - return true; - } - - // make sure the authSig contains at least one resource. - if ( - !authSigSiweMessage.resources || - authSigSiweMessage.resources.length === 0 - ) { - log('Need retry because empty resources'); - return true; - } - - // make sure the authSig contains session capabilities that can be parsed. - // TODO: we currently only support the first resource being a session capability object. - const authSigSessionCapabilityObject = decode( - authSigSiweMessage.resources[0] - ); - - // make sure the authSig session capability object describes capabilities that are equal or greater than - // the abilities requested against the resources in the resource ability requests. - for (const resourceAbilityRequest of resourceAbilityRequests) { - if ( - !authSigSessionCapabilityObject.verifyCapabilitiesForResource( - resourceAbilityRequest.resource, - resourceAbilityRequest.ability - ) - ) { - log('Need retry because capabilities do not match', { - authSigSessionCapabilityObject, - resourceAbilityRequest, - }); - return true; - } - } - - return false; - }; - - private _decryptWithSignatureShares = ( - networkPubKey: string, - identityParam: Uint8Array, - ciphertext: string, - signatureShares: NodeBlsSigningShare[] - ): Promise => { - const sigShares = signatureShares.map((s) => s.signatureShare); - - return verifyAndDecryptWithSignatureShares( - networkPubKey, - identityParam, - ciphertext, - sigShares - ); - }; - - /** - * Retrieves the fallback IPFS code for a given IPFS ID. - * - * @param gatewayUrl - the gateway url. - * @param ipfsId - The IPFS ID. - * @returns The base64-encoded fallback IPFS code. - * @throws An error if the code retrieval fails. - */ - private async _getFallbackIpfsCode( - gatewayUrl: string | undefined, - ipfsId: string - ) { - const allGateways = gatewayUrl - ? [gatewayUrl, ...FALLBACK_IPFS_GATEWAYS] - : FALLBACK_IPFS_GATEWAYS; - - log( - `Attempting to fetch code for IPFS ID: ${ipfsId} using fallback IPFS gateways` - ); - - for (const url of allGateways) { - try { - const response = await fetch(`${url}${ipfsId}`); - - if (!response.ok) { - throw new Error( - `Failed to fetch code from IPFS gateway ${url}: ${response.status} ${response.statusText}` - ); - } - - const code = await response.text(); - const codeBase64 = Buffer.from(code).toString('base64'); - - return codeBase64; - } catch (error) { - console.error(`Error fetching code from IPFS gateway ${url}`); - // Continue to the next gateway in the array - } - } - - throw new Error('All IPFS gateways failed to fetch the code.'); - } - - private async executeJsNodeRequest( - url: string, - formattedParams: JsonExecutionSdkParams & { sessionSigs: SessionSigsMap }, - requestId: string, - nodeSet: NodeSet[] - ) { - // -- choose the right signature - const sessionSig = this._getSessionSigByUrl({ - sessionSigs: formattedParams.sessionSigs, - url, - }); - - const reqBody: JsonExecutionRequest = { - ...formattedParams, - authSig: sessionSig, - nodeSet, - }; - - const urlWithPath = composeLitUrl({ - url, - endpoint: LIT_ENDPOINT.EXECUTE_JS, - }); - - return this.generatePromise(urlWithPath, reqBody, requestId); - } - /** - * - * Execute JS on the nodes and combine and return any resulting signatures - * - * @param { JsonExecutionSdkParams } params - * - * @returns { ExecuteJsResponse } - * - */ - executeJs = async ( - params: JsonExecutionSdkParams - ): Promise => { - // ========== Validate Params ========== - if (!this.ready) { - const message = - '[executeJs] LitNodeClient is not ready. Please call await litNodeClient.connect() first.'; - - throw new LitNodeClientNotReadyError({}, message); - } - - const paramsIsSafe = safeParams({ - functionName: 'executeJs', - params: params, - }); - - if (!paramsIsSafe) { - throw new InvalidParamType( - { - info: { - params, - }, - }, - 'executeJs params are not valid' - ); - } - - // Format the params - let formattedParams: JsonExecutionSdkParams = { - ...params, - ...(params.jsParams && { jsParams: normalizeJsParams(params.jsParams) }), - ...(params.code && { code: encodeCode(params.code) }), - }; - - // Check if IPFS options are provided and if the code should be fetched from IPFS and overwrite the current code. - // This will fetch the code from the specified IPFS gateway using the provided ipfsId, - // and update the params with the fetched code, removing the ipfsId afterward. - const overwriteCode = - params.ipfsOptions?.overwriteCode || - GLOBAL_OVERWRITE_IPFS_CODE_BY_NETWORK[this.config.litNetwork]; - - if (overwriteCode && params.ipfsId) { - const code = await this._getFallbackIpfsCode( - params.ipfsOptions?.gatewayUrl, - params.ipfsId - ); - - formattedParams = { - ...params, - code: code, - ipfsId: undefined, - }; - } - - const requestId = this._getNewRequestId(); - - const userMaxPrices = await this.getMaxPricesForNodeProduct({ - product: 'LIT_ACTION', - userMaxPrice: params.userMaxPrice, - }); - - const targetNodePrices = params.useSingleNode - ? userMaxPrices.slice(0, 1) - : userMaxPrices; - - const sessionSigs = await this._getSessionSigs({ - ...params.authContext, - userMaxPrices: targetNodePrices, - }); - - const targetNodeUrls = targetNodePrices.map(({ url }) => url); - // ========== Get Node Promises ========== - // Handle promises for commands sent to Lit nodes - const nodePromises = this._getNodePromises(targetNodeUrls, (url: string) => - this.executeJsNodeRequest( - url, - { - ...formattedParams, - sessionSigs, - }, - requestId, - this._getNodeSet(targetNodeUrls) - ) - ); - - // -- resolve promises - const res = await this._handleNodePromises( - nodePromises, - requestId, - params.useSingleNode ? 1 : this._getThreshold() - ); - - // -- case: promises rejected - if (!res.success) { - this._throwNodeError(res, requestId); - } - - // -- case: promises success (TODO: check the keys of "values") - const responseData = (res as SuccessNodePromises).values; - - logWithRequestId( - requestId, - 'executeJs responseData from node : ', - JSON.stringify(responseData, null, 2) - ); - - // -- find the responseData that has the most common response - const mostCommonResponse = findMostCommonResponse( - responseData - ) as NodeShare; - - const responseFromStrategy = processLitActionResponseStrategy( - responseData, - params.responseStrategy ?? { strategy: 'leastCommon' } - ); - mostCommonResponse.response = responseFromStrategy; - - const isSuccess = mostCommonResponse.success; - const hasSignedData = Object.keys(mostCommonResponse.signedData).length > 0; - const hasClaimData = Object.keys(mostCommonResponse.claimData).length > 0; - - // -- we must also check for claim responses as a user may have submitted for a claim and signatures must be aggregated before returning - if (isSuccess && !hasSignedData && !hasClaimData) { - return mostCommonResponse as unknown as ExecuteJsResponse; - } - - // -- in the case where we are not signing anything on Lit action and using it as purely serverless function - if (!hasSignedData && !hasClaimData) { - return { - claims: {}, - signatures: null, - decryptions: [], - response: mostCommonResponse.response, - logs: mostCommonResponse.logs, - } as ExecuteJsNoSigningResponse; - } - - // ========== Extract shares from response data ========== - - // -- 1. combine signed data as a list, and get the signatures from it - const signedDataList = responseData.map((r) => { - return removeDoubleQuotes(r.signedData); - }); - - logWithRequestId( - requestId, - 'signatures shares to combine: ', - signedDataList - ); - - // Flatten the signedDataList by moving the data within the `sig` (or any other key user may choose) object to the top level. - // The specific key name (`sig`) is irrelevant, as the contents of the object are always lifted directly. - const key = Object.keys(signedDataList[0])[0]; // Get the first key of the object - - const flattenedSignedMessageShares = signedDataList.map((item) => { - return item[key]; // Return the value corresponding to that key - }); - - // -- 2. combine responses as a string, and parse it as JSON if possible - const parsedResponse = parseAsJsonOrString(mostCommonResponse.response); - - // -- 3. combine logs - const mostCommonLogs: string = mostCommonString( - responseData.map( - (r: { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - logs: any; - }) => r.logs - ) - ); - - // -- 4. combine claims - const claimsList = getClaimsList(responseData); - const claims = claimsList.length > 0 ? getClaims(claimsList) : undefined; - - // ========== Result ========== - const returnVal: ExecuteJsResponse = { - claims, - signatures: hasSignedData - ? { - [key]: await getSignatures({ - requestId, - networkPubKeySet: this.networkPubKeySet, - threshold: params.useSingleNode ? 1 : this._getThreshold(), - signedMessageShares: flattenedSignedMessageShares, - }), - } - : {}, - // decryptions: [], - response: parsedResponse, - logs: mostCommonLogs, - }; - - log('returnVal:', returnVal); - - return returnVal; - }; - - /** - * Generates a promise by sending a command to the Lit node - * - * @param url - The URL to send the command to. - * @param params - The parameters to include in the command. - * @param requestId - The ID of the request. - * @returns A promise that resolves with the response from the server. - */ - generatePromise = async ( - url: string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - params: any, - requestId: string - ): Promise => { - return await this._sendCommandToNode({ - url, - data: params, - requestId, - }); - }; - - /** - * Use PKP to sign - * - * @param { JsonPkpSignSdkParams } params - * @param params.toSign - The data to sign - * @param params.pubKey - The public key to sign with - * @param params.sessionSigs - The session signatures to use - * @param params.authMethods - (optional) The auth methods to use - */ - pkpSign = async (params: JsonPkpSignSdkParams): Promise => { - // -- validate required params - const requiredParamKeys = ['toSign', 'pubKey', 'authContext']; - - (requiredParamKeys as (keyof JsonPkpSignSdkParams)[]).forEach((key) => { - if (!params[key]) { - throw new ParamNullError( - { - info: { - params, - key, - }, - }, - `"%s" cannot be undefined, empty, or null. Please provide a valid value.`, - key - ); - } - }); - - const requestId = this._getNewRequestId(); - - const targetNodePrices = await this.getMaxPricesForNodeProduct({ - product: 'SIGN', - userMaxPrice: params.userMaxPrice, - }); - - const sessionSigs = await this._getSessionSigs({ - pkpPublicKey: params.pubKey, - ...params.authContext, - userMaxPrices: targetNodePrices, - }); - - // validate session sigs - const checkedSessionSigs = validateSessionSigs(sessionSigs); - - if (checkedSessionSigs.isValid === false) { - throw new InvalidSessionSigs( - {}, - `Invalid sessionSigs. Errors: ${checkedSessionSigs.errors}` - ); - } - - // ========== Get Node Promises ========== - // Handle promises for commands sent to Lit nodes - - const targetNodeUrls = targetNodePrices.map(({ url }) => url); - const nodePromises = this._getNodePromises( - targetNodeUrls, - (url: string) => { - // -- get the session sig from the url key - const sessionSig = this._getSessionSigByUrl({ - sessionSigs, - url, - }); - - const reqBody: JsonPkpSignRequest = { - toSign: normalizeArray(params.toSign), - pubkey: hexPrefixed(params.pubKey), - authSig: sessionSig, - - // -- optional params - no longer allowed in >= Naga? - // ...(params.authContext.authMethods && - // params.authContext.authMethods.length > 0 && { - // authMethods: params.authContext.authMethods, - // }), - - // nodeSet: thresholdNodeSet, - nodeSet: this._getNodeSet(targetNodeUrls), - signingScheme: 'EcdsaK256Sha256', - }; - - logWithRequestId(requestId, 'reqBody:', reqBody); - - const urlWithPath = composeLitUrl({ - url, - endpoint: LIT_ENDPOINT.PKP_SIGN, - }); - - return this.generatePromise(urlWithPath, reqBody, requestId); - } - ); - - const res = await this._handleNodePromises( - nodePromises, - requestId, - this._getThreshold() - ); - - // ========== Handle Response ========== - if (!res.success) { - this._throwNodeError(res, requestId); - } - - const responseData = (res as SuccessNodePromises) - .values; - - logWithRequestId( - requestId, - 'pkpSign responseData', - JSON.stringify(responseData) - ); - - // clean up the response data (as there are double quotes & snake cases in the response) - const signedMessageShares = parsePkpSignResponse(responseData); - - try { - const signatures = await getSignatures({ - requestId, - networkPubKeySet: this.networkPubKeySet, - threshold: this._getThreshold(), - signedMessageShares: signedMessageShares, - }); - - logWithRequestId(requestId, `signature combination`, signatures); - - return signatures; - } catch (e) { - console.error('Error getting signature', e); - throw e; - } - }; - - /** - * Encrypt data using the LIT network public key. - * See more: https://developer.litprotocol.com/sdk/access-control/encryption - * - * @param { EncryptSdkParams } params - * @param params.dataToEncrypt - The data to encrypt - * @param params.accessControlConditions - (optional) The access control conditions for the data - * @param params.evmContractConditions - (optional) The EVM contract conditions for the data - * @param params.solRpcConditions - (optional) The Solidity RPC conditions for the data - * @param params.unifiedAccessControlConditions - (optional) The unified access control conditions for the data - * - * @return { Promise } The encrypted ciphertext and the hash of the data - * - * @throws { Error } if the LIT node client is not ready - * @throws { Error } if the subnetPubKey is null - */ - encrypt = async (params: EncryptSdkParams): Promise => { - // ========== Validate Params ========== - // -- validate if it's ready - if (!this.ready) { - throw new LitNodeClientNotReadyError( - {}, - '6 LitNodeClient is not ready. Please call await litNodeClient.connect() first.' - ); - } - - // -- validate if this.subnetPubKey is null - if (!this.subnetPubKey) { - throw new LitNodeClientNotReadyError({}, 'subnetPubKey cannot be null'); - } - - const paramsIsSafe = safeParams({ - functionName: 'encrypt', - params, - }); - - if (!paramsIsSafe) { - throw new InvalidArgumentException( - { - info: { - params, - }, - }, - 'You must provide either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions' - ); - } - - // ========== Validate Access Control Conditions Schema ========== - await validateAccessControlConditions(params); - - // ========== Hashing Access Control Conditions ========= - // hash the access control conditions - const hashOfConditions: ArrayBuffer | undefined = - await getHashedAccessControlConditions(params); - - if (!hashOfConditions) { - throw new InvalidArgumentException( - { - info: { - params, - }, - }, - 'You must provide either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions' - ); - } - - const hashOfConditionsStr = uint8arrayToString( - new Uint8Array(hashOfConditions), - 'base16' - ); - - // ========== Hashing Private Data ========== - // hash the private data - const hashOfPrivateData = await crypto.subtle.digest( - 'SHA-256', - params.dataToEncrypt - ); - const hashOfPrivateDataStr = uint8arrayToString( - new Uint8Array(hashOfPrivateData), - 'base16' - ); - - // ========== Assemble identity parameter ========== - const identityParam = this._getIdentityParamForEncryption( - hashOfConditionsStr, - hashOfPrivateDataStr - ); - - // ========== Encrypt ========== - const ciphertext = await encrypt( - this.subnetPubKey, - params.dataToEncrypt, - uint8arrayFromString(identityParam, 'utf8') - ); - - return { ciphertext, dataToEncryptHash: hashOfPrivateDataStr }; - }; - - /** - * - * Decrypt ciphertext with the LIT network. - * - */ - decrypt = async (params: DecryptRequest): Promise => { - const { authContext, authSig, chain, ciphertext, dataToEncryptHash } = - params; - - // ========== Validate Params ========== - // -- validate if it's ready - if (!this.ready) { - throw new LitNodeClientNotReadyError( - {}, - '6 LitNodeClient is not ready. Please call await litNodeClient.connect() first.' - ); - } - - // -- validate if this.subnetPubKey is null - if (!this.subnetPubKey) { - throw new LitNodeClientNotReadyError({}, 'subnetPubKey cannot be null'); - } - - const paramsIsSafe = safeParams({ - functionName: 'decrypt', - params, - }); - - if (!paramsIsSafe) { - throw new InvalidArgumentException( - { - info: { - params, - }, - }, - 'Parameter validation failed.' - ); - } - - // ========== Hashing Access Control Conditions ========= - // hash the access control conditions - const hashOfConditions: ArrayBuffer | undefined = - await getHashedAccessControlConditions(params); - - if (!hashOfConditions) { - throw new InvalidArgumentException( - { - info: { - params, - }, - }, - 'You must provide either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions' - ); - } - - const hashOfConditionsStr = uint8arrayToString( - new Uint8Array(hashOfConditions), - 'base16' - ); - - // ========== Formatting Access Control Conditions ========= - const { - error, - formattedAccessControlConditions, - formattedEVMContractConditions, - formattedSolRpcConditions, - formattedUnifiedAccessControlConditions, - }: FormattedMultipleAccs = getFormattedAccessControlConditions(params); - - if (error) { - throw new InvalidArgumentException( - { - info: { - params, - }, - }, - 'You must provide either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions' - ); - } - - // ========== Assemble identity parameter ========== - const identityParam = this._getIdentityParamForEncryption( - hashOfConditionsStr, - dataToEncryptHash - ); - - log('identityParam', identityParam); - - let sessionSigs: SessionSigsMap = {}; - const userMaxPrices = await this.getMaxPricesForNodeProduct({ - product: 'DECRYPTION', - userMaxPrice: params.userMaxPrice, - }); - - if (!authSig) { - if (!authContext) { - throw new InvalidArgumentException( - { - info: { - params, - }, - }, - 'Missing auth context; you must provide either authSig or authContext.' - ); - } - - sessionSigs = await this._getSessionSigs({ - ...authContext, - userMaxPrices, - }); - } - - // ========== Get Network Signature ========== - const requestId = this._getNewRequestId(); - const nodePromises = this._getNodePromises( - userMaxPrices.map(({ url }) => url), - (url: string) => { - // -- if session key is available, use it - const authSigToSend = authSig ? authSig : sessionSigs[url]; - - if (!authSigToSend) { - throw new InvalidArgumentException( - { - info: { - params, - }, - }, - 'authSig is required' - ); - } - - const reqBody: EncryptionSignRequest = { - accessControlConditions: formattedAccessControlConditions, - evmContractConditions: formattedEVMContractConditions, - solRpcConditions: formattedSolRpcConditions, - unifiedAccessControlConditions: - formattedUnifiedAccessControlConditions, - dataToEncryptHash, - chain, - authSig: authSigToSend, - epoch: this.currentEpochNumber!, - }; - - const urlWithParh = composeLitUrl({ - url, - endpoint: LIT_ENDPOINT.ENCRYPTION_SIGN, - }); - - return this.generatePromise(urlWithParh, reqBody, requestId); - } - ); - - // -- resolve promises - const res = await this._handleNodePromises( - nodePromises, - requestId, - this._getThreshold() - ); - - // -- case: promises rejected - if (!res.success) { - this._throwNodeError(res, requestId); - } - - const signatureShares: NodeBlsSigningShare[] = ( - res as SuccessNodePromises - ).values; - - logWithRequestId(requestId, 'signatureShares', signatureShares); - - // ========== Result ========== - const decryptedData = await this._decryptWithSignatureShares( - this.subnetPubKey, - uint8arrayFromString(identityParam, 'utf8'), - ciphertext, - signatureShares - ); - - return { decryptedData }; - }; - - private _getIdentityParamForEncryption = ( - hashOfConditionsStr: string, - hashOfPrivateDataStr: string - ): string => { - return new LitAccessControlConditionResource( - `${hashOfConditionsStr}/${hashOfPrivateDataStr}` - ).getResourceKey(); - }; - - /** ============================== SESSION ============================== */ - - /** - * Sign a session public key using a PKP, which generates an authSig. - * @returns {Object} An object containing the resulting signature. - */ - private _signSessionKey = async ( - params: SignSessionKeyProp - ): Promise => { - log(`[signSessionKey] params:`, params); - - // ========== Validate Params ========== - // -- validate: If it's NOT ready - if (!this.ready) { - throw new LitNodeClientNotReadyError( - {}, - '[signSessionKey] ]LitNodeClient is not ready. Please call await litNodeClient.connect() first.' - ); - } - - // -- construct SIWE message that will be signed by node to generate an authSig. - const _expiration = - params.expiration || - new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(); - - // Try to get it from local storage, if not generates one~ - const sessionKey: SessionKeyPair = - params.sessionKey ?? this._getSessionKey(); - const sessionKeyUri = this._getSessionKeyUri(sessionKey.publicKey); - - log( - `[signSessionKey] sessionKeyUri is not found in params, generating a new one`, - sessionKeyUri - ); - - if (!sessionKeyUri) { - throw new InvalidParamType( - { - info: { - params, - }, - }, - '[signSessionKey] sessionKeyUri is not defined. Please provide a sessionKeyUri or a sessionKey.' - ); - } - - // Compute the address from the public key if it's provided. Otherwise, the node will compute it. - const pkpEthAddress = (function () { - // prefix '0x' if it's not already prefixed - params.pkpPublicKey = hexPrefixed(params.pkpPublicKey!); - - if (params.pkpPublicKey) return computeAddress(params.pkpPublicKey); - - // This will be populated by the node, using dummy value for now. - return '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'; - })(); - - let siwe_statement = 'Lit Protocol PKP session signature'; - if (params.statement) { - siwe_statement += ' ' + params.statement; - log(`[signSessionKey] statement found in params: "${params.statement}"`); - } - - let siweMessage; - - const siweParams = { - domain: params?.domain || globalThis.location?.host || 'litprotocol.com', - walletAddress: pkpEthAddress, - statement: siwe_statement, - uri: sessionKeyUri, - version: '1', - chainId: params.chainId ?? 1, - expiration: _expiration, - nonce: await this.getLatestBlockhash(), - }; - - if (params.resourceAbilityRequests) { - siweMessage = await createSiweMessageWithRecaps({ - ...siweParams, - resources: params.resourceAbilityRequests, - litNodeClient: this, - }); - } else { - siweMessage = await createSiweMessage(siweParams); - } - - // This may seem a bit weird because we usually only care about prices for sessionSigs... - // But this also ensures we use the cheapest nodes and takes care of getting the minNodeCount of node URLs for the operation - const targetNodePrices = await this.getMaxPricesForNodeProduct({ - product: 'LIT_ACTION', - }); - - // ========== Get Node Promises ========== - // -- fetch shares from nodes - const body: JsonSignSessionKeyRequestV2 = { - nodeSet: this._getNodeSet(targetNodePrices.map(({ url }) => url)), - sessionKey: sessionKeyUri, - authMethods: params.authMethods, - ...(params?.pkpPublicKey && { pkpPublicKey: params.pkpPublicKey }), - siweMessage: siweMessage, - curveType: LIT_CURVE.BLS, - - // -- custom auths - ...(params?.litActionIpfsId && { - litActionIpfsId: params.litActionIpfsId, - }), - ...(params?.litActionCode && { code: params.litActionCode }), - ...(params?.jsParams && { jsParams: params.jsParams }), - ...(this.currentEpochNumber && { epoch: this.currentEpochNumber }), - signingScheme: LIT_CURVE.BLS, - }; - - log(`[signSessionKey] body:`, body); - - const requestId = this._getNewRequestId(); - logWithRequestId(requestId, 'signSessionKey body', body); - - const targetNodeUrls = targetNodePrices.map(({ url }) => url); - const nodePromises = this._getNodePromises( - targetNodeUrls, - (url: string) => { - const reqBody: JsonSignSessionKeyRequestV1 = body; - - const urlWithPath = composeLitUrl({ - url, - endpoint: LIT_ENDPOINT.SIGN_SESSION_KEY, - }); - - return this.generatePromise(urlWithPath, reqBody, requestId); - } - ); - - // -- resolve promises - let res; - try { - res = await this._handleNodePromises( - nodePromises, - requestId, - this._getThreshold() - ); - log('signSessionKey node promises:', res); - } catch (e) { - logErrorWithRequestId(requestId, e); - throw new UnknownError( - { - info: { - requestId, - }, - cause: e, - }, - 'Error when handling node promises' - ); - } - - logWithRequestId(requestId, 'handleNodePromises res:', res); - - // -- case: promises rejected - if (!res.success) { - this._throwNodeError(res as RejectedNodePromises, requestId); - return {} as SignSessionKeyResponse; - } - - const responseData: BlsResponseData[] = res.values as BlsResponseData[]; - logWithRequestId( - requestId, - '[signSessionKey] responseData', - JSON.stringify(responseData, null, 2) - ); - - // ========== Extract shares from response data ========== - // -- 1. combine signed data as a list, and get the signatures from it - const curveType = responseData[0]?.curveType; - - if (curveType === 'ECDSA') { - throw new Error( - 'The ECDSA curve type is not supported in this version. Please use version 6.x.x instead.' - ); - } - - log(`[signSessionKey] curveType is "${curveType}"`); - - const signedDataList = responseData.map((s) => s.dataSigned); - - if (signedDataList.length <= 0) { - const err = `[signSessionKey] signedDataList is empty.`; - log(err); - throw new InvalidSignatureError( - { - info: { - requestId, - responseData, - signedDataList, - }, - }, - err - ); - } - - logWithRequestId( - requestId, - '[signSessionKey] signedDataList', - signedDataList - ); - - // -- checking if we have enough shares. - const validatedSignedDataList = this._validateSignSessionKeyResponseData( - responseData, - requestId, - this._getThreshold() - ); - - const blsSignedData: BlsResponseData[] = validatedSignedDataList; - - const sigType = mostCommonString(blsSignedData.map((s) => s.curveType)); - log(`[signSessionKey] sigType:`, sigType); - - const signatureShares = getBlsSignatures(blsSignedData); - - log(`[signSessionKey] signatureShares:`, signatureShares); - - const blsCombinedSignature = await combineSignatureShares(signatureShares); - - log(`[signSessionKey] blsCombinedSignature:`, blsCombinedSignature); - - const publicKey = removeHexPrefix(params.pkpPublicKey); - log(`[signSessionKey] publicKey:`, publicKey); - - const dataSigned = mostCommonString(blsSignedData.map((s) => s.dataSigned)); - log(`[signSessionKey] dataSigned:`, dataSigned); - - const mostCommonSiweMessage = mostCommonString( - blsSignedData.map((s) => s.siweMessage) - ); - - log(`[signSessionKey] mostCommonSiweMessage:`, mostCommonSiweMessage); - - const signedMessage = normalizeAndStringify(mostCommonSiweMessage!); - - log(`[signSessionKey] signedMessage:`, signedMessage); - - const signSessionKeyRes: SignSessionKeyResponse = { - authSig: { - sig: JSON.stringify({ - ProofOfPossession: blsCombinedSignature, - }), - algo: 'LIT_BLS', - derivedVia: 'lit.bls', - signedMessage, - address: computeAddress(hexPrefixed(publicKey)), - }, - pkpPublicKey: publicKey, - }; - - return signSessionKeyRes; - }; - - getSignSessionKeyShares = async ( - url: string, - params: { - body: { - sessionKey: string; - authMethods: AuthMethod[]; - pkpPublicKey?: string; - authSig?: AuthSig; - siweMessage: string; - }; - }, - requestId: string - ) => { - log('getSignSessionKeyShares'); - const urlWithPath = composeLitUrl({ - url, - endpoint: LIT_ENDPOINT.SIGN_SESSION_KEY, - }); - return await this._sendCommandToNode({ - url: urlWithPath, - data: params.body, - requestId, - }); - }; - - getMaxPricesForNodeProduct = async ({ - userMaxPrice, - product, - }: { - userMaxPrice?: bigint; - product: keyof typeof PRODUCT_IDS; - }) => { - log('getMaxPricesForNodeProduct()', { product }); - const getUserMaxPrice = () => { - if (userMaxPrice) { - log('getMaxPricesForNodeProduct(): User provided maxPrice of', { - userMaxPrice, - }); - return userMaxPrice; - } - - if (this.defaultMaxPriceByProduct[product] === -1n) { - log( - `getMaxPricesForNodeProduct(): No user-provided maxPrice and no defaultMaxPrice set for ${product}; setting to max value` - ); - - return 340_282_366_920_938_463_463_374_607_431_768_211_455n; // Rust U128 max - } - return this.defaultMaxPriceByProduct[product]; - }; - - console.log('getMaxPricesForNodeProduct():', {}); - return getMaxPricesForNodeProduct({ - nodePrices: await this._getNodePrices(), - userMaxPrice: getUserMaxPrice(), - productId: PRODUCT_IDS[product], - numRequiredNodes: this._getThreshold(), - }); - }; - - /** - * - * Retrieves or generates sessionSigs (think access token) for accessing Lit Network resources. - * - * How this function works on a high level: - * 1. Generate or retrieve [session keys](https://v6-api-doc-lit-js-sdk.vercel.app/interfaces/types_src.SessionKeyPair.html) (a public and private key pair) - * 2. Generate or retrieve the [`AuthSig`](https://v6-api-doc-lit-js-sdk.vercel.app/interfaces/types_src.AuthSig.html) that specifies the session [abilities](https://v6-api-doc-lit-js-sdk.vercel.app/enums/auth_helpers_src.LitAbility.html) - * 3. Sign the specific resources with the session key - * - * The process follows these steps: - * 1. Retrieves or generates a session key pair (Ed25519) for the user's device. The session key is either fetched from local storage or newly created if not found. The key does not expire. - * 2. Generates an authentication signature (`authSig`) by signing an ERC-5573 "Sign-in with Ethereum" message, which includes resource ability requests, capabilities, expiration, the user's device session public key, and a nonce. The `authSig` is retrieved from local storage, and if it has expired, the user will be prompted to re-sign. - * 3. Uses the session private key to sign the session public key along with the resource ability requests, capabilities, issuedAt, and expiration details. This creates a device-generated signature. - * 4. Constructs the session signatures (`sessionSigs`) by including the device-generated signature and the original message. The `sessionSigs` provide access to Lit Network features such as `executeJs` and `pkpSign`. - * - * See Sequence Diagram: https://www.plantuml.com/plantuml/uml/VPH1RnCn48Nl_XLFlT1Av00eGkm15QKLWY8K9K9SO-rEar4sjcLFalBl6NjJAuaMRl5utfjlPjQvJsAZx7UziQtuY5-9eWaQufQ3TOAR77cJy407Rka6zlNdHTRouUbIzSEtjiTIBUswg5v_NwMnuAVlA9KKFPN3I0x9qSSj7bqNF3iPykl9c4o9oUSJMuElv2XQ8IHAYRt3bluWM8wuVUpUJwVlFjsP8JUh5B_1DyV2AYdD6DjhLsTQTaYd3W3ad28SGWqM997fG5ZrB9DJqOaALuRwH1TMpik8tIYze-E8OrPKU5I6cMqtem2kCqOhr4vdaRAvtSjcoMkTo68scKu_Vi1EPMfrP_xVtj7sFMaHNg-6GVqk0MW0z18uKdVULTvDWtdqko28b7KktvUB2hKOBd1asU2QgDfTzrj7T4bLPdv6TR0zLwPQKkkZpIRTY4CTMbrBpg_VKuXyi49beUAHqIlirOUrL2zq9JPPdpRR5OMLVQGoGlLcjyRyQNv6MHz4W_fG42W--xWhUfNyOxiLL1USS6lRLeyAkYLNjrkVJuClm_qp5I8Lq0krUw7lwIt2DgY9oiozrjA_Yhy0 - * - * Note: When generating session signatures for different PKPs or auth methods, - * be sure to call disconnectWeb3 to clear auth signatures stored in local storage - * - * @param { AuthenticationContext } params - * - * An example of how this function is used can be found in the Lit developer-guides-code repository [here](https://github.com/LIT-Protocol/developer-guides-code/tree/master/session-signatures/getSessionSigs). - * - */ - private _getSessionSigs = async ( - params: AuthenticationContext & { - userMaxPrices: { url: string; price: bigint }[]; - } - ): Promise => { - // -- prepare - // Try to get it from local storage, if not generates one~ - const sessionKey = params.sessionKey ?? this._getSessionKey(); - - const sessionKeyUri = this._getSessionKeyUri(sessionKey.publicKey); - - // First get or generate the session capability object for the specified resources. - const sessionCapabilityObject = params.sessionCapabilityObject - ? params.sessionCapabilityObject - : await generateSessionCapabilityObjectWithWildcards( - params.resourceAbilityRequests.map((r) => r.resource) - ); - const expiration = params.expiration || getExpiration(); - - // -- (TRY) to get the wallet signature - let authSig = await this._getWalletSig({ - authNeededCallback: params.authNeededCallback, - chain: params.chain || 'ethereum', - sessionCapabilityObject, - switchChain: params.switchChain, - expiration: expiration, - sessionKey: sessionKey, - sessionKeyUri: sessionKeyUri, - nonce: await this.getLatestBlockhash(), - - // -- for recap - resourceAbilityRequests: params.resourceAbilityRequests, - - // -- optional fields - ...(params.litActionCode && { litActionCode: params.litActionCode }), - ...(params.litActionIpfsId && { - litActionIpfsId: params.litActionIpfsId, - }), - ...(params.jsParams && { jsParams: params.jsParams }), - }); - - const needToResignSessionKey = await this._checkNeedToResignSessionKey({ - authSig, - sessionKeyUri, - resourceAbilityRequests: params.resourceAbilityRequests, - }); - - // -- (CHECK) if we need to resign the session key - if (needToResignSessionKey) { - log('need to re-sign session key. Signing...'); - authSig = await this._authCallbackAndUpdateStorageItem({ - authCallback: params.authNeededCallback, - authCallbackParams: { - chain: params.chain || 'ethereum', - statement: sessionCapabilityObject.statement, - resources: [sessionCapabilityObject.encodeAsSiweResource()], - switchChain: params.switchChain, - expiration, - sessionKey: sessionKey, - uri: sessionKeyUri, - nonce: await this.getLatestBlockhash(), - resourceAbilityRequests: params.resourceAbilityRequests, - - // -- optional fields - ...(params.litActionCode && { litActionCode: params.litActionCode }), - ...(params.litActionIpfsId && { - litActionIpfsId: params.litActionIpfsId, - }), - ...(params.jsParams && { jsParams: params.jsParams }), - }, - }); - } - - if ( - authSig.address === '' || - authSig.derivedVia === '' || - authSig.sig === '' || - authSig.signedMessage === '' - ) { - throw new WalletSignatureNotFoundError( - { - info: { - authSig, - }, - }, - 'No wallet signature found' - ); - } - - // ===== AFTER we have Valid Signed Session Key ===== - // - Let's sign the resources with the session key - // - 5 minutes is the default expiration for a session signature - // - Because we can generate a new session sig every time the user wants to access a resource without prompting them to sign with their wallet - const sessionExpiration = - expiration ?? new Date(Date.now() + 1000 * 60 * 5).toISOString(); - - const capabilities = params.capabilityAuthSigs - ? [ - ...(params.capabilityAuthSigs ?? []), - params.capabilityAuthSigs, - authSig, - ] - : [...(params.capabilityAuthSigs ?? []), authSig]; - - // This is the template that will be combined with the node address as a single object, then signed by the session key - // so that the node can verify the session signature - const sessionSigningTemplate = { - sessionKey: sessionKey.publicKey, - resourceAbilityRequests: params.resourceAbilityRequests, - capabilities, - issuedAt: new Date().toISOString(), - expiration: sessionExpiration, - }; - - const sessionSigs: SessionSigsMap = {}; - - // console.log( - // 'getSessionSigs()', - // util.inspect( - // { - // userMaxPrices: params.userMaxPrices, - // }, - // { depth: 4 } - // ) - // ); - - params.userMaxPrices.forEach(({ url: nodeAddress, price }) => { - const toSign: SessionSigningTemplate = { - ...sessionSigningTemplate, - nodeAddress, - maxPrice: price.toString(), - }; - - log(`Setting maxprice for ${nodeAddress} to `, price.toString()); - - const signedMessage = JSON.stringify(toSign); - - const uint8arrayKey = uint8arrayFromString( - sessionKey.secretKey, - 'base16' - ); - - const uint8arrayMessage = uint8arrayFromString(signedMessage, 'utf8'); - const signature = nacl.sign.detached(uint8arrayMessage, uint8arrayKey); - - sessionSigs[nodeAddress] = { - sig: uint8arrayToString(signature, 'base16'), - derivedVia: 'litSessionSignViaNacl', - signedMessage: signedMessage, - address: sessionKey.publicKey, - algo: 'ed25519', - }; - }); - - log('sessionSigs:', sessionSigs); - - try { - const formattedSessionSigs = formatSessionSigs( - JSON.stringify(sessionSigs) - ); - log(formattedSessionSigs); - } catch (e) { - // swallow error - log('Error formatting session signatures: ', e); - } - - return sessionSigs; - }; - - /** - * Retrieves the PKP sessionSigs. - * - * @param params - The parameters for retrieving the PKP sessionSigs. - * @returns A promise that resolves to the PKP sessionSigs. - * @throws An error if any of the required parameters are missing or if `litActionCode` and `ipfsId` exist at the same time. - */ - getPkpAuthContext = (params: AuthenticationContext) => { - const chain = params?.chain || 'ethereum'; - - return { - chain, - ...params, - authNeededCallback: async (props: AuthCallbackParams) => { - // -- validate - if (!props.expiration) { - throw new ParamsMissingError( - { - info: { - props, - }, - }, - '[getPkpSessionSigs/callback] expiration is required' - ); - } - - if (!props.resources) { - throw new ParamsMissingError( - { - info: { - props, - }, - }, - '[getPkpSessionSigs/callback]resources is required' - ); - } - - if (!props.resourceAbilityRequests) { - throw new ParamsMissingError( - { - info: { - props, - }, - }, - '[getPkpSessionSigs/callback]resourceAbilityRequests is required' - ); - } - - // lit action code and ipfs id cannot exist at the same time - if (props.litActionCode && props.litActionIpfsId) { - throw new UnsupportedMethodError( - { - info: { - props, - }, - }, - '[getPkpSessionSigs/callback]litActionCode and litActionIpfsId cannot exist at the same time' - ); - } - - // Check if IPFS options are provided and if the code should be fetched from IPFS and overwrite the current code. - // This will fetch the code from the specified IPFS gateway using the provided ipfsId, - // and update the params with the fetched code, removing the ipfsId afterward. - const overwriteCode = - params.ipfsOptions?.overwriteCode || - GLOBAL_OVERWRITE_IPFS_CODE_BY_NETWORK[this.config.litNetwork]; - - if (overwriteCode && props.litActionIpfsId) { - const code = await this._getFallbackIpfsCode( - params.ipfsOptions?.gatewayUrl, - props.litActionIpfsId - ); - - props = { - ...props, - litActionCode: code, - litActionIpfsId: undefined, - }; - } - - /** - * We must provide an empty array for authMethods even if we are not using any auth methods. - * So that the nodes can serialize the request correctly. - */ - const authMethods = params.authMethods || []; - - const response = await this._signSessionKey({ - sessionKey: props.sessionKey, - statement: props.statement || 'Some custom statement.', - authMethods: [...authMethods], - pkpPublicKey: params.pkpPublicKey, - expiration: props.expiration, - resources: props.resources, - chainId: 1, - - // -- required fields - resourceAbilityRequests: props.resourceAbilityRequests, - - // -- optional fields - ...(props.litActionCode && { litActionCode: props.litActionCode }), - ...(props.litActionIpfsId && { - litActionIpfsId: props.litActionIpfsId, - }), - ...(props.jsParams && { jsParams: props.jsParams }), - }); - - return response.authSig; - }, - }; - }; - - /** - * - * Get Session Key URI eg. lit:session:0x1234 - * - * @param publicKey is the public key of the session key - * @returns { string } the session key uri - */ - private _getSessionKeyUri = (publicKey: string): string => { - return SIWE_URI_PREFIX.SESSION_KEY + publicKey; - }; - - /** - * Authenticates an Auth Method for claiming a Programmable Key Pair (PKP). - * A {@link MintCallback} can be defined for custom on chain interactions - * by default the callback will forward to a relay server for minting on chain. - * @param {ClaimKeyRequest} params an Auth Method and {@link MintCallback} - * @returns {Promise} - */ - async claimKeyId( - params: ClaimRequest - ): Promise { - if (!this.ready) { - const message = - 'LitNodeClient is not ready. Please call await litNodeClient.connect() first.'; - throw new LitNodeClientNotReadyError({}, message); - } - - if (params.authMethod.authMethodType == AUTH_METHOD_TYPE.WebAuthn) { - throw new LitNodeClientNotReadyError( - {}, - 'Unsupported auth method type. Webauthn, and Lit Actions are not supported for claiming' - ); - } - - const requestId = this._getNewRequestId(); - - // This may seem a bit weird because we usually only care about prices for sessionSigs... - // But this also ensures we use the cheapest nodes and takes care of getting the minNodeCount of node URLs for the operation - const targetNodePrices = await this.getMaxPricesForNodeProduct({ - product: 'LIT_ACTION', - }); - - const targetNodeUrls = targetNodePrices.map(({ url }) => url); - - const nodePromises = this._getNodePromises( - targetNodeUrls, - (url: string) => { - if (!params.authMethod) { - throw new ParamsMissingError( - { - info: { - params, - }, - }, - 'authMethod is required' - ); - } - - const reqBody: JsonPKPClaimKeyRequest = { - authMethod: params.authMethod, - }; - - const urlWithPath = composeLitUrl({ - url, - endpoint: LIT_ENDPOINT.PKP_CLAIM, - }); - - return this.generatePromise(urlWithPath, reqBody, requestId); - } - ); - - const responseData = await this._handleNodePromises( - nodePromises, - requestId, - this._getThreshold() - ); - - if (responseData.success) { - const nodeSignatures: Signature[] = responseData.values.map((r) => { - const sig = ethers.utils.splitSignature(`0x${r.signature}`); - return { - r: sig.r, - s: sig.s, - v: sig.v, - }; - }); - - logWithRequestId( - requestId, - `responseData: ${JSON.stringify(responseData, null, 2)}` - ); - - const derivedKeyId = responseData.values[0].derivedKeyId; - - const pubkey = await this.computeHDPubKey(derivedKeyId); - logWithRequestId( - requestId, - `pubkey ${pubkey} derived from key id ${derivedKeyId}` - ); - - const relayParams = params as ClaimRequest<'relay'>; - - let mintTx = ''; - if (params.mintCallback && 'signer' in params) { - mintTx = await params.mintCallback( - { - derivedKeyId, - authMethodType: params.authMethod.authMethodType, - signatures: nodeSignatures, - pubkey, - signer: (params as ClaimRequest<'client'>).signer, - ...relayParams, - }, - this.config.litNetwork - ); - } else { - mintTx = await defaultMintClaimCallback( - { - derivedKeyId, - authMethodType: params.authMethod.authMethodType, - signatures: nodeSignatures, - pubkey, - ...relayParams, - }, - this.config.litNetwork - ); - } - - return { - signatures: nodeSignatures, - claimedKeyId: derivedKeyId, - pubkey, - mintTx, - }; - } else { - throw new UnknownError( - { - info: { - requestId, - responseData, - }, - }, - `Claim request has failed. Request trace id: lit_%s`, - requestId - ); - } - } - - /** - * Note: ✨ This is to check data integrity of the response from the signSessionKey endpoint. - * As sometimes the response data structure has changed and we need to update the required fields. - * Validates the response data from the signSessionKey endpoint. - * Each response data item must have all required fields and valid ProofOfPossession. - * - * @param responseData - Array of BlsResponseData to validate - * @param requestId - Request ID for logging and error reporting - * @param threshold - Minimum number of valid responses needed - * @returns Filtered array of valid BlsResponseData - * @throws InvalidSignatureError if validation fails - */ - private _validateSignSessionKeyResponseData( - responseData: BlsResponseData[], - requestId: string, - threshold: number - ): BlsResponseData[] { - // each of this field cannot be empty - const requiredFields = [ - 'signatureShare', - 'curveType', - 'siweMessage', - 'dataSigned', - 'blsRootPubkey', - 'result', - ]; - - // -- checking if we have enough shares. - const validatedSignedDataList = responseData - .map((data: BlsResponseData) => { - // check if all required fields are present - for (const field of requiredFields) { - const key: keyof BlsResponseData = field as keyof BlsResponseData; - - if ( - data[key] === undefined || - data[key] === null || - data[key] === '' - ) { - log( - `Invalid signed data. "${field}" is missing. Not a problem, we only need ${threshold} nodes to sign the session key.` - ); - return null; - } - } - - if (!data.signatureShare.ProofOfPossession) { - const err = `Invalid signed data. "ProofOfPossession" is missing.`; - log(err); - throw new InvalidSignatureError( - { - info: { - requestId, - responseData, - data, - }, - }, - err - ); - } - - return data; - }) - .filter((item) => item !== null); - - logWithRequestId( - requestId, - 'validated length:', - validatedSignedDataList.length - ); - logWithRequestId(requestId, 'minimum threshold:', threshold); - - if (validatedSignedDataList.length < threshold) { - throw new InvalidSignatureError( - { - info: { - requestId, - responseData, - validatedSignedDataList, - threshold, - }, - }, - `not enough nodes signed the session key. Expected ${threshold}, got ${validatedSignedDataList.length}` - ); - } - - return validatedSignedDataList as BlsResponseData[]; - } -} diff --git a/packages/lit-node-client-nodejs/tsconfig.json b/packages/lit-node-client-nodejs/tsconfig.json deleted file mode 100644 index afa40e9075..0000000000 --- a/packages/lit-node-client-nodejs/tsconfig.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "extends": "../../tsconfig.base.json", - "compilerOptions": { - "module": "commonjs", - "forceConsistentCasingInFileNames": true, - "strict": true, - "noImplicitOverride": true, - "noPropertyAccessFromIndexSignature": true, - "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true, - "lib": ["ES2021", "DOM"] - }, - "files": [], - "include": [], - "references": [ - { - "path": "./tsconfig.lib.json" - }, - { - "path": "./tsconfig.spec.json" - } - ] -} diff --git a/packages/lit-node-client-nodejs/tsconfig.lib.json b/packages/lit-node-client-nodejs/tsconfig.lib.json deleted file mode 100644 index e85ef50f65..0000000000 --- a/packages/lit-node-client-nodejs/tsconfig.lib.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "declaration": true, - "types": [] - }, - "include": ["**/*.ts"], - "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"] -} diff --git a/packages/lit-node-client/README.md b/packages/lit-node-client/README.md index 86ee2ff17e..6e8a8d65b7 100644 --- a/packages/lit-node-client/README.md +++ b/packages/lit-node-client/README.md @@ -1,9 +1,21 @@ -# Quick Start +# Getting Started -This module is the main module of this monorepo. It sets a default authentication callback using the `checkAndSignAuthMessage` function from the auth-browser submodule, which is designed to work in both browser and Node.js environments, facilitating interaction with Lit nodes. +This `LitNodeClient` is created solely to run on Node.js. -### node.js / browser +The usual `checkAndSignAuthMessage` is not included in this package, so you need to add it manually to the constructor if you decide to use it on a browser, or with any custom auth callback. -``` -yarn add @lit-protocol/lit-node-client +```js +import * as LitJsSdkNodeJs from '@lit-protocol/lit-node-client'; +import { checkAndSignAuthMessage } from '@lit-protocol/auth-browser'; + +const client = new LitJsSdkNodeJs.LitNodeClient({ + litNetwork: 'serrano', + defaultAuthCallback: checkAndSignAuthMessage, +}); + +await client.connect(); + +const authSig = await checkAndSignAuthMessage({ + chain: 'ethereum', +}); ``` diff --git a/packages/lit-node-client/jest.config.ts b/packages/lit-node-client/jest.config.ts index bf0a9ac9bb..d3310ee7a7 100644 --- a/packages/lit-node-client/jest.config.ts +++ b/packages/lit-node-client/jest.config.ts @@ -12,10 +12,5 @@ export default { }, moduleFileExtensions: ['ts', 'js', 'html'], coverageDirectory: '../../coverage/packages/lit-node-client', - moduleNameMapper: { - '^ipfs-unixfs-importer': - 'node_modules/ipfs-unixfs-importer/dist/index.min.js', - '^blockstore-core': 'node_modules/blockstore-core/dist/index.min.js', - }, setupFilesAfterEnv: ['../../jest.setup.js'], }; diff --git a/packages/lit-node-client/package.json b/packages/lit-node-client/package.json index 014864e988..3b4b2e99c1 100644 --- a/packages/lit-node-client/package.json +++ b/packages/lit-node-client/package.json @@ -1,5 +1,6 @@ { "name": "@lit-protocol/lit-node-client", + "type": "commonjs", "license": "MIT", "homepage": "https://github.com/Lit-Protocol/js-sdk", "repository": { @@ -12,22 +13,17 @@ "bugs": { "url": "https://github.com/LIT-Protocol/js-sdk/issues" }, - "type": "commonjs", "publishConfig": { "access": "public", "directory": "../../dist/packages/lit-node-client" }, - "gitHead": "0d7334c2c55f448e91fe32f29edc5db8f5e09e4b", - "tags": [ - "universal" - ], - "peerDependencies": { - "tslib": "^2.3.0" - }, "browser": { "crypto": false, "stream": false }, + "tags": [ + "nodejs" + ], "version": "8.0.0-alpha.0", "main": "./dist/src/index.js", "typings": "./dist/src/index.d.ts" diff --git a/packages/lit-node-client/project.json b/packages/lit-node-client/project.json index 4aee11d45d..ca972315e5 100644 --- a/packages/lit-node-client/project.json +++ b/packages/lit-node-client/project.json @@ -29,6 +29,15 @@ "jestConfig": "packages/lit-node-client/jest.config.ts", "passWithNoTests": true } + }, + "testWatch": { + "executor": "@nx/jest:jest", + "outputs": ["{workspaceRoot}/coverage/packages/lit-node-client"], + "options": { + "jestConfig": "packages/lit-node-client/jest.config.ts", + "watch": true, + "passWithNoTests": true + } } }, "tags": [] diff --git a/packages/lit-node-client/src/index.ts b/packages/lit-node-client/src/index.ts index 534a93b50a..b98ea523c1 100644 --- a/packages/lit-node-client/src/index.ts +++ b/packages/lit-node-client/src/index.ts @@ -1,10 +1,11 @@ +import 'cross-fetch/dist/node-polyfill.js'; + // ==================== Exports ==================== export * from './lib/lit-node-client'; export { - checkAndSignAuthMessage, - ethConnect, - disconnectWeb3, -} from '@lit-protocol/auth-browser'; + hashResourceIdForSigning, + humanizeAccessControlConditions, +} from '@lit-protocol/access-control-conditions'; -export * from '@lit-protocol/lit-node-client-nodejs'; +export { validateSessionSig } from './lib/helpers/session-sigs-validator'; diff --git a/packages/lit-node-client/src/lib/helpers/assemble-most-common-response.test.ts b/packages/lit-node-client/src/lib/helpers/assemble-most-common-response.test.ts new file mode 100644 index 0000000000..bd27d12437 --- /dev/null +++ b/packages/lit-node-client/src/lib/helpers/assemble-most-common-response.test.ts @@ -0,0 +1,159 @@ +import { assembleMostCommonResponse } from './assemble-most-common-response'; + +describe('assembleMostCommonResponse', () => { + it('should return an empty object when given an empty array', () => { + const responses: object[] = []; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({}); + }); + + it('should return the correct most common values for simple objects', () => { + const responses = [ + { color: 'red', size: 'large' }, + { color: 'blue', size: 'medium' }, + { color: 'red', size: 'large' }, + { color: 'red', size: 'small' }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ color: 'red', size: 'large' }); + }); + + it('should handle objects with different keys', () => { + const responses = [ + { name: 'Alice', age: 30 }, + { name: 'Bob', city: 'New York' }, + { name: 'Alice', city: 'Los Angeles' }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ name: 'Alice', age: 30, city: 'Los Angeles' }); + }); + + it('should handle nested objects correctly', () => { + const responses = [ + { + address: { city: 'New York', country: 'USA' }, + status: 'active', + }, + { + address: { city: 'Los Angeles', country: 'USA' }, + status: 'inactive', + }, + { + address: { city: 'New York', country: 'Canada' }, + status: 'active', + }, + { + address: { city: 'New York', country: 'USA' }, + status: 'active', + }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ + address: { city: 'New York', country: 'USA' }, + status: 'active', + }); + }); + + it('should handle undefined and empty string values', () => { + const responses = [ + { name: 'Alice', value: undefined }, + { name: 'Bob', value: 'test' }, + { name: 'Alice', value: '' }, + { name: 'Alice', value: 'test' }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ name: 'Alice', value: 'test' }); + }); + + it('should handle undefined and empty string values in nested object', () => { + const responses = [ + { person: { name: 'Alice', value: undefined } }, + { person: { name: 'Bob', value: 'test' } }, + { person: { name: 'Alice', value: '' } }, + { person: { name: 'Alice', value: 'test' } }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ person: { name: 'Alice', value: 'test' } }); + }); + + it('should return undefined if all values are undefined or empty string', () => { + const responses = [ + { name: 'Alice', value: undefined }, + { name: 'Bob', value: '' }, + { name: 'Alice', value: undefined }, + { name: 'Alice', value: '' }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ name: 'Alice', value: undefined }); + }); + + it('should handle nested object with different depth', () => { + const responses = [ + { data: { level1: { level2: 'value1' } } }, + { data: { level1: 'value2' } }, + { data: { level1: { level2: 'value1' } } }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ data: { level1: { level2: 'value1' } } }); + }); + + it('should handle arrays of different types', () => { + const responses = [ + { name: 'Alice', tags: ['tag1', 'tag2'] }, + { name: 'Bob', tags: ['tag2', 'tag3'] }, + { name: 'Alice', tags: ['tag1', 'tag2'] }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ name: 'Alice', tags: ['tag1', 'tag2'] }); + }); + it('should handle arrays with mixed value types', () => { + const responses = [ + { + name: 'Alice', + value: 10, + other: true, + values: [1, 2, '3'], + }, + { + name: 'Bob', + value: 10, + other: false, + values: [2, 3, '4'], + }, + { + name: 'Alice', + value: 10, + other: true, + values: [1, 2, '3'], + }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ + name: 'Alice', + value: 10, + other: true, + values: [1, 2, '3'], + }); + }); + + it('should handle ties by choosing the last encountered value', () => { + const responses = [ + { color: 'red', size: 'small' }, + { color: 'blue', size: 'large' }, + { color: 'red', size: 'large' }, + { color: 'blue', size: 'small' }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ color: 'blue', size: 'small' }); + }); + it('should handle ties in nested objects by choosing the last encountered value', () => { + const responses = [ + { data: { color: 'red', size: 'small' } }, + { data: { color: 'blue', size: 'large' } }, + { data: { color: 'red', size: 'large' } }, + { data: { color: 'blue', size: 'small' } }, + ]; + const result = assembleMostCommonResponse(responses); + expect(result).toEqual({ data: { color: 'blue', size: 'small' } }); + }); +}); diff --git a/packages/lit-node-client/src/lib/helpers/assemble-most-common-response.ts b/packages/lit-node-client/src/lib/helpers/assemble-most-common-response.ts new file mode 100644 index 0000000000..f710df35bd --- /dev/null +++ b/packages/lit-node-client/src/lib/helpers/assemble-most-common-response.ts @@ -0,0 +1,37 @@ +import { mostCommonValue } from '@lit-protocol/core'; + +export const assembleMostCommonResponse = (responses: object[]): object => { + const result: Record = {}; + + // Aggregate all values for each key across all responses + const keys = new Set(responses.flatMap(Object.keys)); + + for (const key of keys) { + const values = responses.map( + (response: Record) => response[key] + ); + + // Filter out undefined first and unmatching type values after before processing + const definedValues = values.filter( + (value) => value !== undefined && value !== '' + ); + const valuesType = mostCommonValue( + definedValues.map((value) => typeof value) + ); + const filteredValues = values.filter( + (value) => typeof value === valuesType + ); + + if (filteredValues.length === 0) { + result[key] = undefined; // or set a default value if needed + } else if (valuesType === 'object' && !Array.isArray(filteredValues[0])) { + // Recursive case for objects + result[key] = assembleMostCommonResponse(filteredValues); + } else { + // Most common element from filtered values + result[key] = mostCommonValue(filteredValues); + } + } + + return result; +}; diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/encode-code.test.ts b/packages/lit-node-client/src/lib/helpers/encode-code.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/encode-code.test.ts rename to packages/lit-node-client/src/lib/helpers/encode-code.test.ts diff --git a/packages/lit-node-client/src/lib/helpers/encode-code.ts b/packages/lit-node-client/src/lib/helpers/encode-code.ts new file mode 100644 index 0000000000..a8090b82d7 --- /dev/null +++ b/packages/lit-node-client/src/lib/helpers/encode-code.ts @@ -0,0 +1,9 @@ +/** + * Encodes the given code string into base64 format. + * + * @param code - The code string to be encoded. + * @returns The encoded code string in base64 format. + */ +export const encodeCode = (code: string) => { + return Buffer.from(code, 'utf8').toString('base64'); +}; diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-bls-signatures.test.ts b/packages/lit-node-client/src/lib/helpers/get-bls-signatures.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-bls-signatures.test.ts rename to packages/lit-node-client/src/lib/helpers/get-bls-signatures.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-bls-signatures.ts b/packages/lit-node-client/src/lib/helpers/get-bls-signatures.ts similarity index 63% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-bls-signatures.ts rename to packages/lit-node-client/src/lib/helpers/get-bls-signatures.ts index dcfdcecc1e..3d8ce64ed1 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/get-bls-signatures.ts +++ b/packages/lit-node-client/src/lib/helpers/get-bls-signatures.ts @@ -1,4 +1,4 @@ -import { log } from '@lit-protocol/misc'; +import { InvalidArgumentException } from '@lit-protocol/constants'; import { BlsResponseData, BlsSignatureShare } from '@lit-protocol/types'; /** @@ -11,7 +11,14 @@ export function getBlsSignatures( responseData: BlsResponseData[] ): BlsSignatureShare[] { if (!responseData) { - throw new Error('[getBlsSignatures] No data provided'); + throw new InvalidArgumentException( + { + info: { + responseData, + }, + }, + '[getBlsSignatures] No data provided' + ); } const signatureShares = responseData.map((s) => ({ @@ -21,10 +28,15 @@ export function getBlsSignatures( }, })); - log(`[getBlsSignatures] signatureShares:`, signatureShares); - if (!signatureShares || signatureShares.length <= 0) { - throw new Error('[getBlsSignatures] No signature shares provided'); + throw new InvalidArgumentException( + { + info: { + signatureShares, + }, + }, + '[getBlsSignatures] No signature shares provided' + ); } return signatureShares; diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-claims-list.test.ts b/packages/lit-node-client/src/lib/helpers/get-claims-list.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-claims-list.test.ts rename to packages/lit-node-client/src/lib/helpers/get-claims-list.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-claims-list.ts b/packages/lit-node-client/src/lib/helpers/get-claims-list.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-claims-list.ts rename to packages/lit-node-client/src/lib/helpers/get-claims-list.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-claims.test.ts b/packages/lit-node-client/src/lib/helpers/get-claims.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-claims.test.ts rename to packages/lit-node-client/src/lib/helpers/get-claims.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-claims.ts b/packages/lit-node-client/src/lib/helpers/get-claims.ts similarity index 99% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-claims.ts rename to packages/lit-node-client/src/lib/helpers/get-claims.ts index 8bc984efe4..56eb8a0392 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/get-claims.ts +++ b/packages/lit-node-client/src/lib/helpers/get-claims.ts @@ -1,6 +1,7 @@ -import { Signature } from '@lit-protocol/types'; import { ethers } from 'ethers'; +import { Signature } from '@lit-protocol/types'; + /** * Retrieves the claims from an array of objects and organizes them into a record. * Each claim is associated with its corresponding signatures and derived key ID. diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-expiration.ts b/packages/lit-node-client/src/lib/helpers/get-expiration.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-expiration.ts rename to packages/lit-node-client/src/lib/helpers/get-expiration.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-max-prices-for-node-product.ts b/packages/lit-node-client/src/lib/helpers/get-max-prices-for-node-product.ts similarity index 96% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-max-prices-for-node-product.ts rename to packages/lit-node-client/src/lib/helpers/get-max-prices-for-node-product.ts index e0e8c5bfb3..f657edfe78 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/get-max-prices-for-node-product.ts +++ b/packages/lit-node-client/src/lib/helpers/get-max-prices-for-node-product.ts @@ -55,8 +55,6 @@ export function getMaxPricesForNodeProduct({ * our request to fail if the price on some of the nodes is higher than we think it was, as long as it's not * drastically different than we expect it to be */ - // console.log('totalBaseCost:', totalBaseCost); - // console.log('userMaxPrice:', userMaxPrice); const excessBalance = userMaxPrice - totalBaseCost; // Map matching the keys from `nodePrices`, but w/ the per-node maxPrice computed based on `userMaxPrice` diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-max-prices-for-nodes.test.ts b/packages/lit-node-client/src/lib/helpers/get-max-prices-for-nodes.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-max-prices-for-nodes.test.ts rename to packages/lit-node-client/src/lib/helpers/get-max-prices-for-nodes.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-signatures.test.ts b/packages/lit-node-client/src/lib/helpers/get-signatures.test.ts similarity index 95% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-signatures.test.ts rename to packages/lit-node-client/src/lib/helpers/get-signatures.test.ts index a0177e3227..d776abf761 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/get-signatures.test.ts +++ b/packages/lit-node-client/src/lib/helpers/get-signatures.test.ts @@ -1,7 +1,4 @@ -import { - EcdsaSignedMessageShareParsed, - SigResponse, -} from '@lit-protocol/types'; +import { EcdsaSignedMessageShareParsed } from '@lit-protocol/types'; import { getSignatures } from './get-signatures'; @@ -51,8 +48,6 @@ describe('getSignatures', () => { requestId, }); - console.log('signatures:', signatures); - expect(signatures).toHaveProperty('publicKey'); expect(signatures).toHaveProperty('r'); expect(signatures).toHaveProperty('recid'); diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/get-signatures.ts b/packages/lit-node-client/src/lib/helpers/get-signatures.ts similarity index 80% rename from packages/lit-node-client-nodejs/src/lib/helpers/get-signatures.ts rename to packages/lit-node-client/src/lib/helpers/get-signatures.ts index 77941e63b5..bb50181fe5 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/get-signatures.ts +++ b/packages/lit-node-client/src/lib/helpers/get-signatures.ts @@ -1,24 +1,18 @@ -import { joinSignature } from 'ethers/lib/utils'; - import { CURVE_GROUP_BY_CURVE_TYPE, LIT_CURVE_VALUES, + InvalidArgumentException, NoValidShares, ParamNullError, - UnknownSignatureError, UnknownSignatureType, CurveTypeNotFoundError, } from '@lit-protocol/constants'; +import { mostCommonValue } from '@lit-protocol/core'; import { combineEcdsaShares } from '@lit-protocol/crypto'; -import { - logErrorWithRequestId, - logWithRequestId, - mostCommonString, -} from '@lit-protocol/misc'; +import { logger } from '@lit-protocol/logger'; import { EcdsaSignedMessageShareParsed, SigResponse, - SigShare, } from '@lit-protocol/types'; /** @@ -42,7 +36,8 @@ export const getSignatures = async (params: { signedMessageShares: EcdsaSignedMessageShareParsed[]; requestId: string; }): Promise => { - let { networkPubKeySet, threshold, signedMessageShares, requestId } = params; + const { networkPubKeySet, threshold, signedMessageShares, requestId } = + params; if (networkPubKeySet === null) { throw new ParamNullError( @@ -56,10 +51,11 @@ export const getSignatures = async (params: { } if (signedMessageShares.length < threshold) { - logErrorWithRequestId( + logger.error({ + function: 'getSignatures', requestId, - `not enough nodes to get the signatures. Expected ${threshold}, got ${signedMessageShares.length}` - ); + msg: `not enough nodes to get the signatures. Expected ${threshold}, got ${signedMessageShares.length}`, + }); throw new NoValidShares( { @@ -105,15 +101,24 @@ export const getSignatures = async (params: { // -- combine const combinedSignature = await combineEcdsaShares(signedMessageShares); - const _publicKey = mostCommonString( + const _publicKey = mostCommonValue( signedMessageShares.map((s) => s.publicKey) ); - const _dataSigned = mostCommonString( + const _dataSigned = mostCommonValue( signedMessageShares.map((s) => s.dataSigned) ); if (!_publicKey || !_dataSigned) { - throw new Error('No valid publicKey or dataSigned found'); + throw new InvalidArgumentException( + { + info: { + requestId, + publicKey: _publicKey, + dataSigned: _dataSigned, + }, + }, + 'No valid publicKey or dataSigned found' + ); } const sigResponse: SigResponse = { diff --git a/packages/lit-node-client/src/lib/helpers/hex.test.ts b/packages/lit-node-client/src/lib/helpers/hex.test.ts new file mode 100644 index 0000000000..b857529b1d --- /dev/null +++ b/packages/lit-node-client/src/lib/helpers/hex.test.ts @@ -0,0 +1,55 @@ +import { numberToHex, hexPrefixed, removeHexPrefix } from './hex'; + +describe('Hex Helpers', () => { + describe('numberToHex', () => { + it('should convert a number to a hex string prefixed with 0x', () => { + expect(numberToHex(255)).toBe('0xff'); + }); + + it('should convert zero correctly', () => { + expect(numberToHex(0)).toBe('0x0'); + }); + + it('should convert a larger number correctly', () => { + expect(numberToHex(4095)).toBe('0xfff'); + }); + }); + + describe('hexPrefixed', () => { + it('should return the string unchanged if it already has 0x prefix', () => { + expect(hexPrefixed('0xabcdef')).toBe('0xabcdef'); + }); + + it('should add 0x prefix if not present', () => { + expect(hexPrefixed('abcdef')).toBe('0xabcdef'); + }); + + it('should add 0x prefix to an empty string', () => { + expect(hexPrefixed('')).toBe('0x'); + }); + }); + + describe('removeHexPrefix', () => { + it('should remove the hex prefix from a string that starts with 0x', () => { + const input = '0xabcdef'; + const expectedOutput = 'abcdef'; + expect(removeHexPrefix(input)).toBe(expectedOutput); + }); + + it('should return the string unchanged if no 0x prefix is present', () => { + const input = 'abcdef'; + const expectedOutput = 'abcdef'; + expect(removeHexPrefix(input)).toBe(expectedOutput); + }); + + it('should not remove prefix if it is uppercase 0X (not valid)', () => { + // The helper checks only for lowercase '0x' + const input = '0XABCDEF'; + expect(removeHexPrefix(input)).toBe('0XABCDEF'); + }); + + it('should handle an empty string', () => { + expect(removeHexPrefix('')).toBe(''); + }); + }); +}); diff --git a/packages/lit-node-client/src/lib/helpers/hex.ts b/packages/lit-node-client/src/lib/helpers/hex.ts new file mode 100644 index 0000000000..e451013de8 --- /dev/null +++ b/packages/lit-node-client/src/lib/helpers/hex.ts @@ -0,0 +1,38 @@ +import { Hex } from '@lit-protocol/types'; + +/** + * + * Convert number to hex + * @param { number } v + * @return { string } hex value prefixed with 0x + */ +export const numberToHex = (v: number): Hex => { + return `0x${v.toString(16)}`; +}; + +/** + * Adds a '0x' prefix to a string if it doesn't already have one. + * @param str - The input string. + * @returns The input string with a '0x' prefix. + */ +export const hexPrefixed = (str: string): Hex => { + if (str.startsWith('0x')) { + return str as Hex; + } + + return ('0x' + str) as Hex; +}; + +/** + * Removes the '0x' prefix from a hexadecimal string if it exists. + * + * @param str - The input string. + * @returns The input string with the '0x' prefix removed, if present. + */ +export const removeHexPrefix = (str: string) => { + if (str.startsWith('0x')) { + return str.slice(2); + } + + return str; +}; diff --git a/packages/lit-node-client/src/lib/helpers/mint-claim-callback.ts b/packages/lit-node-client/src/lib/helpers/mint-claim-callback.ts new file mode 100644 index 0000000000..494787764b --- /dev/null +++ b/packages/lit-node-client/src/lib/helpers/mint-claim-callback.ts @@ -0,0 +1,95 @@ +import { + InvalidArgumentException, + LIT_NETWORK, + LIT_NETWORK_VALUES, + NetworkError, + RELAYER_URL_BY_NETWORK, + WrongNetworkException, +} from '@lit-protocol/constants'; +import { logger } from '@lit-protocol/logger'; +import { + ClaimResult, + MintCallback, + RelayClaimProcessor, +} from '@lit-protocol/types'; + +/** + * Checks if the given LIT_NETWORK value is supported. + * @param litNetwork - The Lit Network value to check. + * @throws {Error} - Throws an error if the Lit Network value is not supported. + */ +export function isSupportedLitNetwork( + litNetwork: LIT_NETWORK_VALUES +): asserts litNetwork is LIT_NETWORK_VALUES { + const supportedNetworks = Object.values(LIT_NETWORK); + + if (!supportedNetworks.includes(litNetwork)) { + throw new WrongNetworkException( + { + info: { + litNetwork, + supportedNetworks, + }, + }, + `Unsupported LitNetwork! (${supportedNetworks.join('|')}) are supported.` + ); + } +} + +export const defaultMintClaimCallback: MintCallback< + RelayClaimProcessor +> = async ( + params: ClaimResult, + network: LIT_NETWORK_VALUES = LIT_NETWORK.NagaDev +): Promise => { + isSupportedLitNetwork(network); + + const AUTH_CLAIM_PATH = '/auth/claim'; + + const relayUrl: string = params.relayUrl || RELAYER_URL_BY_NETWORK[network]; + + if (!relayUrl) { + throw new InvalidArgumentException( + { + info: { + network, + relayUrl, + }, + }, + 'No relayUrl provided and no default relayUrl found for network' + ); + } + + const relayUrlWithPath = relayUrl + AUTH_CLAIM_PATH; + + const response = await fetch(relayUrlWithPath, { + method: 'POST', + body: JSON.stringify(params), + headers: { + 'api-key': params.relayApiKey + ? params.relayApiKey + : '67e55044-10b1-426f-9247-bb680e5fe0c8_relayer', + 'Content-Type': 'application/json', + }, + }); + + if (response.status < 200 || response.status >= 400) { + const errResp = (await response.json()) ?? ''; + const errStmt = `An error occurred requesting "/auth/claim" endpoint ${JSON.stringify( + errResp + )}`; + logger.warn(errStmt); + throw new NetworkError( + { + info: { + response, + errResp, + }, + }, + `An error occurred requesting "/auth/claim" endpoint` + ); + } + + const body = await response.json(); + return body.requestId; +}; diff --git a/packages/lit-node-client/src/lib/helpers/normalize-and-stringify.test.ts b/packages/lit-node-client/src/lib/helpers/normalize-and-stringify.test.ts new file mode 100644 index 0000000000..0d720c3390 --- /dev/null +++ b/packages/lit-node-client/src/lib/helpers/normalize-and-stringify.test.ts @@ -0,0 +1,78 @@ +import { normalizeAndStringify } from './normalize-and-stringify'; + +describe('normalizeAndStringify', () => { + it('should return a non-JSON string unchanged', () => { + const input = 'Hello, world!'; + expect(normalizeAndStringify(input)).toBe(input); + }); + + it('should parse and stringify a valid JSON object', () => { + const input = '{"a": "b"}'; + // JSON.stringify removes spaces so the output will be: {"a":"b"} + expect(normalizeAndStringify(input)).toBe('{"a":"b"}'); + }); + + it('should parse and stringify a valid JSON array', () => { + const input = '[1, 2, 3]'; + expect(normalizeAndStringify(input)).toBe('[1,2,3]'); + }); + + it('should normalize an overly escaped JSON object', () => { + // The input string is overly escaped. + // The literal here represents: {\"a\":\"b\"} + const input = '{\\"a\\":\\"b\\"}'; + expect(normalizeAndStringify(input)).toBe('{"a":"b"}'); + }); + + it('should normalize an overly escaped JSON array', () => { + // The literal represents: [\"a\",\"b\"] + const input = '[\\"a\\",\\"b\\"]'; + expect(normalizeAndStringify(input)).toBe('["a","b"]'); + }); + + it('should return a malformed JSON string as is', () => { + // Even though it starts with '{', it's not valid JSON and cannot be normalized. + const input = '{not a json}'; + expect(normalizeAndStringify(input)).toBe(input); + }); + + it('should return an empty string unchanged', () => { + const input = ''; + expect(normalizeAndStringify(input)).toBe(''); + }); + + it('should recursively normalize multiple levels of escaping', () => { + // This input is escaped twice: + // The literal represents: {\\\"a\\\":\\\"b\\\"} + // After one unescape, it becomes: {\"a\":\"b\"} which is still not valid JSON, + // so it needs a second unescape to yield valid JSON {"a":"b"}. + const input = '{\\\\\\"a\\\\\\":\\\\\\"b\\\\\\"}'; + expect(normalizeAndStringify(input)).toBe('{"a":"b"}'); + }); + + describe('double escaped JSON string', () => { + test('A doubly escaped JSON string', () => { + const doublyEscapedJson = '{\\"key\\": \\"value\\"}'; + expect(normalizeAndStringify(doublyEscapedJson)).toBe('{"key":"value"}'); + }); + + test('A triply escaped JSON string', () => { + const triplyEscapedJson = '{\\\\\\"key\\\\\\": \\\\\\"value\\\\\\"}'; + expect(normalizeAndStringify(triplyEscapedJson)).toBe('{"key":"value"}'); + }); + + test('A correctly escaped JSON string (for comparison)', () => { + const correctlyEscapedJson = '{"key":"value"}'; + expect(normalizeAndStringify(correctlyEscapedJson)).toBe( + '{"key":"value"}' + ); + }); + + test('regular siwe message', () => { + const regularString = + 'litprotocol.com wants you to sign in with your Ethereum account:\\n0x3edB...'; + + expect(normalizeAndStringify(regularString)).toBe(regularString); + }); + }); +}); diff --git a/packages/lit-node-client/src/lib/helpers/normalize-and-stringify.ts b/packages/lit-node-client/src/lib/helpers/normalize-and-stringify.ts new file mode 100644 index 0000000000..a10312b2e6 --- /dev/null +++ b/packages/lit-node-client/src/lib/helpers/normalize-and-stringify.ts @@ -0,0 +1,37 @@ +/** + * Attempts to normalize a string by unescaping it until it can be parsed as a JSON object, + * then stringifies it exactly once. If the input is a regular string that does not represent + * a JSON object or array, the function will return it as is without modification. + * This function is designed to handle cases where strings might be excessively escaped due + * to multiple layers of encoding, ensuring that JSON data is stored in a consistent and + * predictable format, and regular strings are left unchanged. + * + * @param input The potentially excessively escaped string. + * @return A string that is either the JSON.stringify version of the original JSON object + * or the original string if it does not represent a JSON object or array. + */ +export function normalizeAndStringify(input: string): string { + try { + // Directly return the string if it's not in a JSON format + if (!input.startsWith('{') && !input.startsWith('[')) { + return input; + } + + // Attempt to parse the input as JSON + const parsed = JSON.parse(input); + + // If parsing succeeds, return the stringified version of the parsed JSON + return JSON.stringify(parsed); + } catch (error) { + // If parsing fails, it might be due to extra escaping + const unescaped = input.replace(/\\(.)/g, '$1'); + + // If unescaping doesn't change the string, return it as is + if (input === unescaped) { + return input; + } + + // Otherwise, recursively call the function with the unescaped string + return normalizeAndStringify(unescaped); + } +} diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/normalize-array.test.ts b/packages/lit-node-client/src/lib/helpers/normalize-array.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/normalize-array.test.ts rename to packages/lit-node-client/src/lib/helpers/normalize-array.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/normalize-array.ts b/packages/lit-node-client/src/lib/helpers/normalize-array.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/normalize-array.ts rename to packages/lit-node-client/src/lib/helpers/normalize-array.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/normalize-params.test.ts b/packages/lit-node-client/src/lib/helpers/normalize-params.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/normalize-params.test.ts rename to packages/lit-node-client/src/lib/helpers/normalize-params.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/normalize-params.ts b/packages/lit-node-client/src/lib/helpers/normalize-params.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/normalize-params.ts rename to packages/lit-node-client/src/lib/helpers/normalize-params.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/parse-as-json-or-string.test.ts b/packages/lit-node-client/src/lib/helpers/parse-as-json-or-string.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/parse-as-json-or-string.test.ts rename to packages/lit-node-client/src/lib/helpers/parse-as-json-or-string.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/parse-as-json-or-string.ts b/packages/lit-node-client/src/lib/helpers/parse-as-json-or-string.ts similarity index 61% rename from packages/lit-node-client-nodejs/src/lib/helpers/parse-as-json-or-string.ts rename to packages/lit-node-client/src/lib/helpers/parse-as-json-or-string.ts index c980d2fea7..790f6f2b1b 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/parse-as-json-or-string.ts +++ b/packages/lit-node-client/src/lib/helpers/parse-as-json-or-string.ts @@ -1,4 +1,4 @@ -import { log } from '@lit-protocol/misc'; +import { logger } from '@lit-protocol/logger'; /** * Parses a response string into a JS object. @@ -12,10 +12,11 @@ export const parseAsJsonOrString = ( try { return JSON.parse(responseString); } catch (e) { - log( - '[parseResponses] Error parsing response as json. Swallowing and returning as string.', - responseString - ); + logger.warn({ + function: 'parseAsJsonOrString', + msg: 'Error parsing response as json. Swallowing and returning as string.', + responseString, + }); return responseString; } }; diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/parse-pkp-sign-response.test.ts b/packages/lit-node-client/src/lib/helpers/parse-pkp-sign-response.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/parse-pkp-sign-response.test.ts rename to packages/lit-node-client/src/lib/helpers/parse-pkp-sign-response.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/parse-pkp-sign-response.ts b/packages/lit-node-client/src/lib/helpers/parse-pkp-sign-response.ts similarity index 93% rename from packages/lit-node-client-nodejs/src/lib/helpers/parse-pkp-sign-response.ts rename to packages/lit-node-client/src/lib/helpers/parse-pkp-sign-response.ts index 1261842705..9d594a7b0f 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/parse-pkp-sign-response.ts +++ b/packages/lit-node-client/src/lib/helpers/parse-pkp-sign-response.ts @@ -1,3 +1,4 @@ +import { InvalidArgumentException } from '@lit-protocol/constants'; import { EcdsaSignedMessageShareParsed, PKPSignEndpointResponse, @@ -76,7 +77,14 @@ export const parsePkpSignResponse = ( : signatureShare; if (!resolvedShare || typeof resolvedShare !== 'object') { - throw new Error('Invalid signatureShare structure.'); + throw new InvalidArgumentException( + { + info: { + signatureShare, + }, + }, + 'Invalid signatureShare structure.' + ); } const camelCaseShare = convertKeysToCamelCase(resolvedShare); diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/pocess-lit-action-response-strategy.spec.ts b/packages/lit-node-client/src/lib/helpers/pocess-lit-action-response-strategy.spec.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/pocess-lit-action-response-strategy.spec.ts rename to packages/lit-node-client/src/lib/helpers/pocess-lit-action-response-strategy.spec.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/process-lit-action-response-strategy.ts b/packages/lit-node-client/src/lib/helpers/process-lit-action-response-strategy.ts similarity index 65% rename from packages/lit-node-client-nodejs/src/lib/helpers/process-lit-action-response-strategy.ts rename to packages/lit-node-client/src/lib/helpers/process-lit-action-response-strategy.ts index 4d725ffeda..34f9117921 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/process-lit-action-response-strategy.ts +++ b/packages/lit-node-client/src/lib/helpers/process-lit-action-response-strategy.ts @@ -1,20 +1,16 @@ -import { - LitActionResponseStrategy, - ResponseStrategy, - NodeShare, -} from '@lit-protocol/types'; -import { log, logError } from '@lit-protocol/misc'; +import { logger } from '@lit-protocol/logger'; +import { LitActionResponseStrategy, NodeShare } from '@lit-protocol/types'; /** * Finds the most and least common object within an of objects array - * @param responses any[] - * @returns an object which contains both the least and most occuring item in the array + * @param responses T[] + * @returns an object which contains both the least and most occurring T items in the array */ -const _findFrequency = (responses: string[]): { min: any; max: any } => { +const _findFrequency = (responses: T[]): { min: T; max: T } => { const sorted = responses.sort( - (a: any, b: any) => - responses.filter((v: any) => v === a).length - - responses.filter((v: any) => v === b).length + (a, b) => + responses.filter((v) => v === a).length - + responses.filter((v) => v === b).length ); return { min: sorted[0], max: sorted[sorted?.length - 1] }; @@ -23,7 +19,7 @@ const _findFrequency = (responses: string[]): { min: any; max: any } => { export const processLitActionResponseStrategy = ( responses: NodeShare[], strategy: LitActionResponseStrategy -): any => { +) => { const executionResponses = responses.map((nodeResp) => { return nodeResp.response; }); @@ -38,33 +34,34 @@ export const processLitActionResponseStrategy = ( strategy?.customFilter(executionResponses); return customResponseFilterResult; } else { - logError( + logger.error( 'Custom filter specified for response strategy but none found. using most common' ); } } catch (e) { - logError( - 'Error while executing custom response filter, defaulting to most common', - (e as Error).toString() - ); + logger.error({ + function: 'processLitActionResponseStrategy', + msg: 'Error while executing custom response filter, defaulting to most common', + error: (e as Error).toString(), + }); } } - let respFrequency = _findFrequency(copiedExecutionResponses); + const respFrequency = _findFrequency(copiedExecutionResponses); if (strategy?.strategy === 'leastCommon') { - log( + logger.info( 'strategy found to be most common, taking most common response from execution results' ); return respFrequency.min; } else if (strategy?.strategy === 'mostCommon') { - log( + logger.info( 'strategy found to be most common, taking most common response from execution results' ); return respFrequency.max; } else { - log( + logger.info( 'no strategy found, using least common response object from execution results' ); - respFrequency.min; + return respFrequency.min; } }; diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/remove-double-quotes.test.ts b/packages/lit-node-client/src/lib/helpers/remove-double-quotes.test.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/remove-double-quotes.test.ts rename to packages/lit-node-client/src/lib/helpers/remove-double-quotes.test.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/remove-double-quotes.ts b/packages/lit-node-client/src/lib/helpers/remove-double-quotes.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/remove-double-quotes.ts rename to packages/lit-node-client/src/lib/helpers/remove-double-quotes.ts diff --git a/packages/misc/src/lib/helper/session-sigs-reader.test.ts b/packages/lit-node-client/src/lib/helpers/session-sigs-reader.test.ts similarity index 100% rename from packages/misc/src/lib/helper/session-sigs-reader.test.ts rename to packages/lit-node-client/src/lib/helpers/session-sigs-reader.test.ts diff --git a/packages/misc/src/lib/helper/session-sigs-reader.ts b/packages/lit-node-client/src/lib/helpers/session-sigs-reader.ts similarity index 85% rename from packages/misc/src/lib/helper/session-sigs-reader.ts rename to packages/lit-node-client/src/lib/helpers/session-sigs-reader.ts index 6b8c63e95b..99c66792fd 100644 --- a/packages/misc/src/lib/helper/session-sigs-reader.ts +++ b/packages/lit-node-client/src/lib/helpers/session-sigs-reader.ts @@ -1,3 +1,6 @@ +import { InvalidArgumentException } from '@lit-protocol/constants'; +import { logger } from '@lit-protocol/logger'; + import { parseSignedMessage } from './session-sigs-validator'; function formatDuration(start: Date, end: Date): string { @@ -33,7 +36,7 @@ function formatStatus(expirationDate: Date, currentDate: Date): string { /** * Convert this format: * {"lit-ratelimitincrease://25364":{"Auth/Auth":[{"nft_id":["25364"]}]}} - * to human readable format + * to human-readable format */ function humanReadableAtt(obj: any, indentLevel: number = 0): string { const indent = ' '.repeat(indentLevel * 2); @@ -73,7 +76,15 @@ export function formatSessionSigs( } catch (error: unknown) { const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - throw new Error(`Invalid JSON format for signedMessage: ${errorMessage}`); + throw new InvalidArgumentException( + { + info: { + signedMessage, + firstNodeSignedMessage: firstNode.signedMessage, + }, + }, + `Invalid JSON format for signedMessage: ${errorMessage}` + ); } const currentDate = new Date(currentTime); @@ -88,7 +99,14 @@ export function formatSessionSigs( } catch (error) { const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - throw new Error(`Error parsing issuedAt or expiration: ${errorMessage}`); + throw new InvalidArgumentException( + { + info: { + signedMessage, + }, + }, + `Error parsing issuedAt or expiration: ${errorMessage}` + ); } result += '* Outer expiration:\n'; @@ -111,7 +129,11 @@ export function formatSessionSigs( attenuation = humanReadableAtt(jsonRecap.att, 6); } catch (e) { // swallow error - console.log('Error parsing attenuation::', e); + logger.info({ + function: 'formatSessionSigs', + msg: 'Error parsing attenuation', + error: e, + }); } const capIssuedAt = new Date(parsedCapMessage['Issued At'] || ''); diff --git a/packages/misc/src/lib/helper/session-sigs-validator.spec.ts b/packages/lit-node-client/src/lib/helpers/session-sigs-validator.spec.ts similarity index 99% rename from packages/misc/src/lib/helper/session-sigs-validator.spec.ts rename to packages/lit-node-client/src/lib/helpers/session-sigs-validator.spec.ts index ddc906323c..24a7c1680a 100644 --- a/packages/misc/src/lib/helper/session-sigs-validator.spec.ts +++ b/packages/lit-node-client/src/lib/helpers/session-sigs-validator.spec.ts @@ -1,4 +1,5 @@ import { AuthSig } from '@lit-protocol/types'; + import { validateSessionSigs } from './session-sigs-validator'; describe('validateSessionSigs', () => { diff --git a/packages/misc/src/lib/helper/session-sigs-validator.ts b/packages/lit-node-client/src/lib/helpers/session-sigs-validator.ts similarity index 100% rename from packages/misc/src/lib/helper/session-sigs-validator.ts rename to packages/lit-node-client/src/lib/helpers/session-sigs-validator.ts diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/validate-bls-session-sig.ts b/packages/lit-node-client/src/lib/helpers/validate-bls-session-sig.ts similarity index 88% rename from packages/lit-node-client-nodejs/src/lib/helpers/validate-bls-session-sig.ts rename to packages/lit-node-client/src/lib/helpers/validate-bls-session-sig.ts index 55b5f0cd41..0c7641c75d 100644 --- a/packages/lit-node-client-nodejs/src/lib/helpers/validate-bls-session-sig.ts +++ b/packages/lit-node-client/src/lib/helpers/validate-bls-session-sig.ts @@ -1,12 +1,9 @@ -import { log } from '@lit-protocol/misc'; -import { AuthSig } from '@lit-protocol/types'; -import { - uint8arrayFromString, - uint8arrayToString, -} from '@lit-protocol/uint8arrays'; import { ethers } from 'ethers'; import { SiweError, SiweErrorType, SiweMessage } from 'siwe'; +import { InvalidArgumentException } from '@lit-protocol/constants'; +import { AuthSig } from '@lit-protocol/types'; + const LIT_SESSION_SIGNED_MESSAGE_PREFIX = 'lit_session:'; /** @@ -29,8 +26,8 @@ export const blsSessionSigVerify = async ( authSig: AuthSig, authSigSiweMessage: SiweMessage ): Promise => { - let sigJson = JSON.parse(authSig.sig); - // we do not nessesarly need to use ethers here but was a quick way + const sigJson = JSON.parse(authSig.sig); + // we do not necessarily need to use ethers here but was a quick way // to get verification working. const eip191Hash = ethers.utils.hashMessage(authSig.signedMessage); const prefixedStr = @@ -43,7 +40,12 @@ export const blsSessionSigVerify = async ( const checkTime = new Date(); if (!authSigSiweMessage.expirationTime || !authSigSiweMessage.issuedAt) { - throw new Error( + throw new InvalidArgumentException( + { + info: { + authSigSiweMessage, + }, + }, 'Invalid SIWE message. Missing expirationTime or issuedAt.' ); } diff --git a/packages/lit-node-client-nodejs/src/lib/helpers/validate-bls-session-sigs.spec.ts b/packages/lit-node-client/src/lib/helpers/validate-bls-session-sigs.spec.ts similarity index 100% rename from packages/lit-node-client-nodejs/src/lib/helpers/validate-bls-session-sigs.spec.ts rename to packages/lit-node-client/src/lib/helpers/validate-bls-session-sigs.spec.ts diff --git a/packages/lit-node-client-nodejs/src/lib/lit-node-client-nodejs.spec.ts b/packages/lit-node-client/src/lib/lit-node-client.spec.ts similarity index 53% rename from packages/lit-node-client-nodejs/src/lib/lit-node-client-nodejs.spec.ts rename to packages/lit-node-client/src/lib/lit-node-client.spec.ts index 379ad6049c..d8c64b332a 100644 --- a/packages/lit-node-client-nodejs/src/lib/lit-node-client-nodejs.spec.ts +++ b/packages/lit-node-client/src/lib/lit-node-client.spec.ts @@ -1,34 +1,27 @@ -// @ts-nocheck - -// This will prevent it logging the following -// [Lit-JS-SDK v2.2.39] ✅ [BLS SDK] wasmExports loaded -// [Lit-JS-SDK v2.2.39] ✅ [ECDSA SDK NodeJS] wasmECDSA loaded. -global.jestTesting = true; - import { LIT_NETWORK } from '@lit-protocol/constants'; -import { LitNodeClientNodeJs } from './lit-node-client-nodejs'; +import { LitNodeClient } from './lit-node-client'; -const isClass = (v) => { +const isClass = (v: unknown) => { return typeof v === 'function' && /^\s*class\s+/.test(v.toString()); }; -describe('LitNodeClientNodeJs', () => { +describe('LitNodeClient', () => { // --start; - it('imported { LitNodeClientNodeJs } is a class', async () => { - expect(isClass(LitNodeClientNodeJs)).toBe(true); + it('imported { LitNodeClient } is a class', async () => { + expect(isClass(LitNodeClient)).toBe(true); }); - it('should be able to instantiate a new LitNodeClientNodeJs to custom', async () => { - const litNodeClient = new LitNodeClientNodeJs({ + it('should be able to instantiate a new LitNodeClient to custom', async () => { + const litNodeClient = new LitNodeClient({ litNetwork: LIT_NETWORK.Custom, }); expect(litNodeClient).toBeDefined(); }); - it('should be able to instantiate a new LitNodeClientNodeJs to naga dev', async () => { - const litNodeClient = new LitNodeClientNodeJs({ + it('should be able to instantiate a new LitNodeClient to naga dev', async () => { + const litNodeClient = new LitNodeClient({ litNetwork: LIT_NETWORK.NagaDev, }); expect(litNodeClient).toBeDefined(); @@ -38,7 +31,7 @@ describe('LitNodeClientNodeJs', () => { const tmp = globalThis.localStorage; Object.defineProperty(globalThis, 'localStorage', { value: undefined }); const ls = require('node-localstorage').LocalStorage; - const litNodeClient = new LitNodeClientNodeJs({ + const litNodeClient = new LitNodeClient({ litNetwork: LIT_NETWORK.Custom, storageProvider: { provider: new ls('./storage.test.db'), diff --git a/packages/lit-node-client/src/lib/lit-node-client.ts b/packages/lit-node-client/src/lib/lit-node-client.ts index 716be94e98..1f8f13bf36 100644 --- a/packages/lit-node-client/src/lib/lit-node-client.ts +++ b/packages/lit-node-client/src/lib/lit-node-client.ts @@ -1,56 +1,2093 @@ -import { checkAndSignAuthMessage } from '@lit-protocol/auth-browser'; -import { EITHER_TYPE } from '@lit-protocol/constants'; -import { LitNodeClientNodeJs } from '@lit-protocol/lit-node-client-nodejs'; -import { isNode, log } from '@lit-protocol/misc'; -import { getStorageItem } from '@lit-protocol/misc-browser'; -import { CustomNetwork, LitNodeClientConfig } from '@lit-protocol/types'; - -/** - * You can find all these available networks in the `constants` package - * - * @example - * - * ``` - * import { LIT_NETWORK } from '@lit-protocol/constants'; - * - * const litNodeClient = new LitNodeClient({ - litNetwork: LIT_NETWORK.DatilTest, - }); - * ``` - */ -export class LitNodeClient extends LitNodeClientNodeJs { +import { computeAddress } from '@ethersproject/transactions'; +import { ed25519 } from '@noble/curves/ed25519'; +import { ethers } from 'ethers'; +import { SiweMessage } from 'siwe'; + +import { + getFormattedAccessControlConditions, + getHashedAccessControlConditions, +} from '@lit-protocol/access-control-conditions'; +import { + createSiweMessage, + createSiweMessageWithCapacityDelegation, + createSiweMessageWithRecaps, + decode, + generateAuthSig, + generateSessionCapabilityObjectWithWildcards, + LitAccessControlConditionResource, +} from '@lit-protocol/auth-helpers'; +import { + AUTH_METHOD_TYPE, + FALLBACK_IPFS_GATEWAYS, + GLOBAL_OVERWRITE_IPFS_CODE_BY_NETWORK, + InvalidArgumentException, + InvalidParamType, + InvalidSessionSigs, + InvalidSignatureError, + LIT_CURVE, + LIT_CURVE_TYPE, + LIT_ENDPOINT, + LitNetworkError, + LitNodeClientNotReadyError, + LOCAL_STORAGE_KEYS, + NetworkError, + ParamNullError, + ParamsMissingError, + PRODUCT_IDS, + SIWE_URI_PREFIX, + UnknownError, + UnsupportedMethodError, + WalletSignatureNotFoundError, +} from '@lit-protocol/constants'; +import { getNodePrices } from '@lit-protocol/contracts-sdk'; +import { composeLitUrl, mostCommonValue, LitCore } from '@lit-protocol/core'; +import { + combineSignatureShares, + encrypt, + generateSessionKeyPair, + verifyAndDecryptWithSignatureShares, + verifySignature, +} from '@lit-protocol/crypto'; +import { Logger, getChildLogger } from '@lit-protocol/logger'; +import { + getStorageItem, + removeStorageItem, + setStorageItem, +} from '@lit-protocol/misc-browser'; +import { + applySchemaWithValidation, + DecryptRequestSchema, + EncryptRequestSchema, + JsonExecutionSdkParamsBaseSchema, +} from '@lit-protocol/schemas'; +import { + AuthCallback, + AuthCallbackParams, + type AuthenticationContext, + AuthSig, + BlsResponseData, + CapacityCreditsReq, + CapacityCreditsRes, + ClaimKeyResponse, + ClaimProcessor, + ClaimRequest, + CustomNetwork, + DecryptRequest, + DecryptResponse, + EncryptionSignRequest, + EncryptResponse, + EncryptSdkParams, + ExecuteJsNoSigningResponse, + ExecuteJsResponse, + FormattedMultipleAccs, + GetWalletSigProps, + ILitNodeClient, + JsonExecutionRequest, + JsonExecutionSdkParams, + JsonPKPClaimKeyRequest, + JsonPkpSignRequest, + JsonPkpSignSdkParams, + JsonSignSessionKeyRequestV1, + JsonSignSessionKeyRequestV2, + LitNodeClientConfig, + LitResourceAbilityRequest, + NodeBlsSigningShare, + NodeCommandResponse, + NodeSet, + NodeShare, + PKPSignEndpointResponse, + RejectedNodePromises, + SessionKeyPair, + SessionSigningTemplate, + SessionSigsMap, + Signature, + SignSessionKeyProp, + SignSessionKeyResponse, + SigResponse, + SuccessNodePromises, +} from '@lit-protocol/types'; +import { AuthMethod } from '@lit-protocol/types'; + +import { assembleMostCommonResponse } from './helpers/assemble-most-common-response'; +import { encodeCode } from './helpers/encode-code'; +import { getBlsSignatures } from './helpers/get-bls-signatures'; +import { getClaims } from './helpers/get-claims'; +import { getClaimsList } from './helpers/get-claims-list'; +import { getExpiration } from './helpers/get-expiration'; +import { getMaxPricesForNodeProduct } from './helpers/get-max-prices-for-node-product'; +import { getSignatures } from './helpers/get-signatures'; +import { hexPrefixed, removeHexPrefix } from './helpers/hex'; +import { defaultMintClaimCallback } from './helpers/mint-claim-callback'; +import { normalizeAndStringify } from './helpers/normalize-and-stringify'; +import { normalizeArray } from './helpers/normalize-array'; +import { normalizeJsParams } from './helpers/normalize-params'; +import { parseAsJsonOrString } from './helpers/parse-as-json-or-string'; +import { parsePkpSignResponse } from './helpers/parse-pkp-sign-response'; +import { processLitActionResponseStrategy } from './helpers/process-lit-action-response-strategy'; +import { removeDoubleQuotes } from './helpers/remove-double-quotes'; +import { formatSessionSigs } from './helpers/session-sigs-reader'; +import { validateSessionSigs } from './helpers/session-sigs-validator'; +import { blsSessionSigVerify } from './helpers/validate-bls-session-sig'; + +export class LitNodeClient extends LitCore implements ILitNodeClient { + private readonly _litNodeLogger: Logger; + /** Tracks the total max price a user is willing to pay for each supported product type + * This must be distributed across all nodes; each node will get a percentage of this price + * + * If the user never sets a max price, it means 'unlimited' + */ + defaultMaxPriceByProduct: Record = { + DECRYPTION: BigInt(-1), + SIGN: BigInt(-1), + LIT_ACTION: BigInt(-1), + }; + + defaultAuthCallback?: (authSigParams: AuthCallbackParams) => Promise; + + // ========== Constructor ========== constructor(args: LitNodeClientConfig | CustomNetwork) { - super({ - ...args, - defaultAuthCallback: checkAndSignAuthMessage, + if (!args) { + throw new ParamsMissingError({}, 'must provide LitNodeClient parameters'); + } + + super(args); + + this._litNodeLogger = getChildLogger({ + module: 'LitNodeClient', + ...(this.config.debug ? { level: 'debug' } : {}), }); - // -- override configs - this._overrideConfigsFromLocalStorage(); + if (args !== undefined && args !== null && 'defaultAuthCallback' in args) { + this.defaultAuthCallback = args.defaultAuthCallback; + } + } + + setDefaultMaxPrice(product: keyof typeof PRODUCT_IDS, price: bigint) { + this.defaultMaxPriceByProduct[product] = price; + } + + private _getNodePrices() { + return getNodePrices({ + realmId: 1, + litNetwork: this.config.litNetwork, + networkContext: this.config.contractContext, + rpcUrl: this.config.rpcUrl, + nodeProtocol: this.config.nodeProtocol, + }); } + // ========== Rate Limit NFT ========== + + // TODO: Add support for browser feature/lit-2321-js-sdk-add-browser-support-for-createCapacityDelegationAuthSig + createCapacityDelegationAuthSig = async ( + params: CapacityCreditsReq + ): Promise => { + // -- validate + if (!params.dAppOwnerWallet) { + throw new InvalidParamType( + { + info: { + params, + }, + }, + 'dAppOwnerWallet must exist' + ); + } + + // Useful log for debugging + if (!params.delegateeAddresses || params.delegateeAddresses.length === 0) { + this._litNodeLogger.info( + `[createCapacityDelegationAuthSig] 'delegateeAddresses' is an empty array. It means that no body can use it. However, if the 'delegateeAddresses' field is omitted, It means that the capability will not restrict access based on delegatee list, but it may still enforce other restrictions such as usage limits (uses) and specific NFT IDs (nft_id).` + ); + } + + // -- This is the owner address who holds the Capacity Credits NFT token and wants to delegate its + // usage to a list of delegatee addresses + const dAppOwnerWalletAddress = ethers.utils.getAddress( + await params.dAppOwnerWallet.getAddress() + ); + + // -- if it's not ready yet, then connect + if (!this.ready) { + await this.connect(); + } + + const siweMessage = await createSiweMessageWithCapacityDelegation({ + uri: SIWE_URI_PREFIX.DELEGATION, + litNodeClient: this, + walletAddress: dAppOwnerWalletAddress, + nonce: await this.getLatestBlockhash(), + expiration: params.expiration, + domain: params.domain, + statement: params.statement, + + // -- capacity delegation specific configuration + uses: params.uses, + delegateeAddresses: params.delegateeAddresses, + // paymentId: params.paymentId, // CHANGE: Not supported yet + }); + + const authSig = await generateAuthSig({ + signer: params.dAppOwnerWallet, + toSign: siweMessage, + }); + + return { capacityDelegationAuthSig: authSig }; + }; + + // ==================== SESSIONS ==================== + /** + * Try to get the session key in the local storage, + * if not, generates one. + * @return { SessionKeyPair } session key pair + */ + private _getSessionKey = (): SessionKeyPair => { + const storageKey = LOCAL_STORAGE_KEYS.SESSION_KEY; + + try { + const storedSessionKeyString = getStorageItem(storageKey); + return JSON.parse(storedSessionKeyString); + } catch (e) { + this._litNodeLogger.warn({ + msg: `Couldn't get session key from local storage key "${storageKey}". Not a problem. Continue...`, + error: e, + }); + } + + this._litNodeLogger.info('Generating new session key...'); + // Generate new one + const newSessionKey = generateSessionKeyPair(); + + try { + setStorageItem(storageKey, JSON.stringify(newSessionKey)); + } catch (e) { + this._litNodeLogger.info( + `Localstorage not available. Not a problem. Continue...` + ); + } + + return newSessionKey; + }; + + /** + * Get the signature from local storage, if not, generates one + */ + private _getWalletSig = async ({ + authNeededCallback, + chain, + sessionCapabilityObject, + switchChain, + expiration, + sessionKeyUri, + nonce, + resourceAbilityRequests, + litActionCode, + litActionIpfsId, + jsParams, + sessionKey, + }: GetWalletSigProps): Promise => { + let walletSig: AuthSig | undefined; + + const storageKey = LOCAL_STORAGE_KEYS.WALLET_SIGNATURE; + + // -- (TRY) to get it in the local storage + // -- IF NOT: Generates one + this._litNodeLogger.info( + `getWalletSig - fetching from storageKey: ${storageKey}` + ); + + try { + const walletSigString = getStorageItem(storageKey); + walletSig = JSON.parse(walletSigString); + } catch (e) { + this._litNodeLogger.warn({ + msg: `Could not get wallet sig from storage key "${storageKey}"`, + error: e, + }); + } + + if (!walletSig) { + if (authNeededCallback) { + this._litNodeLogger.info( + 'getWalletSig - generating with authNeededCallback' + ); + + const body = { + chain, + statement: sessionCapabilityObject?.statement, + resources: sessionCapabilityObject + ? [sessionCapabilityObject.encodeAsSiweResource()] + : undefined, + ...(switchChain && { switchChain }), + expiration, + uri: sessionKeyUri, + sessionKey: sessionKey, + nonce, + + // for recap + ...(resourceAbilityRequests && { resourceAbilityRequests }), + + // for lit action custom auth + ...(litActionCode && { litActionCode }), + ...(litActionIpfsId && { litActionIpfsId }), + ...(jsParams && { jsParams }), + }; + + this._litNodeLogger.info({ msg: 'callback body', body }); + + walletSig = await authNeededCallback(body); + } else if (this.defaultAuthCallback) { + this._litNodeLogger.info( + 'getWalletSig - generating with defaultAuthCallback' + ); + walletSig = await this.defaultAuthCallback({ + chain, + statement: sessionCapabilityObject.statement, + resources: sessionCapabilityObject + ? [sessionCapabilityObject.encodeAsSiweResource()] + : undefined, + switchChain, + expiration, + uri: sessionKeyUri, + nonce, + }); + } else { + throw new ParamsMissingError( + {}, + 'getWalletSig - No authNeededCallback nor default auth callback provided' + ); + } + + // If localStorage, authNeededCallback or defaultAuthCallback didn't fail, walletSig is defined by it + this._litNodeLogger.info({ msg: 'getWalletSig - walletSig', walletSig }); + + // (TRY) to set walletSig to local storage + try { + setStorageItem(storageKey, JSON.stringify(walletSig)); + } catch (e) { + this._litNodeLogger.warn({ + msg: `Unable to store walletSig in local storage. Not a problem. Continue...`, + error: e, + }); + } + } + + return walletSig; + }; + + private _authCallbackAndUpdateStorageItem = async ({ + authCallbackParams, + authCallback, + }: { + authCallbackParams: AuthCallbackParams; + authCallback?: AuthCallback; + }): Promise => { + let authSig: AuthSig; + + if (authCallback) { + authSig = await authCallback(authCallbackParams); + } else { + if (!this.defaultAuthCallback) { + throw new ParamsMissingError( + {}, + 'No authCallback nor default auth callback provided' + ); + } + authSig = await this.defaultAuthCallback(authCallbackParams); + } + + // (TRY) to set walletSig to local storage + try { + setStorageItem( + LOCAL_STORAGE_KEYS.WALLET_SIGNATURE, + JSON.stringify(authSig) + ); + return authSig; + } catch (e) { + // Setting local storage failed, try to remove the item key. + this._litNodeLogger.warn({ + msg: `Unable to store walletSig in local storage. Not a problem. Continuing to remove item key...`, + error: e, + }); + } + + try { + removeStorageItem(LOCAL_STORAGE_KEYS.WALLET_SIGNATURE); + } catch (e) { + // Ignore error and continue + this._litNodeLogger.warn({ + msg: `Unable to remove walletSig in local storage. Not a problem. Continuing...`, + error: e, + }); + } + + return authSig; + }; /** * - * (Browser Only) Get the config from browser local storage and override default config + * Check if a session key needs to be resigned. These are the scenarios where a session key needs to be resigned: + * 1. The authSig.sig does not verify successfully against the authSig.signedMessage + * 2. The authSig.signedMessage.uri does not match the sessionKeyUri + * 3. The authSig.signedMessage does not contain at least one session capability object * - * @returns { void } + */ + private _checkNeedToResignSessionKey = async ({ + authSig, + sessionKeyUri, + resourceAbilityRequests, + }: { + authSig: AuthSig; + sessionKeyUri: string; + resourceAbilityRequests: LitResourceAbilityRequest[]; + }): Promise => { + const authSigSiweMessage = new SiweMessage(authSig.signedMessage); + // We will either have `ed25519` or `LIT_BLS` as we have deviated from the specification of SIWE and use BLS signatures in some cases + // Here we need to check the `algo` of the SIWE to confirm we can validate the signature as if we attempt to validate the BLS signature here + // it will fail. If the algo is not defined we can assume that it was an EOA wallet signing the message so we can use SIWE. + if (authSig.algo === `ed25519` || authSig.algo === undefined) { + try { + await authSigSiweMessage.verify( + { signature: authSig.sig }, + { suppressExceptions: false } + ); + } catch (e) { + this._litNodeLogger.error({ + msg: `Error while verifying BLS signature: `, + e, + }); + return true; + } + } else if (authSig.algo === `LIT_BLS`) { + try { + await blsSessionSigVerify( + verifySignature, + this.networkPubKey!, + authSig, + authSigSiweMessage + ); + } catch (e) { + this._litNodeLogger.error({ + msg: `Error while verifying bls signature: `, + e, + }); + return true; + } + } else { + throw new InvalidSignatureError( + { + info: { + authSig, + resourceAbilityRequests, + sessionKeyUri, + }, + }, + 'Unsupported signature algo for session signature. Expected ed25519 or LIT_BLS received %s', + authSig.algo + ); + } + + // make sure the sig is for the correct session key + if (authSigSiweMessage.uri !== sessionKeyUri) { + this._litNodeLogger.info('Need retry because uri does not match'); + return true; + } + + // make sure the authSig contains at least one resource. + if ( + !authSigSiweMessage.resources || + authSigSiweMessage.resources.length === 0 + ) { + this._litNodeLogger.info('Need retry because empty resources'); + return true; + } + + // make sure the authSig contains session capabilities that can be parsed. + // TODO: we currently only support the first resource being a session capability object. + const authSigSessionCapabilityObject = decode( + authSigSiweMessage.resources[0] + ); + + // make sure the authSig session capability object describes capabilities that are equal or greater than + // the abilities requested against the resources in the resource ability requests. + for (const resourceAbilityRequest of resourceAbilityRequests) { + if ( + !authSigSessionCapabilityObject.verifyCapabilitiesForResource( + resourceAbilityRequest.resource, + resourceAbilityRequest.ability + ) + ) { + this._litNodeLogger.info({ + msg: 'Need retry because capabilities do not match', + authSigSessionCapabilityObject, + resourceAbilityRequest, + }); + return true; + } + } + + return false; + }; + + private _decryptWithSignatureShares = ( + networkPubKey: string, + identityParam: Uint8Array, + ciphertext: string, + signatureShares: NodeBlsSigningShare[] + ): Promise => { + const sigShares = signatureShares.map((s) => s.signatureShare); + + return verifyAndDecryptWithSignatureShares( + networkPubKey, + identityParam, + ciphertext, + sigShares + ); + }; + + /** + * Retrieves the fallback IPFS code for a given IPFS ID. * + * @param gatewayUrl - the gateway url. + * @param ipfsId - The IPFS ID. + * @returns The base64-encoded fallback IPFS code. + * @throws An error if the code retrieval fails. */ - private _overrideConfigsFromLocalStorage = (): void => { - if (isNode()) return; + private async _getFallbackIpfsCode( + gatewayUrl: string | undefined, + ipfsId: string + ) { + const allGateways = gatewayUrl + ? [gatewayUrl, ...FALLBACK_IPFS_GATEWAYS] + : FALLBACK_IPFS_GATEWAYS; - const storageKey = 'LitNodeClientConfig'; - const storageConfigOrError = getStorageItem(storageKey); + this._litNodeLogger.info( + `Attempting to fetch code for IPFS ID: ${ipfsId} using fallback IPFS gateways` + ); - // -- validate - if (storageConfigOrError.type === EITHER_TYPE.ERROR) { - log(`Storage key "${storageKey}" is missing. `); - return; + for (const url of allGateways) { + try { + const response = await fetch(`${url}${ipfsId}`); + + if (!response.ok) { + throw new NetworkError( + { + info: { + ipfsId, + gatewayUrl: url, + responseStatus: response.status, + responseStatusText: response.statusText, + }, + }, + `Failed to fetch code from IPFS gateway ${url}: ${response.status} ${response.statusText}` + ); + } + + const code = await response.text(); + const codeBase64 = Buffer.from(code).toString('base64'); + + return codeBase64; + } catch (error) { + this._litNodeLogger.error( + `Error fetching code from IPFS gateway ${url}` + ); + // Continue to the next gateway in the array + } + } + + throw new NetworkError( + { + info: { + ipfsId, + gatewayUrl, + }, + }, + 'All IPFS gateways failed to fetch the code.' + ); + } + + private async executeJsNodeRequest( + url: string, + formattedParams: JsonExecutionSdkParams & { sessionSigs: SessionSigsMap }, + requestId: string, + nodeSet: NodeSet[] + ) { + // -- choose the right signature + const sessionSig = this._getSessionSigByUrl({ + sessionSigs: formattedParams.sessionSigs, + url, + }); + + const reqBody: JsonExecutionRequest = { + ...formattedParams, + authSig: sessionSig, + nodeSet, + }; + + const urlWithPath = composeLitUrl({ + url, + endpoint: LIT_ENDPOINT.EXECUTE_JS, + }); + + return this.generatePromise(urlWithPath, reqBody, requestId); + } + + /** + * + * Execute JS on the nodes and combine and return any resulting signatures + * + * @param { JsonExecutionSdkParams } params + * + * @returns { ExecuteJsResponse } + * + */ + executeJs = async ( + params: JsonExecutionSdkParams + ): Promise => { + // ========== Validate Params ========== + const _params = applySchemaWithValidation( + 'executeJs', + params, + JsonExecutionSdkParamsBaseSchema + ); + + if (!this.ready) { + const message = + '[executeJs] LitNodeClient is not ready. Please call await litNodeClient.connect() first.'; + + throw new LitNodeClientNotReadyError({}, message); + } + + // Format the params + let formattedParams: JsonExecutionSdkParams = { + ..._params, + ...(_params.jsParams && { + jsParams: normalizeJsParams(_params.jsParams), + }), + ...(_params.code && { code: encodeCode(_params.code) }), + }; + + // Check if IPFS options are provided and if the code should be fetched from IPFS and overwrite the current code. + // This will fetch the code from the specified IPFS gateway using the provided ipfsId, + // and update the params with the fetched code, removing the ipfsId afterward. + const overwriteCode = + _params.ipfsOptions?.overwriteCode || + GLOBAL_OVERWRITE_IPFS_CODE_BY_NETWORK[this.config.litNetwork]; + + if (overwriteCode && _params.ipfsId) { + const code = await this._getFallbackIpfsCode( + _params.ipfsOptions?.gatewayUrl, + _params.ipfsId + ); + + formattedParams = { + ..._params, + code: code, + ipfsId: undefined, + }; + } + + const requestId = this._getNewRequestId(); + + const userMaxPrices = await this.getMaxPricesForNodeProduct({ + product: 'LIT_ACTION', + userMaxPrice: _params.userMaxPrice, + }); + + const targetNodePrices = _params.useSingleNode + ? userMaxPrices.slice(0, 1) + : userMaxPrices; + + const sessionSigs = await this._getSessionSigs({ + ..._params.authContext, + userMaxPrices: targetNodePrices, + }); + + const targetNodeUrls = targetNodePrices.map(({ url }) => url); + // ========== Get Node Promises ========== + // Handle promises for commands sent to Lit nodes + const nodePromises = this._getNodePromises(targetNodeUrls, (url: string) => + this.executeJsNodeRequest( + url, + { + ...formattedParams, + sessionSigs, + }, + requestId, + this._getNodeSet(targetNodeUrls) + ) + ); + + // -- resolve promises + const res = await this._handleNodePromises( + nodePromises, + requestId, + _params.useSingleNode ? 1 : this._getThreshold() + ); + + // -- case: promises rejected + if (!res.success) { + this._throwNodeError(res, requestId); + } + + // -- case: promises success (TODO: check the keys of "values") + const responseData = (res as SuccessNodePromises).values; + + this._litNodeLogger.info({ + requestId, + responseData, + }); + + // -- find the responseData that has the most common response + const mostCommonResponse = assembleMostCommonResponse( + responseData + ) as NodeShare; + + const responseFromStrategy = processLitActionResponseStrategy( + responseData, + _params.responseStrategy ?? { strategy: 'leastCommon' } + ); + mostCommonResponse.response = responseFromStrategy; + + const isSuccess = mostCommonResponse.success; + const hasSignedData = Object.keys(mostCommonResponse.signedData).length > 0; + const hasClaimData = Object.keys(mostCommonResponse.claimData).length > 0; + + // -- we must also check for claim responses as a user may have submitted for a claim and signatures must be aggregated before returning + if (isSuccess && !hasSignedData && !hasClaimData) { + return mostCommonResponse as unknown as ExecuteJsResponse; + } + + // -- in the case where we are not signing anything on Lit action and using it as purely serverless function + if (!hasSignedData && !hasClaimData) { + return { + claims: {}, + signatures: null, + decryptions: [], + response: mostCommonResponse.response, + logs: mostCommonResponse.logs, + } as ExecuteJsNoSigningResponse; + } + + // ========== Extract shares from response data ========== + + // -- 1. combine signed data as a list, and get the signatures from it + const signedDataList = responseData.map((r) => { + return removeDoubleQuotes(r.signedData); + }); + + this._litNodeLogger.info({ + requestId, + msg: 'signatures shares to combine', + signedDataList, + }); + + // Flatten the signedDataList by moving the data within the `sig` (or any other key user may choose) object to the top level. + // The specific key name (`sig`) is irrelevant, as the contents of the object are always lifted directly. + const key = Object.keys(signedDataList[0])[0]; // Get the first key of the object + + const flattenedSignedMessageShares = signedDataList.map((item) => { + return item[key]; // Return the value corresponding to that key + }); + + // -- 2. combine responses as a string, and parse it as JSON if possible + const parsedResponse = parseAsJsonOrString(mostCommonResponse.response); + + // -- 3. combine logs + const mostCommonLogs: string = mostCommonValue( + responseData.map( + (r: { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + logs: any; + }) => r.logs + ) + ); + + // -- 4. combine claims + const claimsList = getClaimsList(responseData); + const claims = claimsList.length > 0 ? getClaims(claimsList) : undefined; + + // ========== Result ========== + const returnVal: ExecuteJsResponse = { + claims, + signatures: hasSignedData + ? { + [key]: await getSignatures({ + requestId, + networkPubKeySet: this.networkPubKeySet, + threshold: _params.useSingleNode ? 1 : this._getThreshold(), + signedMessageShares: flattenedSignedMessageShares, + }), + } + : {}, + // decryptions: [], + response: parsedResponse, + logs: mostCommonLogs, + }; + + this._litNodeLogger.info({ msg: 'returnVal', returnVal }); + + return returnVal; + }; + + /** + * Generates a promise by sending a command to the Lit node + * + * @param url - The URL to send the command to. + * @param params - The parameters to include in the command. + * @param requestId - The ID of the request. + * @returns A promise that resolves with the response from the server. + */ + generatePromise = async ( + url: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + params: any, + requestId: string + ): Promise => { + return await this._sendCommandToNode({ + url, + data: params, + requestId, + }); + }; + + /** + * Use PKP to sign + * + * @param { JsonPkpSignSdkParams } params + * @param params.toSign - The data to sign + * @param params.pubKey - The public key to sign with + * @param params.sessionSigs - The session signatures to use + * @param params.authMethods - (optional) The auth methods to use + */ + pkpSign = async (params: JsonPkpSignSdkParams): Promise => { + // -- validate required params + const requiredParamKeys = ['toSign', 'pubKey', 'authContext']; + + (requiredParamKeys as (keyof JsonPkpSignSdkParams)[]).forEach((key) => { + if (!params[key]) { + throw new ParamNullError( + { + info: { + params, + key, + }, + }, + `"%s" cannot be undefined, empty, or null. Please provide a valid value.`, + key + ); + } + }); + + const requestId = this._getNewRequestId(); + + const targetNodePrices = await this.getMaxPricesForNodeProduct({ + product: 'SIGN', + userMaxPrice: params.userMaxPrice, + }); + + const sessionSigs = await this._getSessionSigs({ + pkpPublicKey: params.pubKey, + ...params.authContext, + userMaxPrices: targetNodePrices, + }); + + // validate session sigs + const checkedSessionSigs = validateSessionSigs(sessionSigs); + + if (checkedSessionSigs.isValid === false) { + throw new InvalidSessionSigs( + {}, + `Invalid sessionSigs. Errors: ${checkedSessionSigs.errors}` + ); + } + + // ========== Get Node Promises ========== + // Handle promises for commands sent to Lit nodes + + const targetNodeUrls = targetNodePrices.map(({ url }) => url); + const nodePromises = this._getNodePromises( + targetNodeUrls, + (url: string) => { + // -- get the session sig from the url key + const sessionSig = this._getSessionSigByUrl({ + sessionSigs, + url, + }); + + const reqBody: JsonPkpSignRequest = { + toSign: normalizeArray(params.toSign), + pubkey: hexPrefixed(params.pubKey), + authSig: sessionSig, + + // -- optional params - no longer allowed in >= Naga? + // ...(params.authContext.authMethods && + // params.authContext.authMethods.length > 0 && { + // authMethods: params.authContext.authMethods, + // }), + + // nodeSet: thresholdNodeSet, + nodeSet: this._getNodeSet(targetNodeUrls), + signingScheme: 'EcdsaK256Sha256', + }; + + this._litNodeLogger.info({ requestId, reqBody }); + + const urlWithPath = composeLitUrl({ + url, + endpoint: LIT_ENDPOINT.PKP_SIGN, + }); + + return this.generatePromise(urlWithPath, reqBody, requestId); + } + ); + + const res = await this._handleNodePromises( + nodePromises, + requestId, + this._getThreshold() + ); + + // ========== Handle Response ========== + if (!res.success) { + this._throwNodeError(res, requestId); + } + + const responseData = (res as SuccessNodePromises) + .values; + + this._litNodeLogger.info({ + requestId, + responseData, + }); + + // clean up the response data (as there are double quotes & snake cases in the response) + const signedMessageShares = parsePkpSignResponse(responseData); + + try { + const signatures = await getSignatures({ + requestId, + networkPubKeySet: this.networkPubKeySet, + threshold: this._getThreshold(), + signedMessageShares: signedMessageShares, + }); + + this._litNodeLogger.info({ requestId, signatures }); + + return signatures; + } catch (e) { + this._litNodeLogger.error({ msg: 'Error getting signature', error: e }); + throw e; + } + }; + + /** + * Encrypt data using the LIT network public key. + * See more: https://developer.litprotocol.com/sdk/access-control/encryption + * + * @param { EncryptSdkParams } params + * @param params.dataToEncrypt - The data to encrypt + * @param params.accessControlConditions - (optional) The access control conditions for the data + * @param params.evmContractConditions - (optional) The EVM contract conditions for the data + * @param params.solRpcConditions - (optional) The Solidity RPC conditions for the data + * @param params.unifiedAccessControlConditions - (optional) The unified access control conditions for the data + * + * @return { Promise } The encrypted ciphertext and the hash of the data + * + * @throws { Error } if the LIT node client is not ready + * @throws { Error } if the subnetPubKey is null + */ + encrypt = async (params: EncryptSdkParams): Promise => { + // ========== Validate Params ========== + const _params = applySchemaWithValidation( + 'encrypt', + params, + EncryptRequestSchema + ); + + // -- validate if it's ready + if (!this.ready) { + throw new LitNodeClientNotReadyError( + {}, + '6 LitNodeClient is not ready. Please call await litNodeClient.connect() first.' + ); + } + + // -- validate if this.subnetPubKey is null + if (!this.subnetPubKey) { + throw new LitNodeClientNotReadyError({}, 'subnetPubKey cannot be null'); + } + + // ========== Hashing Access Control Conditions ========= + // hash the access control conditions + const hashOfConditions: ArrayBuffer | undefined = + await getHashedAccessControlConditions(_params); + + if (!hashOfConditions) { + throw new InvalidArgumentException( + { + info: { + params, + }, + }, + 'You must provide either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions' + ); + } + + const hashOfConditionsStr = Buffer.from( + new Uint8Array(hashOfConditions) + ).toString('hex'); + + // ========== Hashing Private Data ========== + // hash the private data + const hashOfPrivateData = await crypto.subtle.digest( + 'SHA-256', + params.dataToEncrypt + ); + const hashOfPrivateDataStr = Buffer.from( + new Uint8Array(hashOfPrivateData) + ).toString('hex'); + + // ========== Assemble identity parameter ========== + const identityParam = this._getIdentityParamForEncryption( + hashOfConditionsStr, + hashOfPrivateDataStr + ); + + // ========== Encrypt ========== + const ciphertext = await encrypt( + this.subnetPubKey, + params.dataToEncrypt, + Buffer.from(identityParam, 'utf8') + ); + + return { ciphertext, dataToEncryptHash: hashOfPrivateDataStr }; + }; + + /** + * + * Decrypt ciphertext with the LIT network. + * + */ + decrypt = async (params: DecryptRequest): Promise => { + // -- validate params + const { authContext, chain, ciphertext, dataToEncryptHash, userMaxPrice } = + applySchemaWithValidation('decrypt', params, DecryptRequestSchema); + + // -- validate if it's ready + if (!this.ready) { + throw new LitNodeClientNotReadyError( + {}, + '6 LitNodeClient is not ready. Please call await litNodeClient.connect() first.' + ); + } + + // -- validate if this.subnetPubKey is null + if (!this.subnetPubKey) { + throw new LitNodeClientNotReadyError({}, 'subnetPubKey cannot be null'); + } + + // ========== Hashing Access Control Conditions ========= + // hash the access control conditions + const hashOfConditions: ArrayBuffer | undefined = + await getHashedAccessControlConditions(params); + + if (!hashOfConditions) { + throw new InvalidArgumentException( + { + info: { + params, + }, + }, + 'You must provide either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions' + ); + } + + const hashOfConditionsStr = Buffer.from( + new Uint8Array(hashOfConditions) + ).toString('hex'); + + // ========== Formatting Access Control Conditions ========= + const { + error, + formattedAccessControlConditions, + formattedEVMContractConditions, + formattedSolRpcConditions, + formattedUnifiedAccessControlConditions, + }: FormattedMultipleAccs = getFormattedAccessControlConditions(params); + + if (error) { + throw new InvalidArgumentException( + { + info: { + params, + }, + }, + 'You must provide either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions' + ); + } + + // ========== Assemble identity parameter ========== + const identityParam = this._getIdentityParamForEncryption( + hashOfConditionsStr, + dataToEncryptHash + ); + + this._litNodeLogger.info({ msg: 'identityParam', identityParam }); + + const userMaxPrices = await this.getMaxPricesForNodeProduct({ + product: 'DECRYPTION', + userMaxPrice, + }); + + const sessionSigs = await this._getSessionSigs({ + ...authContext, + userMaxPrices, + }); + + // ========== Get Network Signature ========== + const requestId = this._getNewRequestId(); + const nodePromises = this._getNodePromises( + userMaxPrices.map(({ url }) => url), + (url: string) => { + // -- if session key is available, use it + const authSigToSend = sessionSigs[url]; + + if (!authSigToSend) { + throw new InvalidArgumentException( + { + info: { + params, + }, + }, + 'authSig is required' + ); + } + + const reqBody: EncryptionSignRequest = { + accessControlConditions: formattedAccessControlConditions, + evmContractConditions: formattedEVMContractConditions, + solRpcConditions: formattedSolRpcConditions, + unifiedAccessControlConditions: + formattedUnifiedAccessControlConditions, + dataToEncryptHash, + chain, + authSig: authSigToSend, + epoch: this.currentEpochNumber!, + }; + + const urlWithParh = composeLitUrl({ + url, + endpoint: LIT_ENDPOINT.ENCRYPTION_SIGN, + }); + + return this.generatePromise(urlWithParh, reqBody, requestId); + } + ); + + // -- resolve promises + const res = await this._handleNodePromises( + nodePromises, + requestId, + this._getThreshold() + ); + + // -- case: promises rejected + if (!res.success) { + this._throwNodeError(res, requestId); + } + + const signatureShares: NodeBlsSigningShare[] = ( + res as SuccessNodePromises + ).values; + + this._litNodeLogger.info({ requestId, signatureShares }); + + // ========== Result ========== + const decryptedData = await this._decryptWithSignatureShares( + this.subnetPubKey, + Buffer.from(identityParam, 'utf8'), + ciphertext, + signatureShares + ); + + return { decryptedData }; + }; + + private _getIdentityParamForEncryption = ( + hashOfConditionsStr: string, + hashOfPrivateDataStr: string + ): string => { + return new LitAccessControlConditionResource( + `${hashOfConditionsStr}/${hashOfPrivateDataStr}` + ).getResourceKey(); + }; + + /** ============================== SESSION ============================== */ + + /** + * Sign a session public key using a PKP, which generates an authSig. + * @returns {Object} An object containing the resulting signature. + */ + private _signSessionKey = async ( + params: SignSessionKeyProp + ): Promise => { + this._litNodeLogger.info({ msg: `[signSessionKey] params:`, params }); + + // ========== Validate Params ========== + // -- validate: If it's NOT ready + if (!this.ready) { + throw new LitNodeClientNotReadyError( + {}, + '[signSessionKey] ]LitNodeClient is not ready. Please call await litNodeClient.connect() first.' + ); + } + + // -- construct SIWE message that will be signed by node to generate an authSig. + const _expiration = + params.expiration || + new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(); + + // Try to get it from local storage, if not generates one~ + const sessionKey: SessionKeyPair = + params.sessionKey ?? this._getSessionKey(); + const sessionKeyUri = this._getSessionKeyUri(sessionKey.publicKey); + + this._litNodeLogger.info( + `[signSessionKey] sessionKeyUri is not found in params, generating a new one`, + sessionKeyUri + ); + + if (!sessionKeyUri) { + throw new InvalidParamType( + { + info: { + params, + }, + }, + '[signSessionKey] sessionKeyUri is not defined. Please provide a sessionKeyUri or a sessionKey.' + ); + } + + // Compute the address from the public key if it's provided. Otherwise, the node will compute it. + const pkpEthAddress = (function () { + // prefix '0x' if it's not already prefixed + params.pkpPublicKey = hexPrefixed(params.pkpPublicKey!); + + if (params.pkpPublicKey) return computeAddress(params.pkpPublicKey); + + // This will be populated by the node, using dummy value for now. + return '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'; + })(); + + let siwe_statement = 'Lit Protocol PKP session signature'; + if (params.statement) { + siwe_statement += ' ' + params.statement; + this._litNodeLogger.info( + `[signSessionKey] statement found in params: "${params.statement}"` + ); + } + + let siweMessage; + + const siweParams = { + domain: params?.domain || globalThis.location?.host || 'litprotocol.com', + walletAddress: pkpEthAddress, + statement: siwe_statement, + uri: sessionKeyUri, + version: '1', + chainId: params.chainId ?? 1, + expiration: _expiration, + nonce: await this.getLatestBlockhash(), + }; + + if (params.resourceAbilityRequests) { + siweMessage = await createSiweMessageWithRecaps({ + ...siweParams, + resources: params.resourceAbilityRequests, + litNodeClient: this, + }); + } else { + siweMessage = await createSiweMessage(siweParams); + } + + // This may seem a bit weird because we usually only care about prices for sessionSigs... + // But this also ensures we use the cheapest nodes and takes care of getting the minNodeCount of node URLs for the operation + const targetNodePrices = await this.getMaxPricesForNodeProduct({ + product: 'LIT_ACTION', + }); + + // ========== Get Node Promises ========== + // -- fetch shares from nodes + const body: JsonSignSessionKeyRequestV2 = { + nodeSet: this._getNodeSet(targetNodePrices.map(({ url }) => url)), + sessionKey: sessionKeyUri, + authMethods: params.authMethods, + ...(params?.pkpPublicKey && { pkpPublicKey: params.pkpPublicKey }), + siweMessage: siweMessage, + curveType: LIT_CURVE.BLS, + + // -- custom auths + ...(params?.litActionIpfsId && { + litActionIpfsId: params.litActionIpfsId, + }), + ...(params?.litActionCode && { code: params.litActionCode }), + ...(params?.jsParams && { jsParams: params.jsParams }), + ...(this.currentEpochNumber && { epoch: this.currentEpochNumber }), + signingScheme: LIT_CURVE.BLS, + }; + + this._litNodeLogger.info({ msg: `[signSessionKey] body:`, body }); + + const requestId = this._getNewRequestId(); + this._litNodeLogger.info({ requestId, signSessionKeyBody: body }); + + const targetNodeUrls = targetNodePrices.map(({ url }) => url); + const nodePromises = this._getNodePromises( + targetNodeUrls, + (url: string) => { + const reqBody: JsonSignSessionKeyRequestV1 = body; + + const urlWithPath = composeLitUrl({ + url, + endpoint: LIT_ENDPOINT.SIGN_SESSION_KEY, + }); + + return this.generatePromise(urlWithPath, reqBody, requestId); + } + ); + + // -- resolve promises + let res; + try { + res = await this._handleNodePromises( + nodePromises, + requestId, + this._getThreshold() + ); + this._litNodeLogger.info({ msg: 'signSessionKey node promises', res }); + } catch (e) { + throw new UnknownError( + { + info: { + requestId, + }, + cause: e, + }, + 'Error when handling node promises' + ); + } + + this._litNodeLogger.info({ requestId, handleNodePromisesRes: res }); + + // -- case: promises rejected + if (!res.success) { + this._throwNodeError(res as RejectedNodePromises, requestId); + return {} as SignSessionKeyResponse; + } + + const responseData: BlsResponseData[] = res.values as BlsResponseData[]; + this._litNodeLogger.info({ + requestId, + responseData, + }); + + // ========== Extract shares from response data ========== + // -- 1. combine signed data as a list, and get the signatures from it + const curveType = responseData[0]?.curveType; + + if (curveType === 'ECDSA') { + throw new LitNetworkError( + { + info: { + requestId, + responseData, + }, + }, + 'The ECDSA curve type is not supported in this version.' + ); + } + + this._litNodeLogger.info(`[signSessionKey] curveType is "${curveType}"`); + + const signedDataList = responseData.map((s) => s.dataSigned); + + if (signedDataList.length <= 0) { + const err = `[signSessionKey] signedDataList is empty.`; + this._litNodeLogger.info(err); + throw new InvalidSignatureError( + { + info: { + requestId, + responseData, + signedDataList, + }, + }, + err + ); + } + + this._litNodeLogger.info({ + requestId, + signedDataList, + }); + + // -- checking if we have enough shares. + const validatedSignedDataList = this._validateSignSessionKeyResponseData( + responseData, + requestId, + this._getThreshold() + ); + + const blsSignedData: BlsResponseData[] = validatedSignedDataList; + + const sigType = mostCommonValue(blsSignedData.map((s) => s.curveType)); + this._litNodeLogger.info(`[signSessionKey] sigType:`, sigType); + + const signatureShares = getBlsSignatures(blsSignedData); + + this._litNodeLogger.info( + `[signSessionKey] signatureShares:`, + signatureShares + ); + + const blsCombinedSignature = await combineSignatureShares(signatureShares); + + this._litNodeLogger.info( + `[signSessionKey] blsCombinedSignature:`, + blsCombinedSignature + ); + + const publicKey = removeHexPrefix(params.pkpPublicKey); + this._litNodeLogger.info(`[signSessionKey] publicKey:`, publicKey); + + const dataSigned = mostCommonValue(blsSignedData.map((s) => s.dataSigned)); + this._litNodeLogger.info(`[signSessionKey] dataSigned:`, dataSigned); + + const mostCommonSiweMessage = mostCommonValue( + blsSignedData.map((s) => s.siweMessage) + ); + + this._litNodeLogger.info( + `[signSessionKey] mostCommonSiweMessage:`, + mostCommonSiweMessage + ); + + const signedMessage = normalizeAndStringify(mostCommonSiweMessage!); + + this._litNodeLogger.info(`[signSessionKey] signedMessage:`, signedMessage); + + const signSessionKeyRes: SignSessionKeyResponse = { + authSig: { + sig: JSON.stringify({ + ProofOfPossession: blsCombinedSignature, + }), + algo: 'LIT_BLS', + derivedVia: 'lit.bls', + signedMessage, + address: computeAddress(hexPrefixed(publicKey)), + }, + pkpPublicKey: publicKey, + }; + + return signSessionKeyRes; + }; + + getSignSessionKeyShares = async ( + url: string, + params: { + body: { + sessionKey: string; + authMethods: AuthMethod[]; + pkpPublicKey?: string; + authSig?: AuthSig; + siweMessage: string; + }; + }, + requestId: string + ) => { + this._litNodeLogger.info('getSignSessionKeyShares'); + const urlWithPath = composeLitUrl({ + url, + endpoint: LIT_ENDPOINT.SIGN_SESSION_KEY, + }); + return await this._sendCommandToNode({ + url: urlWithPath, + data: params.body, + requestId, + }); + }; + + getMaxPricesForNodeProduct = async ({ + userMaxPrice, + product, + }: { + userMaxPrice?: bigint; + product: keyof typeof PRODUCT_IDS; + }) => { + this._litNodeLogger.info({ + msg: 'getMaxPricesForNodeProduct(): Product', + product, + }); + const getUserMaxPrice = () => { + if (userMaxPrice) { + this._litNodeLogger.info({ + msg: 'getMaxPricesForNodeProduct(): User provided maxPrice of userMaxPrice', + userMaxPrice, + }); + return userMaxPrice; + } + + if (this.defaultMaxPriceByProduct[product] === -1n) { + this._litNodeLogger.info( + `getMaxPricesForNodeProduct(): No user-provided maxPrice and no defaultMaxPrice set for ${product}; setting to max value` + ); + + return 340_282_366_920_938_463_463_374_607_431_768_211_455n; // Rust U128 max + } + return this.defaultMaxPriceByProduct[product]; + }; + + return getMaxPricesForNodeProduct({ + nodePrices: await this._getNodePrices(), + userMaxPrice: getUserMaxPrice(), + productId: PRODUCT_IDS[product], + numRequiredNodes: this._getThreshold(), + }); + }; + + /** + * + * Retrieves or generates sessionSigs (think access token) for accessing Lit Network resources. + * + * How this function works on a high level: + * 1. Generate or retrieve [session keys](https://v6-api-doc-lit-js-sdk.vercel.app/interfaces/types_src.SessionKeyPair.html) (a public and private key pair) + * 2. Generate or retrieve the [`AuthSig`](https://v6-api-doc-lit-js-sdk.vercel.app/interfaces/types_src.AuthSig.html) that specifies the session [abilities](https://v6-api-doc-lit-js-sdk.vercel.app/enums/auth_helpers_src.LitAbility.html) + * 3. Sign the specific resources with the session key + * + * The process follows these steps: + * 1. Retrieves or generates a session key pair (Ed25519) for the user's device. The session key is either fetched from local storage or newly created if not found. The key does not expire. + * 2. Generates an authentication signature (`authSig`) by signing an ERC-5573 "Sign-in with Ethereum" message, which includes resource ability requests, capabilities, expiration, the user's device session public key, and a nonce. The `authSig` is retrieved from local storage, and if it has expired, the user will be prompted to re-sign. + * 3. Uses the session private key to sign the session public key along with the resource ability requests, capabilities, issuedAt, and expiration details. This creates a device-generated signature. + * 4. Constructs the session signatures (`sessionSigs`) by including the device-generated signature and the original message. The `sessionSigs` provide access to Lit Network features such as `executeJs` and `pkpSign`. + * + * See Sequence Diagram: https://www.plantuml.com/plantuml/uml/VPH1RnCn48Nl_XLFlT1Av00eGkm15QKLWY8K9K9SO-rEar4sjcLFalBl6NjJAuaMRl5utfjlPjQvJsAZx7UziQtuY5-9eWaQufQ3TOAR77cJy407Rka6zlNdHTRouUbIzSEtjiTIBUswg5v_NwMnuAVlA9KKFPN3I0x9qSSj7bqNF3iPykl9c4o9oUSJMuElv2XQ8IHAYRt3bluWM8wuVUpUJwVlFjsP8JUh5B_1DyV2AYdD6DjhLsTQTaYd3W3ad28SGWqM997fG5ZrB9DJqOaALuRwH1TMpik8tIYze-E8OrPKU5I6cMqtem2kCqOhr4vdaRAvtSjcoMkTo68scKu_Vi1EPMfrP_xVtj7sFMaHNg-6GVqk0MW0z18uKdVULTvDWtdqko28b7KktvUB2hKOBd1asU2QgDfTzrj7T4bLPdv6TR0zLwPQKkkZpIRTY4CTMbrBpg_VKuXyi49beUAHqIlirOUrL2zq9JPPdpRR5OMLVQGoGlLcjyRyQNv6MHz4W_fG42W--xWhUfNyOxiLL1USS6lRLeyAkYLNjrkVJuClm_qp5I8Lq0krUw7lwIt2DgY9oiozrjA_Yhy0 + * + * Note: When generating session signatures for different PKPs or auth methods, + * be sure to call disconnectWeb3 to clear auth signatures stored in local storage + * + * @param { AuthenticationContext } params + * + * An example of how this function is used can be found in the Lit developer-guides-code repository [here](https://github.com/LIT-Protocol/developer-guides-code/tree/master/session-signatures/getSessionSigs). + * + */ + private _getSessionSigs = async ( + params: AuthenticationContext & { + userMaxPrices: { url: string; price: bigint }[]; + } + ): Promise => { + // -- prepare + // Try to get it from local storage, if not generates one~ + const sessionKey = params.sessionKey ?? this._getSessionKey(); + + const sessionKeyUri = this._getSessionKeyUri(sessionKey.publicKey); + + // First get or generate the session capability object for the specified resources. + const sessionCapabilityObject = params.sessionCapabilityObject + ? params.sessionCapabilityObject + : await generateSessionCapabilityObjectWithWildcards( + params.resourceAbilityRequests.map((r) => r.resource) + ); + const expiration = params.expiration || getExpiration(); + + // -- (TRY) to get the wallet signature + let authSig = await this._getWalletSig({ + authNeededCallback: params.authNeededCallback, + chain: params.chain || 'ethereum', + sessionCapabilityObject, + switchChain: params.switchChain, + expiration: expiration, + sessionKey: sessionKey, + sessionKeyUri: sessionKeyUri, + nonce: await this.getLatestBlockhash(), + + // -- for recap + resourceAbilityRequests: params.resourceAbilityRequests, + + // -- optional fields + ...(params.litActionCode && { litActionCode: params.litActionCode }), + ...(params.litActionIpfsId && { + litActionIpfsId: params.litActionIpfsId, + }), + ...(params.jsParams && { jsParams: params.jsParams }), + }); + + const needToResignSessionKey = await this._checkNeedToResignSessionKey({ + authSig, + sessionKeyUri, + resourceAbilityRequests: params.resourceAbilityRequests, + }); + + // -- (CHECK) if we need to resign the session key + if (needToResignSessionKey) { + this._litNodeLogger.info('need to re-sign session key. Signing...'); + authSig = await this._authCallbackAndUpdateStorageItem({ + authCallback: params.authNeededCallback, + authCallbackParams: { + chain: params.chain || 'ethereum', + statement: sessionCapabilityObject.statement, + resources: [sessionCapabilityObject.encodeAsSiweResource()], + switchChain: params.switchChain, + expiration, + sessionKey: sessionKey, + uri: sessionKeyUri, + nonce: await this.getLatestBlockhash(), + resourceAbilityRequests: params.resourceAbilityRequests, + + // -- optional fields + ...(params.litActionCode && { litActionCode: params.litActionCode }), + ...(params.litActionIpfsId && { + litActionIpfsId: params.litActionIpfsId, + }), + ...(params.jsParams && { jsParams: params.jsParams }), + }, + }); + } + + if ( + authSig.address === '' || + authSig.derivedVia === '' || + authSig.sig === '' || + authSig.signedMessage === '' + ) { + throw new WalletSignatureNotFoundError( + { + info: { + authSig, + }, + }, + 'No wallet signature found' + ); + } + + // ===== AFTER we have Valid Signed Session Key ===== + // - Let's sign the resources with the session key + // - 5 minutes is the default expiration for a session signature + // - Because we can generate a new session sig every time the user wants to access a resource without prompting them to sign with their wallet + const sessionExpiration = + expiration ?? new Date(Date.now() + 1000 * 60 * 5).toISOString(); + + const capabilities = params.capabilityAuthSigs + ? [ + ...(params.capabilityAuthSigs ?? []), + params.capabilityAuthSigs, + authSig, + ] + : [...(params.capabilityAuthSigs ?? []), authSig]; + + // This is the template that will be combined with the node address as a single object, then signed by the session key + // so that the node can verify the session signature + const sessionSigningTemplate = { + sessionKey: sessionKey.publicKey, + resourceAbilityRequests: params.resourceAbilityRequests, + capabilities, + issuedAt: new Date().toISOString(), + expiration: sessionExpiration, + }; + + const sessionSigs: SessionSigsMap = {}; + + params.userMaxPrices.forEach(({ url: nodeAddress, price }) => { + const toSign: SessionSigningTemplate = { + ...sessionSigningTemplate, + nodeAddress, + maxPrice: price.toString(), + }; + + this._litNodeLogger.info( + `Setting maxprice for ${nodeAddress} to `, + price.toString() + ); + + const signedMessage = JSON.stringify(toSign); + + const uint8arrayMessage = Buffer.from(signedMessage, 'utf8'); + const signature = ed25519.sign(uint8arrayMessage, sessionKey.secretKey); + + sessionSigs[nodeAddress] = { + sig: Buffer.from(signature).toString('hex'), + derivedVia: 'litSessionSignViaNacl', + signedMessage: signedMessage, + address: sessionKey.publicKey, + algo: 'ed25519', + }; + }); + + this._litNodeLogger.info({ msg: 'sessionSigs', sessionSigs }); + + try { + const formattedSessionSigs = formatSessionSigs( + JSON.stringify(sessionSigs) + ); + this._litNodeLogger.info(formattedSessionSigs); + } catch (e) { + // swallow error + this._litNodeLogger.info({ + msg: 'Error formatting session signatures', + e, + }); } - // -- execute - const storageConfig = JSON.parse(storageConfigOrError.result as string); - // this.config = override(this.config, storageConfig); - this.config = { ...this.config, ...storageConfig }; + return sessionSigs; + }; + + /** + * Retrieves the PKP sessionSigs. + * + * @param params - The parameters for retrieving the PKP sessionSigs. + * @returns A promise that resolves to the PKP sessionSigs. + * @throws An error if any of the required parameters are missing or if `litActionCode` and `ipfsId` exist at the same time. + */ + getPkpAuthContext = (params: AuthenticationContext) => { + const chain = params?.chain || 'ethereum'; + + return { + chain, + ...params, + authNeededCallback: async (props: AuthCallbackParams) => { + // -- validate + if (!props.expiration) { + throw new ParamsMissingError( + { + info: { + props, + }, + }, + '[getPkpSessionSigs/callback] expiration is required' + ); + } + + if (!props.resources) { + throw new ParamsMissingError( + { + info: { + props, + }, + }, + '[getPkpSessionSigs/callback]resources is required' + ); + } + + if (!props.resourceAbilityRequests) { + throw new ParamsMissingError( + { + info: { + props, + }, + }, + '[getPkpSessionSigs/callback]resourceAbilityRequests is required' + ); + } + + // lit action code and ipfs id cannot exist at the same time + if (props.litActionCode && props.litActionIpfsId) { + throw new UnsupportedMethodError( + { + info: { + props, + }, + }, + '[getPkpSessionSigs/callback]litActionCode and litActionIpfsId cannot exist at the same time' + ); + } + + // Check if IPFS options are provided and if the code should be fetched from IPFS and overwrite the current code. + // This will fetch the code from the specified IPFS gateway using the provided ipfsId, + // and update the params with the fetched code, removing the ipfsId afterward. + const overwriteCode = + params.ipfsOptions?.overwriteCode || + GLOBAL_OVERWRITE_IPFS_CODE_BY_NETWORK[this.config.litNetwork]; + + if (overwriteCode && props.litActionIpfsId) { + const code = await this._getFallbackIpfsCode( + params.ipfsOptions?.gatewayUrl, + props.litActionIpfsId + ); + + props = { + ...props, + litActionCode: code, + litActionIpfsId: undefined, + }; + } + + /** + * We must provide an empty array for authMethods even if we are not using any auth methods. + * So that the nodes can serialize the request correctly. + */ + const authMethods = params.authMethods || []; + + const response = await this._signSessionKey({ + sessionKey: props.sessionKey, + statement: props.statement || 'Some custom statement.', + authMethods: [...authMethods], + pkpPublicKey: params.pkpPublicKey, + expiration: props.expiration, + resources: props.resources, + chainId: 1, + + // -- required fields + resourceAbilityRequests: props.resourceAbilityRequests, + + // -- optional fields + ...(props.litActionCode && { litActionCode: props.litActionCode }), + ...(props.litActionIpfsId && { + litActionIpfsId: props.litActionIpfsId, + }), + ...(props.jsParams && { jsParams: props.jsParams }), + }); + + return response.authSig; + }, + }; + }; + + /** + * + * Get Session Key URI eg. lit:session:0x1234 + * + * @param publicKey is the public key of the session key + * @returns { string } the session key uri + */ + private _getSessionKeyUri = (publicKey: string): string => { + return SIWE_URI_PREFIX.SESSION_KEY + publicKey; }; + + /** + * Authenticates an Auth Method for claiming a Programmable Key Pair (PKP). + * A {@link MintCallback} can be defined for custom on chain interactions + * by default the callback will forward to a relay server for minting on chain. + * @param {ClaimKeyRequest} params an Auth Method and {@link MintCallback} + * @returns {Promise} + */ + async claimKeyId( + params: ClaimRequest + ): Promise { + if (!this.ready) { + const message = + 'LitNodeClient is not ready. Please call await litNodeClient.connect() first.'; + throw new LitNodeClientNotReadyError({}, message); + } + + if (params.authMethod.authMethodType == AUTH_METHOD_TYPE.WebAuthn) { + throw new LitNodeClientNotReadyError( + {}, + 'Unsupported auth method type. Webauthn, and Lit Actions are not supported for claiming' + ); + } + + const requestId = this._getNewRequestId(); + + // This may seem a bit weird because we usually only care about prices for sessionSigs... + // But this also ensures we use the cheapest nodes and takes care of getting the minNodeCount of node URLs for the operation + const targetNodePrices = await this.getMaxPricesForNodeProduct({ + product: 'LIT_ACTION', + }); + + const targetNodeUrls = targetNodePrices.map(({ url }) => url); + + const nodePromises = this._getNodePromises( + targetNodeUrls, + (url: string) => { + if (!params.authMethod) { + throw new ParamsMissingError( + { + info: { + params, + }, + }, + 'authMethod is required' + ); + } + + const reqBody: JsonPKPClaimKeyRequest = { + authMethod: params.authMethod, + }; + + const urlWithPath = composeLitUrl({ + url, + endpoint: LIT_ENDPOINT.PKP_CLAIM, + }); + + return this.generatePromise(urlWithPath, reqBody, requestId); + } + ); + + const responseData = await this._handleNodePromises( + nodePromises, + requestId, + this._getThreshold() + ); + + if (responseData.success) { + const nodeSignatures: Signature[] = responseData.values.map((r) => { + const sig = ethers.utils.splitSignature(`0x${r.signature}`); + return { + r: sig.r, + s: sig.s, + v: sig.v, + }; + }); + + this._litNodeLogger.info({ + requestId, + responseData, + }); + + const derivedKeyId = responseData.values[0].derivedKeyId; + + const pubkey = await this.computeHDPubKey(derivedKeyId); + this._litNodeLogger.info({ + requestId, + msg: `pubkey ${pubkey} derived from key id ${derivedKeyId}`, + }); + + const relayParams = params as ClaimRequest<'relay'>; + + let mintTx = ''; + if (params.mintCallback && 'signer' in params) { + mintTx = await params.mintCallback( + { + derivedKeyId, + authMethodType: params.authMethod.authMethodType, + signatures: nodeSignatures, + pubkey, + signer: (params as ClaimRequest<'client'>).signer, + ...relayParams, + }, + this.config.litNetwork + ); + } else { + mintTx = await defaultMintClaimCallback( + { + derivedKeyId, + authMethodType: params.authMethod.authMethodType, + signatures: nodeSignatures, + pubkey, + ...relayParams, + }, + this.config.litNetwork + ); + } + + return { + signatures: nodeSignatures, + claimedKeyId: derivedKeyId, + pubkey, + mintTx, + }; + } else { + throw new UnknownError( + { + info: { + requestId, + responseData, + }, + }, + `Claim request has failed. Request trace id: lit_%s`, + requestId + ); + } + } + + /** + * Note: ✨ This is to check data integrity of the response from the signSessionKey endpoint. + * As sometimes the response data structure has changed and we need to update the required fields. + * Validates the response data from the signSessionKey endpoint. + * Each response data item must have all required fields and valid ProofOfPossession. + * + * @param responseData - Array of BlsResponseData to validate + * @param requestId - Request ID for logging and error reporting + * @param threshold - Minimum number of valid responses needed + * @returns Filtered array of valid BlsResponseData + * @throws InvalidSignatureError if validation fails + */ + private _validateSignSessionKeyResponseData( + responseData: BlsResponseData[], + requestId: string, + threshold: number + ): BlsResponseData[] { + // each of this field cannot be empty + const requiredFields = [ + 'signatureShare', + 'curveType', + 'siweMessage', + 'dataSigned', + 'blsRootPubkey', + 'result', + ]; + + // -- checking if we have enough shares. + const validatedSignedDataList = responseData + .map((data: BlsResponseData) => { + // check if all required fields are present + for (const field of requiredFields) { + const key: keyof BlsResponseData = field as keyof BlsResponseData; + + if ( + data[key] === undefined || + data[key] === null || + data[key] === '' + ) { + this._litNodeLogger.info( + `Invalid signed data. "${field}" is missing. Not a problem, we only need ${threshold} nodes to sign the session key.` + ); + return null; + } + } + + if (!data.signatureShare.ProofOfPossession) { + const err = `Invalid signed data. "ProofOfPossession" is missing.`; + this._litNodeLogger.info(err); + throw new InvalidSignatureError( + { + info: { + requestId, + responseData, + data, + }, + }, + err + ); + } + + return data; + }) + .filter((item) => item !== null); + + this._litNodeLogger.info({ + requestId, + validatedSignedDataList, + }); + this._litNodeLogger.info({ + requestId, + msg: 'minimum threshold', + threshold, + }); + + if (validatedSignedDataList.length < threshold) { + throw new InvalidSignatureError( + { + info: { + requestId, + responseData, + validatedSignedDataList, + threshold, + }, + }, + `not enough nodes signed the session key. Expected ${threshold}, got ${validatedSignedDataList.length}` + ); + } + + return validatedSignedDataList as BlsResponseData[]; + } } diff --git a/packages/lit-node-client/tsconfig.json b/packages/lit-node-client/tsconfig.json index f5b85657a8..afa40e9075 100644 --- a/packages/lit-node-client/tsconfig.json +++ b/packages/lit-node-client/tsconfig.json @@ -7,7 +7,8 @@ "noImplicitOverride": true, "noPropertyAccessFromIndexSignature": true, "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true + "noFallthroughCasesInSwitch": true, + "lib": ["ES2021", "DOM"] }, "files": [], "include": [], diff --git a/packages/lit-node-client/tsconfig.lib.json b/packages/lit-node-client/tsconfig.lib.json index 21bd635299..e85ef50f65 100644 --- a/packages/lit-node-client/tsconfig.lib.json +++ b/packages/lit-node-client/tsconfig.lib.json @@ -5,9 +5,6 @@ "declaration": true, "types": [] }, - "include": [ - "**/*.ts", - "../auth-browser/src/lib/chains/lit-connect-modal.d.ts" - ], + "include": ["**/*.ts"], "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"] } diff --git a/packages/lit-node-client/tsconfig.spec.json b/packages/lit-node-client/tsconfig.spec.json index df5eec354a..a2f7dd30d7 100644 --- a/packages/lit-node-client/tsconfig.spec.json +++ b/packages/lit-node-client/tsconfig.spec.json @@ -2,15 +2,9 @@ "extends": "./tsconfig.json", "compilerOptions": { "outDir": "../../dist/out-tsc", - "module": "ES2022", + "module": "commonjs", "types": ["jest", "node"], "allowJs": true }, - "include": [ - "jest.config.ts", - "**/*.test.ts", - "**/*.spec.ts", - "**/*.d.ts", - "../auth-browser/src/lib/chains/lit-connect-modal.d.ts" - ] + "include": ["jest.config.ts", "**/*.test.ts", "**/*.spec.ts", "**/*.d.ts"] } diff --git a/packages/logger/README.md b/packages/logger/README.md index 671f265971..e20602eadc 100644 --- a/packages/logger/README.md +++ b/packages/logger/README.md @@ -1,6 +1,6 @@ # logger -This library was generated with [Nx](https://nx.dev). +This package provides a centralized logging utility for the Lit Protocol SDK, offering structured logging capabilities across all packages. It is based in pino logger for minimal overhead and enables consistent log formatting, level-based filtering, and standardized error reporting throughout the Lit Protocol ecosystem. ## Building diff --git a/packages/logger/src/lib/logger.spec.ts b/packages/logger/src/lib/logger.spec.ts index a033af9426..0cd4983395 100644 --- a/packages/logger/src/lib/logger.spec.ts +++ b/packages/logger/src/lib/logger.spec.ts @@ -1,129 +1,68 @@ -import { LOG_LEVEL, LogLevel, LogManager } from './logger'; +import { Writable } from 'stream'; -describe('logger', () => { - let lm: LogManager; - beforeEach(() => { - LogManager.clearInstance(); - lm = LogManager.Instance; - }); - - it('Log Manager singleton should be defined', () => { - expect(typeof lm).toEqual('object'); - }); - - it('should make logger with category', () => { - const logger = lm.get('category'); - expect(logger.category).toEqual('category'); - }); - - it('should make logger with id and category', () => { - const logger = lm.get('category', 'foo'); - expect(logger.id).toEqual('foo'); - }); - - it('Log Manager should pass config to loggers', () => { - lm.withConfig({ - condenseLogs: true, - }); - const logger = lm.get('category'); - expect(logger.Config?.['condenseLogs']).toEqual(true); - }); - - it('Hashing enabled should filter non unique logs', () => { - lm.withConfig({ - condenseLogs: true, - }); - const logger = lm.get('category', 'bar'); - logger.setLevel(LOG_LEVEL.INFO); - expect(logger.Config?.['condenseLogs']).toEqual(true); - logger.info('hello'); - logger.info('hello'); - const logs = lm.getLogsForId('bar'); - expect(logs.length).toEqual(1); - }); +import { logger, setLoggerOptions, getChildLogger } from './logger'; - it('should respect info logging level', () => { - const logger = lm.get('info-logger', 'foo'); - logger.setLevel(LOG_LEVEL.INFO); - logger.info('logging'); - logger.debug('shouldnt log'); - const logs = lm.getLogsForId('foo'); - expect(logs.length).toEqual(1); - }); +class TestStream extends Writable { + public data = ''; - it('should log error at any level', () => { - const logger = lm.get('info-logger', 'foo2'); - logger.setLevel(LOG_LEVEL.DEBUG); - logger.debug('logging'); - logger.error('error'); - const logs = lm.getLogsForId('foo2'); - expect(logs.length).toEqual(2); - }); + override _write( + chunk: { toString: () => string }, + encoding: string, + callback: () => void + ) { + this.data += chunk.toString(); + callback(); + } +} - it('should safe serialize circular references', () => { - const logger = lm.get('info-logger', 'foo3'); - logger.setLevel(LOG_LEVEL.DEBUG); - const circ: any = { foo: 'bar' }; - circ.circ = circ; - logger.debug('circular reference to serialize', circ); - console.log(lm.getLogsForId('foo3')); - expect(lm.getLogsForId('foo3').length).toEqual(1); +describe('logger', () => { + it('should have default level "info"', () => { + expect(logger.level).toBe('info'); }); - it('should trace logs through multiple categories', () => { - const logger = lm.get('info-logger', 'foo4'); - logger.setLevel(LOG_LEVEL.DEBUG); - const logger2 = lm.get('debug-logger', 'foo4'); - logger2.setLevel(LOG_LEVEL.DEBUG); - logger2.debug('foo'); - logger.debug('bar'); - expect(lm.getLogsForId('foo4').length).toEqual(2); + it('setLoggerOptions should update logger options', () => { + const testLogger = setLoggerOptions({ level: 'debug', name: 'TestLogger' }); + expect(testLogger.level).toBe('debug'); }); - it('should not persist logs if level set to OFF', () => { - const count = 1_000; - for (let i = 0; i < count; i++) { - const logger = lm.get('' + i, 'foo5'); - logger.setLevel(LOG_LEVEL.OFF); - logger.debug(i + ''); - } - - expect(lm.getLogsForId('foo5').length).toEqual(0); + it('getChildLogger should create a child logger', () => { + const childLogger = getChildLogger({ module: 'childTest' }); + expect(typeof childLogger.child).toBe('function'); + expect(() => childLogger.info('Child logger test message')).not.toThrow(); }); - it('should persist logs across categories', async () => { - const count = 10_000; - for (let i = 0; i < count; i++) { - const logger = lm.get('' + i, 'foo6'); - logger.setLevel(LOG_LEVEL.DEBUG); - logger.debug(i + ''); - } - - expect(lm.getLogsForId('foo6').length).toEqual(count); + it('should log messages correctly on the parent logger', (done) => { + // Override the global logger with a test logger using our own destination stream: + const testStream = new TestStream(); + const testLogger = setLoggerOptions( + { level: 'info', name: 'ParentTestLogger' }, + testStream + ); + testLogger.info('Parent message'); + + // Give a small amount time for the stream to process the log + setTimeout(() => { + expect(testStream.data).toMatch(/Parent message/); + done(); + }, 50); }); - it('should retain logger keys and return from LogManager', () => { - const count = 10; - for (let i = 0; i < count; i++) { - const logger = lm.get('' + i, 'foo7'); - logger.setLevel(LogLevel.DEBUG); - logger.debug(i + ''); - } - - expect(lm.getLogsForId('foo7').length).toEqual(count); - expect(lm.LoggerIds.length).toEqual(10); - }); - - it('should order logs based on logger creation timestamp', async () => { - const loggerA = lm.get('a', '1'); - await new Promise((res) => setTimeout(res, 100)); - const loggerB = lm.get('b', '2'); - - const requestIds = lm.LoggerIds; - - expect(requestIds.length).toBe(2); - expect(loggerA.timestamp).toBeLessThan(loggerB.timestamp); - expect(requestIds[0]).toBe('1'); - expect(requestIds[1]).toBe('2'); + it('should log messages on a child logger using the parent transport but adding its bindings', (done) => { + // Override the global logger for consistency in our test: + const testStream = new TestStream(); + setLoggerOptions({ level: 'info', name: 'ParentTestLogger' }, testStream); + const childLogger = getChildLogger({ module: 'ChildModule' }); + childLogger.info('Child message'); + + setTimeout(() => { + try { + expect(testStream.data).toMatch('"name":"ParentTestLogger"'); + expect(testStream.data).toMatch('"msg":"Child message"'); + expect(testStream.data).toMatch('"module":"ChildModule"'); + done(); + } catch (error) { + done(error); + } + }, 50); }); }); diff --git a/packages/logger/src/lib/logger.ts b/packages/logger/src/lib/logger.ts index 8af393f5e8..27021c6ea4 100644 --- a/packages/logger/src/lib/logger.ts +++ b/packages/logger/src/lib/logger.ts @@ -1,551 +1,32 @@ -import { version, LOG_LEVEL, LOG_LEVEL_VALUES } from '@lit-protocol/constants'; -import { hashMessage } from 'ethers/lib/utils'; -export { LOG_LEVEL }; +import { pino, Logger as Pino, LoggerOptions, DestinationStream } from 'pino'; -export enum LogLevel { - OFF = -1, - ERROR = 0, - INFO = 1, - DEBUG = 2, - WARN = 3, - FATAL = 4, - TIMING_START = 5, - TIMING_END = 6, -} - -const colours = { - reset: '\x1b[0m', - bright: '\x1b[1m', - dim: '\x1b[2m', - underscore: '\x1b[4m', - blink: '\x1b[5m', - reverse: '\x1b[7m', - hidden: '\x1b[8m', - - fg: { - black: '\x1b[30m', - red: '\x1b[31m', - green: '\x1b[32m', - yellow: '\x1b[33m', - blue: '\x1b[34m', - magenta: '\x1b[35m', - cyan: '\x1b[36m', - white: '\x1b[37m', - gray: '\x1b[90m', - crimson: '\x1b[38m', // Scarlet - }, - bg: { - black: '\x1b[40m', - red: '\x1b[41m', - green: '\x1b[42m', - yellow: '\x1b[43m', - blue: '\x1b[44m', - magenta: '\x1b[45m', - cyan: '\x1b[46m', - white: '\x1b[47m', - gray: '\x1b[100m', - crimson: '\x1b[48m', - }, +const DEFAULT_LOGGER_OPTIONS = { + name: 'LitProtocolSDK', + level: 'info', }; -function _convertLoggingLevel(level: LOG_LEVEL_VALUES): string { - switch (level) { - case LOG_LEVEL.INFO: - return `${colours.fg.green}[INFO]${colours.reset}`; - case LOG_LEVEL.DEBUG: - return `${colours.fg.cyan}[DEBUG]${colours.reset}`; - case LOG_LEVEL.WARN: - return `${colours.fg.yellow}[WARN]${colours.reset}`; - case LOG_LEVEL.ERROR: - return `${colours.fg.red}[ERROR]${colours.reset}`; - case LOG_LEVEL.FATAL: - return `${colours.fg.red}[FATAL]${colours.reset}`; - case LOG_LEVEL.TIMING_START: - return `${colours.fg.green}[TIME_START]${colours.reset}`; - case LOG_LEVEL.TIMING_END: - return `${colours.fg.green}[TIME_END]${colours.reset}`; - } - - return '[UNKNOWN]'; -} - -function _resolveLoggingHandler(level: LOG_LEVEL_VALUES): any { - switch (level) { - case LOG_LEVEL.DEBUG: - return console.debug; - case LOG_LEVEL.INFO: - return console.info; - case LOG_LEVEL.ERROR: - return console.error; - case LOG_LEVEL.WARN: - return console.warn; - case LOG_LEVEL.FATAL: - return console.error; - case LOG_LEVEL.TIMING_END: - return console.timeLog; - case LOG_LEVEL.TIMING_START: - return console.time; - } -} - -/** - * Implementation of `JSON.stringify` which removes circular object references - * @example - * let circ = {foo: 'bar'}; - * circ.circ = circ; // creates a circular reference - * _safeStringify(circ) -> {foo: 'bar'} - * @param obj object to check for circular references - * @param indent number of indents to include (spaces) - * @returns obj param without without circular references - */ -function _safeStringify(obj: any, indent = 2) { - let cache: any[] | null = []; - const retVal = JSON.stringify( - obj, - (_key, value) => - typeof value === 'object' && value !== null - ? cache?.includes(value) - ? undefined // Duplicate reference found, discard key - : cache?.push(value) && value // Store value in our collection - : value, - indent +type Logger = Pino; +let logger: Logger = pino(DEFAULT_LOGGER_OPTIONS); + +function setLoggerOptions( + loggerOptions: LoggerOptions, + destination?: DestinationStream +): Logger { + logger = pino( + { + ...DEFAULT_LOGGER_OPTIONS, + ...loggerOptions, + }, + destination ); - cache = null; - return retVal; -} -interface ILog { - timestamp: string; - message: string; - args: any[]; - id: string; - category: string; - level: LOG_LEVEL_VALUES; - error?: any; - toString(): string; - toJSON(): Record; + return logger; } -class Log implements ILog { - timestamp: string; - message: string; - args: any[]; - id: string; - category: string; - level: LOG_LEVEL_VALUES; - error?: any; - - constructor( - timestamp: string, - message: string, - args: any[], - id: string, - category: string, - level: LOG_LEVEL_VALUES - ) { - this.timestamp = timestamp; - this.message = message; - this.args = args; - this.id = id; - this.category = category; - this.level = level; - } - - toString(): string { - let fmtStr: string = `[Lit-JS-SDK v${version}]${_convertLoggingLevel( - this.level - )} [${this.category}] [id: ${this.id}] ${this.message}`; - for (let i = 0; i < this.args.length; i++) { - if (typeof this.args[i] === 'object') { - fmtStr = `${fmtStr} ${_safeStringify(this.args[i])}`; - } else { - fmtStr = `${fmtStr} ${this.args[i]}`; - } - } - return fmtStr; - } - - toArray(): string[] { - const args = []; - args.push(`[Lit-JS-SDK v${version}]`); - args.push(`[${this.timestamp}]`); - args.push(_convertLoggingLevel(this.level)); - args.push(`[${this.category}]`); - - this.id && args.push(`${colours.fg.cyan}[id: ${this.id}]${colours.reset}`); - this.message && args.push(this.message); - - for (let i = 0; i < this.args.length; i++) { - args.push(this.args[i]); - } - - return args; - } - - toJSON(): Record { - return { - timestamp: this.timestamp, - message: this.message, - args: this.args, - id: this.id, - category: this.category, - level: this.level, - }; - } +function getChildLogger( + ...childParams: Parameters +): Logger { + return logger.child(...childParams); } -export type messageHandler = (log: Log) => void; - -export class Logger { - private _category: string; - private _level: LOG_LEVEL_VALUES; - private _id: string; - private _handler: messageHandler | undefined; - private _consoleHandler: any; - private _logs: Log[] = []; - private _logHashes: Map = new Map(); - private _config: Record | undefined; - private _isParent: boolean; - private _children: Map; - private _timestamp: number; - - public static createLogger( - category: string, - level: LOG_LEVEL_VALUES, - id: string, - isParent: boolean, - config?: Record - ): Logger { - return new Logger(category, level, id, isParent, config); - } - - private constructor( - category: string, - level: LOG_LEVEL_VALUES, - id: string, - isParent: boolean, - config?: Record - ) { - this._category = category; - this._level = level; - this._id = id; - this._consoleHandler = _resolveLoggingHandler(this._level); - this._config = config; - this._children = new Map(); - this._isParent = isParent; - this._timestamp = Date.now(); - } - - get id(): string { - return this._id; - } - - get category(): string { - return this._category; - } - - get timestamp(): number { - return this._timestamp; - } - - get Logs(): Log[] { - return this._logs; - } - - set Config(value: Record | undefined) { - this._config = value; - } - - get Config(): Record | undefined { - return this._config; - } - - get Children(): Map { - return this._children; - } - - public setLevel(level: LOG_LEVEL_VALUES): void { - this._level = level; - } - - public setHandler(handler: messageHandler) { - this._handler = handler; - } - - public info(message: string = '', ...args: any[]): void { - this._log(LOG_LEVEL.INFO, message, ...args); - } - - public debug(message: string = '', ...args: any[]): void { - this._log(LOG_LEVEL.DEBUG, message, ...args); - } - - public warn(message: string = '', ...args: any[]): void { - this._log(LOG_LEVEL.WARN, message, args); - } - - public error(message: string = '', ...args: any[]): void { - this._log(LOG_LEVEL.ERROR, message, ...args); - } - - public fatal(message: string = '', ...args: any[]): void { - this._log(LOG_LEVEL.FATAL, message, ...args); - } - - public trace(message: string = '', ...args: any[]): void { - this._log(LOG_LEVEL.FATAL, message, ...args); - } - - public timeStart(message: string = '', ...args: any[]): void { - this._log(LOG_LEVEL.TIMING_START, message, ...args); - } - - public timeEnd(message: string = '', ...args: any[]): void { - this._level < LOG_LEVEL.OFF && - this._log(LOG_LEVEL.TIMING_END, message, ...args); - } - - private _log( - level: LOG_LEVEL_VALUES, - message: string = '', - ...args: any[] - ): void { - const log = new Log( - new Date().toISOString(), - message, - args, - this._id, - this._category, - level - ); - - const arrayLog = log.toArray(); - if (this._config?.['condenseLogs'] && !this._checkHash(log)) { - (this._level >= level || level === LogLevel.ERROR) && - this._consoleHandler && - this._consoleHandler(...arrayLog); - (this._level >= level || level === LOG_LEVEL.ERROR) && - this._handler && - this._handler(log); - - (this._level >= level || level === LogLevel.ERROR) && this._addLog(log); - } else if (!this._config?.['condenseLogs']) { - (this._level >= level || level === LogLevel.ERROR) && - this._consoleHandler && - this._consoleHandler(...arrayLog); - (this._level >= level || level === LOG_LEVEL.ERROR) && - this._handler && - this._handler(log); - (this._level >= level || level === LOG_LEVEL.ERROR) && this._addLog(log); - } - } - - private _checkHash(log: Log): boolean { - const strippedMessage = this._cleanString(log.message); - const digest = hashMessage(strippedMessage); - const hash = digest.toString(); - const item = this._logHashes.get(hash); - if (item) { - return true; - } else { - this._logHashes.set(hash, true); - return false; - } - } - - private _addLog(log: Log) { - this._logs.push(log); - // TODO: currently we are not deleting old request id's which over time will fill local storage as the maximum storage size is 10mb - // we should be deleting keys from the front of the collection of `Object.keys(category)` such that the first keys entered are deleted when we reach a pre defined key threshold - // this implementation assumes that serialization / deserialization from `localStorage` keeps the same key ordering in each `category` object as we will asssume the array produced from `Object.keys` will always be the same ordering. - // which then allows us to start at the front of the array and do `delete` operation on each key we wish to delete from the object. - //log.id && this._addToLocalStorage(log); - } - - private _addToLocalStorage(log: Log) { - if (globalThis.localStorage) { - let bucket: Record | string | null = - globalThis.localStorage.getItem(log.category); - if (bucket) { - bucket = JSON.parse(bucket) as Record; - if (!bucket[log.id]) { - bucket[log.id] = []; - } - bucket[log.id].push(log.toString()); - globalThis.localStorage.setItem(log.category, _safeStringify(bucket)); - } else { - const bucket: Record = {}; - bucket[log.id] = [log.toString()]; - globalThis.localStorage.setItem(log.category, _safeStringify(bucket)); - } - } - } - - /** - * - * @param input string which will be cleaned of non utf-8 characters - * @returns {string} input cleaned of non utf-8 characters - */ - private _cleanString(input: string): string { - let output = ''; - for (let i = 0; i < input.length; i++) { - if (input.charCodeAt(i) <= 127) { - output += input.charAt(i); - } - } - return output; - } -} - -export class LogManager { - private static _instance: LogManager; - private _loggers: Map; - private _level: LOG_LEVEL_VALUES | undefined = LOG_LEVEL.DEBUG; - private _config: Record | undefined; - - static get Instance(): LogManager { - if (!LogManager._instance) { - LogManager._instance = new LogManager(); - } - return LogManager._instance; - } - - static clearInstance() { - (LogManager._instance as any) = undefined; - } - - private constructor() { - this._loggers = new Map(); - } - - public withConfig(config: Record) { - this._config = config; - for (const logger of this._loggers) { - logger[1].Config = config; - } - } - - public setLevel(level: LOG_LEVEL_VALUES) { - this._level = level; - for (const logger of this._loggers) { - logger[1].setLevel(level); - } - } - - public setHandler(handler: messageHandler) { - for (const logger of this._loggers) { - logger[1].setHandler(handler); - } - } - - get LoggerIds(): string[] { - const keys: [string, number][] = []; - for (const category of this._loggers.entries()) { - for (const child of category[1].Children) { - keys.push([child[0], child[1].timestamp]); - } - } - - return keys - .sort((a: [string, number], b: [string, number]) => { - return a[1] - b[1]; - }) - .map((value: [string, number]) => { - return value[0]; - }); - } - - // if a logger is given an id it will persist logs under its logger instance - public get(category: string, id?: string): Logger { - let instance = this._loggers.get(category); - if (!instance && !id) { - this._loggers.set( - category, - Logger.createLogger(category, this._level ?? LOG_LEVEL.INFO, '', true) - ); - - instance = this._loggers.get(category) as Logger; - instance.Config = this._config; - return instance; - } - - if (id) { - if (!instance) { - this._loggers.set( - category, - Logger.createLogger(category, this._level ?? LOG_LEVEL.INFO, '', true) - ); - - instance = this._loggers.get(category) as Logger; - instance.Config = this._config; - } - const children = instance?.Children; - let child = children?.get(id); - if (child) { - return child; - } - children?.set( - id, - Logger.createLogger( - category, - this._level ?? LOG_LEVEL.INFO, - id ?? '', - true - ) - ); - - child = children?.get(id) as Logger; - child.Config = this._config; - return children?.get(id) as Logger; - // fall through condition for if there is no id for the logger and the category is not yet created. - // ex: LogManager.Instance.get('foo'); - } else if (!instance) { - this._loggers.set( - category, - Logger.createLogger(category, this._level ?? LOG_LEVEL.INFO, '', true) - ); - - instance = this._loggers.get(category) as Logger; - instance.Config = this._config; - } - - return instance as Logger; - } - - getById(id: string): string[] { - let logStrs: string[] = []; - for (const category of this._loggers.entries()) { - const logger = category[1].Children.get(id); - if (logger) { - const logStr = []; - for (const log of logger.Logs) { - logStr.push(log.toString()); - } - logStrs = logStrs.concat(logStr); - } - } - - return logStrs; - } - - public getLogsForId(id: string): string[] { - let logsForRequest: string[] = this.getById(id); - if (logsForRequest.length < 1 && globalThis.localStorage) { - for (const category of this._loggers.keys()) { - const bucketStr: string | null = - globalThis.localStorage.getItem(category); - const bucket: Record = JSON.parse( - bucketStr as string - ); - if (bucket && bucket[id]) { - const logsForId: string[] = bucket[id].filter((log: string) => - log.includes(id) - ); - logsForRequest = logsForId.concat(logsForRequest); - } - } - } - - return logsForRequest; - } -} +export { Logger, logger, setLoggerOptions, getChildLogger }; diff --git a/packages/misc-browser/src/lib/misc-browser.ts b/packages/misc-browser/src/lib/misc-browser.ts index 9c2b0d96a5..12b060e341 100644 --- a/packages/misc-browser/src/lib/misc-browser.ts +++ b/packages/misc-browser/src/lib/misc-browser.ts @@ -1,46 +1,44 @@ import { - ELeft, - ERight, - IEither, - InvalidArgumentException, LocalStorageItemNotFoundException, LocalStorageItemNotRemovedException, LocalStorageItemNotSetException, } from '@lit-protocol/constants'; -import { - uint8arrayFromString, - uint8arrayToString, -} from '@lit-protocol/uint8arrays'; /** + * Get the local storage item by key. * - * Get the local storage item by key - * - * @param { string } key + * @param {string} key The key to retrieve. + * @returns {string} The stored string. + * @throws Will throw an error if reading from localStorage fails or the item is not found. */ -export const getStorageItem = (key: string): IEither => { - let item; +export const getStorageItem = (key: string): string => { + let item: string | null; try { item = localStorage.getItem(key); } catch (e) { - // swallowing + throw new LocalStorageItemNotFoundException( + { + info: { + storageKey: key, + }, + cause: e, + }, + `Error reading localStorage for key "${key}"` + ); } if (!item) { - return ELeft( - new LocalStorageItemNotFoundException( - { - info: { - storageKey: key, - }, + throw new LocalStorageItemNotFoundException( + { + info: { + storageKey: key, }, - `Failed to get %s from local storage`, - key - ) + }, + `Failed to find ${key} in local storage` ); } - return ERight(item); + return item; }; /** @@ -50,21 +48,20 @@ export const getStorageItem = (key: string): IEither => { * @param { string } key is the key to set * @param { string } value is the value to set */ -export const setStorageItem = (key: string, value: string): IEither => { +export const setStorageItem = (key: string, value: string): string => { try { localStorage.setItem(key, value); - return ERight(value); + return value; } catch (e) { - return ELeft( - new LocalStorageItemNotSetException( - { - info: { - storageKey: key, - }, + throw new LocalStorageItemNotSetException( + { + info: { + storageKey: key, }, - `Failed to set %s in local storage`, - key - ) + cause: e, + }, + `Failed to set %s in local storage`, + key ); } }; @@ -74,176 +71,22 @@ export const setStorageItem = (key: string, value: string): IEither => { * Remove the local storage item by key * * @param { string } key is the key to remove - * @returns { IEither } Either the key or an error + * @returns { string } the key removed */ -export const removeStorageItem = (key: string): IEither => { +export const removeStorageItem = (key: string): string => { try { localStorage.removeItem(key); - return ERight(key); + return key; } catch (e) { - return ELeft( - new LocalStorageItemNotRemovedException( - { - info: { - storageKey: key, - }, - }, - `Failed to remove %s from local storage`, - key - ) - ); - } -}; - -/** - * Convert a Blob to a base64urlpad string. Note: This function returns a promise. - * - * @param { Blob | File } blob The Blob or File to turn into a base64 string - * @returns { Promise } A promise that resolves to the base64 string - */ -export const blobToBase64String = async ( - blob: Blob | File -): Promise => { - const arrayBuffer = await blob.arrayBuffer(); - - const uint8array = new Uint8Array(arrayBuffer); - - return uint8arrayToString(uint8array, 'base64urlpad'); -}; - -/** - * - * Convert a base64urlpad string to a Blob. - * Note: This function DOES NOT return a promise - * - * @param { string } base64String The base64 string that to turn into a Blob - * @returns { Blob } A blob that contains the decoded base64 data - */ -export const base64StringToBlob = (base64String: string): Blob => { - return new Blob([uint8arrayFromString(base64String, 'base64urlpad')]); -}; - -/** - * - * Convert a file to a data URL, which could then be embedded in a LIT. - * A data URL is a string representation of a file. - * - * @param { File } file The file to turn into a data url - * @returns { string } The data URL. This is a string representation that can be used anywhere the original file would be used. - */ -export const fileToDataUrl = ( - file: File -): Promise => { - return new Promise((resolve) => { - const reader = new FileReader(); - reader.onloadend = () => { - resolve(reader.result); - }; - reader.readAsDataURL(file); - }); -}; - -/** - * - * // TEST: downloadFile - * Download a file in memory to the user's computer - * - * @param { Object } params - * @property { string } filename The name of the file - * @property { Uint8Array } data The actual file itself as a Uint8Array - * @property { string } mimetype The mime type of the file - * - * @returns { void } The data URL. This is a string representation that can be used anywhere the original file would be used. - * - */ -export const downloadFile = ({ - fileName, - data, - mimeType, -}: { - fileName: string; - data: Uint8Array; - mimeType: string; -}): void => { - const element = document.createElement('a'); - - element.setAttribute( - 'href', - 'data:' + mimeType + ';base64,' + uint8arrayToString(data, 'base64') - ); - element.setAttribute('download', fileName); - - element.style.display = 'none'; - - document.body.appendChild(element); - - element.click(); - - document.body.removeChild(element); -}; - -/** - * - * // TEST: injectViewerIFrame - * Inject an iFrame into the current page that will display a LIT. - * This function safely sandboxes the content in the iFrame so that the LIT cannot see cookies or localStorage of the parent website. - * - * @param { Object } params - * @property { string } destinationId The DOM ID of the element to inject the iFrame into - * @property { string } title The title of the content being displayed - * @property { string } fileUrl The URL of the content that will be shown in the iFrame - * @property { string } className An optional DOM class name to add to the iFrame for styling - * - * @returns { void } - */ -export const injectViewerIFrame = ({ - destinationId, - title, - fileUrl, - className, -}: { - destinationId: string; - title: string; - fileUrl: string; - className: string; -}): void => { - if (fileUrl.includes('data:')) { - // data urls are not safe, refuse to do this - throw new InvalidArgumentException( - { - info: { - fileUrl, - }, - }, - 'You can not inject an iFrame with a data url. Try a regular https URL.' - ); - } - - const url = new URL(fileUrl); - if (url.host.toLowerCase() === window.location.host.toLowerCase()) { - throw new InvalidArgumentException( + throw new LocalStorageItemNotRemovedException( { info: { - fileUrl, + storageKey: key, }, + cause: e, }, - 'You cannot host a LIT on the same domain as the parent webpage. This is because iFrames with the same origin have access to localstorage and cookies in the parent webpage which is unsafe' + `Failed to remove %s from local storage`, + key ); } - - const iframe = Object.assign(document.createElement('iframe'), { - src: fileUrl, - title: title, - sandbox: - 'allow-forms allow-scripts allow-popups allow-modals allow-popups-to-escape-sandbox allow-same-origin', - loading: 'lazy', - allow: - 'accelerometer; ambient-light-sensor; autoplay; battery; camera; display-capture; encrypted-media; fullscreen; geolocation; gyroscope; layout-animations; legacy-image-formats; magnetometer; microphone; midi; payment; picture-in-picture; publickey-credentials-get; sync-xhr; usb; vr; screen-wake-lock; web-share; xr-spatial-tracking', - }); - - if (className) { - iframe.className = className; - } - - document.getElementById(destinationId)?.appendChild(iframe); }; diff --git a/packages/misc/.babelrc b/packages/misc/.babelrc deleted file mode 100644 index 158083d278..0000000000 --- a/packages/misc/.babelrc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "presets": [ - [ - "@nx/web/babel", - { - "useBuiltIns": "usage" - } - ] - ] -} diff --git a/packages/misc/.eslintrc.json b/packages/misc/.eslintrc.json deleted file mode 100644 index 9d9c0db55b..0000000000 --- a/packages/misc/.eslintrc.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "extends": ["../../.eslintrc.json"], - "ignorePatterns": ["!**/*"], - "overrides": [ - { - "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], - "rules": {} - }, - { - "files": ["*.ts", "*.tsx"], - "rules": {} - }, - { - "files": ["*.js", "*.jsx"], - "rules": {} - } - ] -} diff --git a/packages/misc/README.md b/packages/misc/README.md deleted file mode 100644 index 381773dbee..0000000000 --- a/packages/misc/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Quick Start - -This submodule contains various utility functions for error handling, logging, type checking, and other operations in the JavaScript SDK for the Lit Protocol. - -### node.js / browser - -``` -yarn add @lit-protocol/misc -``` diff --git a/packages/misc/src/index.ts b/packages/misc/src/index.ts deleted file mode 100644 index c2ec7a0691..0000000000 --- a/packages/misc/src/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -export * from './lib/addresses'; -export * from './lib/misc'; -export * from './lib/params-validators'; -export * from './lib/utils'; -export { - validateSessionSig, - validateSessionSigs, -} from './lib/helper/session-sigs-validator'; -export { formatSessionSigs } from './lib/helper/session-sigs-reader'; diff --git a/packages/misc/src/lib/misc.spec.ts b/packages/misc/src/lib/misc.spec.ts deleted file mode 100644 index f20dc60508..0000000000 --- a/packages/misc/src/lib/misc.spec.ts +++ /dev/null @@ -1,190 +0,0 @@ -// @ts-nocheck -import { TextEncoder, TextDecoder } from 'util'; -global.TextEncoder = TextEncoder; -// @ts-ignore -global.TextDecoder = TextDecoder; - -import * as utilsModule from './misc'; - -describe('utils', () => { - /** - * Print Error - */ - it('should console.log with name, message and stack', () => { - let err: Error; - - try { - throw new Error('Test Error'); - } catch (e) { - err = e as Error; - } - - console.log = jest.fn(); - - utilsModule.printError(err); - - expect((console.log as any).mock.calls[0][0]).toBe('Error Stack'); - expect((console.log as any).mock.calls[1][0]).toBe('Error Name'); - expect((console.log as any).mock.calls[2][0]).toBe('Error Message'); - }); - - it('should get the most common string in an array', () => { - const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 8]; - - const mostOccured = utilsModule.mostCommonString(arr); - - expect(mostOccured).toBe(8); - }); - - it('should get the last element of the array if every element only appears once', () => { - const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]; - - const mostOccured = utilsModule.mostCommonString(arr); - - expect(mostOccured).toBe(0); - }); - - it('should get value type by a given value', () => { - const fooString = 'fooString'; - const fooBool = true; - const fooNumber = 6; - const fooList: number[] = [1, 2, 3]; - const fooArray: string[] = ['a', 'b', 'c']; - const fooTuple: [string, number] = ['hello', 10]; - const fooUint8Arr = new Uint8Array([1, 2, 3, 4, 5]); - const fooUint16Arr = new Uint16Array([1, 2, 3, 4, 5]); - const fooBlob = new Blob([fooUint8Arr as BlobPart], {}); - const fooFile = new File([fooUint8Arr as BlobPart], ''); - - expect(utilsModule.getVarType(fooString)).toBe('String'); - expect(utilsModule.getVarType(fooBool)).toBe('Boolean'); - expect(utilsModule.getVarType(fooNumber)).toBe('Number'); - expect(utilsModule.getVarType(fooList)).toBe('Array'); - expect(utilsModule.getVarType(fooArray)).toBe('Array'); - expect(utilsModule.getVarType(fooTuple)).toBe('Array'); - expect(utilsModule.getVarType(fooUint8Arr)).toBe('Uint8Array'); - expect(utilsModule.getVarType(fooUint16Arr)).toBe('Uint16Array'); - expect(utilsModule.getVarType(fooBlob)).toBe('Blob'); - expect(utilsModule.getVarType(fooFile)).toBe('File'); - }); - - it('should check type', () => { - expect( - utilsModule.checkType({ - value: 999, - allowedTypes: ['Number'], - paramName: 'paramName1', - functionName: 'functionName1', - }) - ).toBe(true); - - expect( - utilsModule.checkType({ - value: 'foo', - allowedTypes: ['Number', 'String'], - paramName: 'paramName2', - functionName: 'functionName2', - }) - ).toBe(true); - - expect( - utilsModule.checkType({ - value: [1, 2], - allowedTypes: ['Number', 'Array'], - paramName: 'paramName3', - functionName: 'functionName3', - }) - ).toBe(true); - - expect( - utilsModule.checkType({ - value: new Uint8Array([1, 2, 3]), - allowedTypes: ['String', 'Uint8Array'], - paramName: 'paramName4', - functionName: 'functionName4', - }) - ).toBe(true); - }); - - it('should check auth type', () => { - const authSig = { - sig: '', - derivedVia: 'web3.eth.personal.sign', - signedMessage: - 'I am creating an account to use Lit Protocol at 2022-04-12T09:23:31.290Z', - address: '0x7e7763BE1379Bb48AFEE4F5c232Fb67D7c03947F', - }; - - expect( - utilsModule.checkIfAuthSigRequiresChainParam(authSig, 'ethereum', 'fName') - ).toBe(true); - - expect( - utilsModule.checkIfAuthSigRequiresChainParam( - { - ethereum: 'foo', - }, - '123', - 'fName' - ) - ).toBe(true); - }); -}); - -describe('double escaped JSON string', () => { - test('A doubly escaped JSON string', () => { - const doublyEscapedJson = '{\\"key\\": \\"value\\"}'; - expect(utilsModule.normalizeAndStringify(doublyEscapedJson)).toBe( - '{"key":"value"}' - ); - }); - - test('A triply escaped JSON string', () => { - const triplyEscapedJson = '{\\\\\\"key\\\\\\": \\\\\\"value\\\\\\"}'; - expect(utilsModule.normalizeAndStringify(triplyEscapedJson)).toBe( - '{"key":"value"}' - ); - }); - - test('A correctly escaped JSON string (for comparison)', () => { - const correctlyEscapedJson = '{"key":"value"}'; - expect(utilsModule.normalizeAndStringify(correctlyEscapedJson)).toBe( - '{"key":"value"}' - ); - }); - - test('regular siwe message', () => { - const regularString = - 'litprotocol.com wants you to sign in with your Ethereum account:\\n0x3edB...'; - - expect(utilsModule.normalizeAndStringify(regularString)).toBe( - regularString - ); - }); -}); -it('should remove hex prefix from a string', () => { - const input = '0xabcdef'; - const expectedOutput = 'abcdef'; - - const result = utilsModule.removeHexPrefix(input); - - expect(result).toBe(expectedOutput); -}); - -it('should not remove hex prefix if it is not present', () => { - const input = 'abcdef'; - const expectedOutput = 'abcdef'; - - const result = utilsModule.removeHexPrefix(input); - - expect(result).toBe(expectedOutput); -}); - -it('should get ip address', async () => { - // polyfill fetch - const fetch = require('node-fetch'); - global.fetch = fetch; - - const ipAddres = await utilsModule.getIpAddress('cayenne.litgateway.com'); - expect(ipAddres).toBe('207.244.70.36'); -}); diff --git a/packages/misc/src/lib/misc.ts b/packages/misc/src/lib/misc.ts deleted file mode 100644 index 3888723540..0000000000 --- a/packages/misc/src/lib/misc.ts +++ /dev/null @@ -1,749 +0,0 @@ -import { Contract } from '@ethersproject/contracts'; -import { JsonRpcProvider } from '@ethersproject/providers'; - -import { - ABI_ERC20, - InvalidArgumentException, - InvalidParamType, - LIT_AUTH_SIG_CHAIN_KEYS, - LIT_CHAINS, - LIT_NETWORK, - LIT_NETWORK_VALUES, - LOG_LEVEL, - LOG_LEVEL_VALUES, - LitEVMChainKeys, - NetworkError, - RELAYER_URL_BY_NETWORK, - RemovedFunctionError, - UnknownError, - WrongNetworkException, -} from '@lit-protocol/constants'; -import { LogManager } from '@lit-protocol/logger'; -import { - Chain, - AuthSig, - NodeErrorV3, - ClaimResult, - LitNodeClientConfig, - MintCallback, - RelayClaimProcessor, -} from '@lit-protocol/types'; - -const logBuffer: any[][] = []; - -// Module scoped variable to store the LitNodeClientConfig passed to LitCore -let litConfig: LitNodeClientConfig | undefined; - -export const setMiscLitConfig = (config: LitNodeClientConfig | undefined) => { - litConfig = config; -}; - -/** - * - * Print error message based on Error interface - * - * @param { Error } e - * @returns { void } - */ -export const printError = (e: Error): void => { - console.log('Error Stack', e.stack); - console.log('Error Name', e.name); - console.log('Error Message', e.message); -}; - -/** - * - * Find the element that occurs the most in an array - * - * @param { Array } arr - * @returns { any } the element that appeared the most - */ -export const mostCommonString = (arr: T[]): T | undefined => { - return arr - .sort( - (a: T, b: T) => - arr.filter((v: T) => v === a).length - - arr.filter((v: T) => v === b).length - ) - .pop(); -}; - -export const findMostCommonResponse = (responses: object[]): object => { - const result: Record = {}; - - // Aggregate all values for each key across all responses - const keys = new Set(responses.flatMap(Object.keys)); - - for (const key of keys) { - const values = responses.map( - (response: Record) => response[key] - ); - - // Filter out undefined values before processing - const filteredValues = values.filter( - (value) => value !== undefined && value !== '' - ); - - if (filteredValues.length === 0) { - result[key] = undefined; // or set a default value if needed - } else if ( - typeof filteredValues[0] === 'object' && - !Array.isArray(filteredValues[0]) - ) { - // Recursive case for objects - result[key] = findMostCommonResponse(filteredValues); - } else { - // Most common element from filtered values - result[key] = mostCommonString(filteredValues); - } - } - - return result; -}; - -declare global { - var wasmExport: any; - var wasmECDSA: any; - var logger: any; - var logManager: any; -} - -export const throwRemovedFunctionError = (functionName: string) => { - throw new RemovedFunctionError( - { - info: { - functionName, - }, - }, - `This function "${functionName}" has been removed. Please use the old SDK.` - ); -}; - -export const bootstrapLogManager = ( - id: string, - level: LOG_LEVEL_VALUES = LOG_LEVEL.DEBUG -) => { - if (!globalThis.logManager) { - globalThis.logManager = LogManager.Instance; - globalThis.logManager.withConfig({ - condenseLogs: true, - }); - globalThis.logManager.setLevel(level); - } - - globalThis.logger = globalThis.logManager.get(id); -}; - -export const getLoggerbyId = (id: string) => { - return globalThis.logManager.get(id); -}; - -/** - * - * console.log but prepend [Lit-JS-SDK] before the message - * - * @param { any } args - * - * @returns { void } - */ -export const log = (...args: any): void => { - if (!globalThis) { - // there is no globalThis, just print the log - console.log(...args); - return; - } - - // check if config is loaded yet - if (!litConfig) { - // config isn't loaded yet, push into buffer - logBuffer.push(args); - return; - } - - // if there are there are logs in buffer, print them first and empty the buffer. - while (logBuffer.length > 0) { - const log = logBuffer.shift() ?? ''; - globalThis?.logger && globalThis?.logger.debug(...log); - } - - globalThis?.logger && globalThis?.logger.debug(...args); -}; - -export const logWithRequestId = (id: string, ...args: any) => { - if (!globalThis) { - // there is no globalThis, just print the log - console.log(...args); - return; - } - - // check if config is loaded yet - if (!litConfig) { - // config isn't loaded yet, push into buffer - logBuffer.push(args); - return; - } - - // if there are there are logs in buffer, print them first and empty the buffer. - while (logBuffer.length > 0) { - const log = logBuffer.shift() ?? ''; - globalThis?.logger && - globalThis.logManager.get(globalThis.logger.category, id).debug(...log); - } - - globalThis?.logger && - globalThis.logManager.get(globalThis.logger.category, id).debug(...args); -}; - -export const logErrorWithRequestId = (id: string, ...args: any) => { - if (!globalThis) { - // there is no globalThis, just print the log - console.log(...args); - return; - } - - // check if config is loaded yet - if (!litConfig) { - // config isn't loaded yet, push into buffer - logBuffer.push(args); - return; - } - - // if there are there are logs in buffer, print them first and empty the buffer. - while (logBuffer.length > 0) { - const log = logBuffer.shift() ?? ''; - globalThis?.logger && - globalThis.logManager.get(globalThis.logger.category, id).error(...log); - } - - globalThis?.logger && - globalThis.logManager.get(globalThis.logger.category, id).error(...args); -}; - -export const logError = (...args: any) => { - if (!globalThis) { - // there is no globalThis, just print the log - console.log(...args); - return; - } - - // check if config is loaded yet - if (!litConfig) { - // config isn't loaded yet, push into buffer - logBuffer.push(args); - return; - } - - // if there are there are logs in buffer, print them first and empty the buffer. - while (logBuffer.length > 0) { - const log = logBuffer.shift() ?? ''; - globalThis?.logger && - globalThis.logManager.get(globalThis.logger.category).error(...log); - } - - globalThis?.logger && - globalThis.logManager.get(globalThis.logger.category).error(...args); -}; - -/** - * - * Get the type of a variable, could be an object instance type. - * eg Uint8Array instance should return 'Uint8Array` as string - * or simply a `string` or `int` type - * - * @param { any } value - * @returns { string } type - */ -export const getVarType = (value: any): string => { - return Object.prototype.toString.call(value).slice(8, -1); -}; - -/** - * - * Check if the given value is the given type - * If not, throw `invalidParamType` error - * - * @property { any } value - * @property { Array } allowedTypes - * @property { string } paramName - * @property { string } functionName - * @property { boolean } throwOnError - * - * @returns { Boolean } true/false - * - */ -export const checkType = ({ - value, - allowedTypes, - paramName, - functionName, - throwOnError = true, -}: { - value: any; - allowedTypes: string[] | any; - paramName: string; - functionName: string; - throwOnError?: boolean; -}): boolean => { - // -- validate - if (!allowedTypes.includes(getVarType(value))) { - const message = `Expecting ${allowedTypes.join( - ' or ' - )} type for parameter named ${paramName} in Lit-JS-SDK function ${functionName}(), but received "${getVarType( - value - )}" type instead. value: ${ - value instanceof Object ? JSON.stringify(value) : value - }`; - - if (throwOnError) { - throw new InvalidParamType( - { - info: { - allowedTypes, - value, - paramName, - functionName, - }, - }, - message - ); - } - return false; - } - - // -- else - return true; -}; - -/** - * - * @param { AuthSig } authSig - * @param { string } chain - * @param { string } functionName - * - * @returns { boolean } - */ -export const checkIfAuthSigRequiresChainParam = ( - authSig: AuthSig, - chain: string, - functionName: string -): boolean => { - log('checkIfAuthSigRequiresChainParam'); - for (const key of LIT_AUTH_SIG_CHAIN_KEYS) { - if (key in authSig) { - return true; - } - } - - // if we're here, then we need the chain param - if ( - !checkType({ - value: chain, - allowedTypes: ['String'], - paramName: 'chain', - functionName, - }) - ) { - return false; - } - - return true; -}; - -/** - * TODO: Fix "any" - * Sort object - * - * @param { any } obj - * @returns { any } - */ -export const sortedObject = (obj: any): any => { - if (typeof obj !== 'object' || obj === null) { - return obj; - } - if (Array.isArray(obj)) { - return obj.map(sortedObject); - } - const sortedKeys = Object.keys(obj).sort(); - const result: any = {}; - - // NOTE: Use forEach instead of reduce for performance with large objects eg Wasm code - sortedKeys.forEach((key) => { - result[key] = sortedObject(obj[key]); - }); - - return result; -}; - -/** - * - * Convert number to hex - * @param { number } v - * @return { string } hex value prexied with 0x - */ -export const numberToHex = (v: number): string => { - return '0x' + v.toString(16); -}; - -/** - * - * Check if the given value is the given type - * If not, throw `invalidParamType` error - * - * @param { any } value - * @param { string } type - * @param { string } paramName - * @param { string } functionName - * @param { boolean } throwOnError - * @returns { Boolean } true/false - */ -export const is = ( - value: any, - type: string, - paramName: string, - functionName: string, - throwOnError: boolean = true -): boolean => { - if (getVarType(value) !== type) { - const message = `Expecting "${type}" type for parameter named ${paramName} in Lit-JS-SDK function ${functionName}(), but received "${getVarType( - value - )}" type instead. value: ${ - value instanceof Object ? JSON.stringify(value) : value - }`; - - if (throwOnError) { - throw new InvalidParamType( - { - info: { - value, - paramName, - functionName, - }, - }, - message - ); - } - return false; - } - - return true; -}; - -export const isNode = () => { - let isNode = false; - // @ts-ignore - if (typeof process === 'object') { - // @ts-ignore - if (typeof process.versions === 'object') { - // @ts-ignore - if (typeof process.versions.node !== 'undefined') { - isNode = true; - } - } - } - return isNode; -}; -export const isBrowser = () => { - return isNode() === false; -}; - -/** - * - * Get the number of decimal places in a token - * - * @property { string } contractAddress The token contract address - * @property { LitEVMChainKeys } chain The chain on which the token is deployed - * - * @returns { number } The number of decimal places in the token - */ -export const decimalPlaces = async ({ - contractAddress, - chain, -}: { - contractAddress: string; - chain: LitEVMChainKeys; -}): Promise => { - const rpcUrl = LIT_CHAINS[chain].rpcUrls[0] as string; - - const web3 = new JsonRpcProvider({ - url: rpcUrl, - skipFetchSetup: true, - }); - - const contract = new Contract(contractAddress, (ABI_ERC20 as any).abi, web3); - - return await contract['decimals'](); -}; - -/** - * - * Generate a random path (for testing) - * - * @returns { string } The random path - */ -export const genRandomPath = (): string => { - return ( - '/' + - Math.random().toString(36).substring(2, 15) + - Math.random().toString(36).substring(2, 15) - ); -}; - -/** - * Checks if the given LIT_NETWORK value is supported. - * @param litNetwork - The Lit Network value to check. - * @throws {Error} - Throws an error if the Lit Network value is not supported. - */ -export function isSupportedLitNetwork( - litNetwork: LIT_NETWORK_VALUES -): asserts litNetwork is LIT_NETWORK_VALUES { - const supportedNetworks = Object.values(LIT_NETWORK); - - if (!supportedNetworks.includes(litNetwork)) { - throw new WrongNetworkException( - { - info: { - litNetwork, - supportedNetworks, - }, - }, - `Unsupported LitNetwork! (${supportedNetworks.join('|')}) are supported.` - ); - } -} - -export const defaultMintClaimCallback: MintCallback< - RelayClaimProcessor -> = async ( - params: ClaimResult, - network: LIT_NETWORK_VALUES = LIT_NETWORK.NagaDev -): Promise => { - isSupportedLitNetwork(network); - - const AUTH_CLAIM_PATH = '/auth/claim'; - - const relayUrl: string = params.relayUrl || RELAYER_URL_BY_NETWORK[network]; - - if (!relayUrl) { - throw new InvalidArgumentException( - { - info: { - network, - relayUrl, - }, - }, - 'No relayUrl provided and no default relayUrl found for network' - ); - } - - const relayUrlWithPath = relayUrl + AUTH_CLAIM_PATH; - - const response = await fetch(relayUrlWithPath, { - method: 'POST', - body: JSON.stringify(params), - headers: { - 'api-key': params.relayApiKey - ? params.relayApiKey - : '67e55044-10b1-426f-9247-bb680e5fe0c8_relayer', - 'Content-Type': 'application/json', - }, - }); - - if (response.status < 200 || response.status >= 400) { - const errResp = (await response.json()) ?? ''; - const errStmt = `An error occurred requesting "/auth/claim" endpoint ${JSON.stringify( - errResp - )}`; - console.warn(errStmt); - throw new NetworkError( - { - info: { - response, - errResp, - }, - }, - `An error occurred requesting "/auth/claim" endpoint` - ); - } - - const body = await response.json(); - return body.requestId; -}; - -/** - * Adds a '0x' prefix to a string if it doesn't already have one. - * @param str - The input string. - * @returns The input string with a '0x' prefix. - */ -export const hexPrefixed = (str: string): `0x${string}` => { - if (str.startsWith('0x')) { - return str as `0x${string}`; - } - - return ('0x' + str) as `0x${string}`; -}; - -/** - * Removes the '0x' prefix from a hexadecimal string if it exists. - * - * @param str - The input string. - * @returns The input string with the '0x' prefix removed, if present. - */ -export const removeHexPrefix = (str: string) => { - if (str.startsWith('0x')) { - return str.slice(2); - } - - return str; -}; - -/** - * getEnv - Determine the debug status based on environment variables or URL query parameters. - * - * @function - * @export - * @param {Object} [options={}] - Configuration options for determining debug status. - * @param {string} [options.nodeEnvVar='DEBUG'] - The Node.js environment variable to check. - * @param {string} [options.urlQueryParam='dev'] - The URL query parameter to check in a browser environment. - * @param {string} [options.urlQueryValue='debug=true'] - The expected value of the URL query parameter to enable debugging. - * @param {boolean} [options.defaultValue=false] - The default boolean value to return if no debug conditions are met. - * @returns {boolean} - True if debug conditions are met, otherwise returns the provided defaultValue. - * - * @example - * // Usage in Node.js environment - * process.env.DEBUG = 'true'; - * console.log(getEnv()); // Outputs: true - * - * @example - * // Usage in Browser environment with URL: http://example.com?dev=debug=true - * console.log(getEnv()); // Outputs: true - */ -export function getEnv({ - nodeEnvVar = 'DEBUG', - urlQueryParam = 'dev', - urlQueryValue = 'debug=true', - defaultValue = false, -} = {}) { - // Node.js environment - if (isNode()) { - return process.env[nodeEnvVar] === 'true'; - } - // Browser environment - else if (isBrowser()) { - const urlParams = new URLSearchParams(window.location.search); - return urlParams.get(urlQueryParam) === urlQueryValue; - } - // Default - return defaultValue; -} - -export function sendRequest( - url: string, - req: RequestInit, - requestId: string -): Promise { - return fetch(url, req) - .then(async (response) => { - const isJson = response.headers - .get('content-type') - ?.includes('application/json'); - - const data = isJson ? await response.json() : null; - - if (!response.ok) { - // get error message from body or default to response status - const error = data || response.status; - return Promise.reject(error); - } - - return data; - }) - .catch((error: NodeErrorV3) => { - logErrorWithRequestId( - requestId, - `Something went wrong, internal id for request: lit_${requestId}. Please provide this identifier with any support requests. ${ - error?.message || error?.details - ? `Error is ${error.message} - ${error.details}` - : '' - }` - ); - return Promise.reject(error); - }); -} - -/** - * Attempts to normalize a string by unescaping it until it can be parsed as a JSON object, - * then stringifies it exactly once. If the input is a regular string that does not represent - * a JSON object or array, the function will return it as is without modification. - * This function is designed to handle cases where strings might be excessively escaped due - * to multiple layers of encoding, ensuring that JSON data is stored in a consistent and - * predictable format, and regular strings are left unchanged. - * - * @param input The potentially excessively escaped string. - * @return A string that is either the JSON.stringify version of the original JSON object - * or the original string if it does not represent a JSON object or array. - */ -export function normalizeAndStringify(input: string): string { - try { - // Directly return the string if it's not in a JSON format - if (!input.startsWith('{') && !input.startsWith('[')) { - return input; - } - - // Attempt to parse the input as JSON - const parsed = JSON.parse(input); - - // If parsing succeeds, return the stringified version of the parsed JSON - return JSON.stringify(parsed); - } catch (error) { - // If parsing fails, it might be due to extra escaping - const unescaped = input.replace(/\\(.)/g, '$1'); - - // If unescaping doesn't change the string, return it as is - if (input === unescaped) { - return input; - } - - // Otherwise, recursively call the function with the unescaped string - return normalizeAndStringify(unescaped); - } -} - -/** - * Retrieves the IP address associated with a given domain. - * @param domain - The domain for which to retrieve the IP address. - * @returns A Promise that resolves to the IP address. - * @throws If no IP address is found or if the domain name is invalid. - */ -export async function getIpAddress(domain: string): Promise { - const apiURL = `https://dns.google/resolve?name=${domain}&type=A`; - - try { - const response = await fetch(apiURL); - const data = await response.json(); - - if (data.Answer && data.Answer.length > 0) { - return data.Answer[0].data; - } else { - throw new UnknownError( - { - info: { - domain, - apiURL, - }, - }, - 'No IP Address found or bad domain name' - ); - } - } catch (error: any) { - throw new UnknownError( - { - info: { - domain, - apiURL, - }, - cause: error, - }, - 'message' in error ? error.message : String(error) - ); - } -} diff --git a/packages/misc/src/lib/params-validators.ts b/packages/misc/src/lib/params-validators.ts deleted file mode 100644 index 3c7c59a92a..0000000000 --- a/packages/misc/src/lib/params-validators.ts +++ /dev/null @@ -1,680 +0,0 @@ -/** - * Param Validators is an abstraction of validating params of a function, each validator - * returns a boolean value indicating whether the validation is passed or not. - */ - -import { isHexString } from 'ethers/lib/utils'; - -import { - EITHER_TYPE, - ELeft, - ERight, - IEither, - InvalidArgumentException, - InvalidBooleanException, - InvalidParamType, - ParamsMissingError, -} from '@lit-protocol/constants'; -import { - AcceptedFileType, - AccessControlConditions, - DecryptFromJsonProps, - DecryptRequest, - EncryptUint8ArrayRequest, - EncryptFileRequest, - EncryptRequest, - EncryptStringRequest, - EncryptToJsonPayload, - EncryptToJsonProps, - EvmContractConditions, - JsonExecutionSdkParams, - SolRpcConditions, - UnifiedAccessControlConditions, - AuthSig, - AuthenticationContext, -} from '@lit-protocol/types'; - -import { checkIfAuthSigRequiresChainParam, checkType, is, log } from './misc'; -import { isValidBooleanExpression } from './utils'; - -export const safeParams = ({ - functionName, - params, -}: { - functionName: string; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - params: any[] | any; -}): IEither => { - if (!paramsValidators[functionName]) { - log(`This function ${functionName} is skipping params safe guarding.`); - return ERight(undefined); - } - - const paramValidators = paramsValidators[functionName](params); - - for (const validator of paramValidators) { - const validationResponse = validator.validate(); - if (validationResponse.type === EITHER_TYPE.ERROR) { - return validationResponse; - } - } - - return ERight(undefined); -}; - -export const paramsValidators: Record< - string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (params: any) => ParamsValidator[] -> = { - // ========== NO AUTH MATERIAL NEEDED FOR CLIENT SIDE ENCRYPTION ========== - encrypt: (params: EncryptRequest) => [ - new AccessControlConditionsValidator('encrypt', params), - ], - - encryptUint8Array: (params: EncryptUint8ArrayRequest) => [ - new AccessControlConditionsValidator('encryptUint8Array', params), - new Uint8ArrayValidator('encryptUint8Array', params.dataToEncrypt), - ], - - encryptFile: (params: EncryptFileRequest) => [ - new AccessControlConditionsValidator('encryptFile', params), - new FileValidator('encryptFile', params.file), - ], - - encryptString: (params: EncryptStringRequest) => [ - new AccessControlConditionsValidator('encryptString', params), - new StringValidator('encryptString', params.dataToEncrypt, 'dataToEncrypt'), - ], - - encryptToJson: (params: EncryptToJsonProps) => [ - new AccessControlConditionsValidator('encryptToJson', params), - new EncryptToJsonValidator('encryptToJson', params), - ], - - // ========== REQUIRED AUTH MATERIAL VALIDATORS ========== - executeJs: (params: JsonExecutionSdkParams) => [ - new AuthMaterialValidator('executeJs', params), - new ExecuteJsValidator('executeJs', params), - ], - - decrypt: (params: DecryptRequest) => [ - new AccessControlConditionsValidator('decrypt', params), - new AuthMaterialValidator('decrypt', params, true), - new StringValidator('decrypt', params.ciphertext, 'ciphertext'), - ], - - decryptFromJson: (params: DecryptFromJsonProps) => [ - new AuthMaterialValidator('decryptFromJson', params), - new DecryptFromJsonValidator('decryptFromJson', params.parsedJsonData), - ], -}; - -export type ParamsValidatorsType = typeof paramsValidators; - -//////////////////////// VALIDATORS //////////////////////// - -interface ParamsValidator { - validate: () => IEither; -} - -class EncryptToJsonValidator implements ParamsValidator { - private fnName: string; - private readonly params: EncryptToJsonProps; - - constructor(fnName: string, params: EncryptToJsonProps) { - this.fnName = fnName; - this.params = params; - } - - validate(): IEither { - const { file, string } = this.params; - - if (string === undefined && file === undefined) - return ELeft( - new InvalidParamType( - { - info: { - param: 'string', - value: string, - functionName: this.fnName, - }, - }, - 'Either string or file must be provided' - ) - ); - - if (string !== undefined && file !== undefined) - return ELeft( - new InvalidParamType( - { - info: { - param: 'string', - value: string, - functionName: this.fnName, - }, - }, - 'Provide only a "string" or "file" to encrypt; you cannot provide both' - ) - ); - - return ERight(undefined); - } -} - -class DecryptFromJsonValidator implements ParamsValidator { - private readonly fnName: string; - private readonly params: EncryptToJsonPayload; - - constructor(fnName: string, params: EncryptToJsonPayload) { - this.fnName = fnName; - this.params = params; - } - - validate(): IEither { - const validators = [new StringValidator(this.fnName, this.params.dataType)]; - - for (const validator of validators) { - const validationResponse = validator.validate(); - if (validationResponse.type === EITHER_TYPE.ERROR) { - return validationResponse; - } - } - - const { dataType } = this.params; - - if (dataType !== 'string' && dataType !== 'file') - return ELeft( - new InvalidArgumentException( - { - info: { - functionName: this.fnName, - dataType, - }, - }, - `dataType of %s is not valid. Must be 'string' or 'file'.`, - dataType - ) - ); - - return ERight(undefined); - } -} - -class Uint8ArrayValidator implements ParamsValidator { - private readonly fnName: string; - private readonly paramName: string; - private readonly uint8array?: Uint8Array; - - constructor( - fnName: string, - uint8array?: Uint8Array, - paramName: string = 'uint8array' - ) { - this.fnName = fnName; - this.paramName = paramName; - this.uint8array = uint8array; - } - - validate(): IEither { - if (!this.uint8array) { - return ELeft(new InvalidParamType({}, 'uint8array is undefined')); - } - - if ( - !checkType({ - value: this.uint8array, - allowedTypes: ['Uint8Array'], - paramName: this.paramName, - functionName: this.fnName, - }) - ) - return ELeft( - new InvalidParamType( - { - info: { - param: this.paramName, - value: this.uint8array, - functionName: this.fnName, - }, - }, - '%s is not a Uint8Array', - this.paramName - ) - ); - - return ERight(undefined); - } -} - -class StringValidator implements ParamsValidator { - private readonly fnName: string; - private readonly paramName: string; - private readonly checkIsHex: boolean; - private readonly str?: string; - - constructor( - fnName: string, - str?: string, - paramName: string = 'string', - checkIsHex: boolean = false - ) { - this.fnName = fnName; - this.paramName = paramName; - this.checkIsHex = checkIsHex; - this.str = str; - } - - validate(): IEither { - if (!this.str) { - return ELeft(new InvalidParamType({}, 'str is undefined')); - } - - if ( - !checkType({ - value: this.str, - allowedTypes: ['String'], - paramName: this.paramName, - functionName: this.fnName, - }) - ) - return ELeft( - new InvalidParamType( - { - info: { - param: this.paramName, - value: this.str, - functionName: this.fnName, - }, - }, - '%s is not a string', - this.paramName - ) - ); - - if (this.checkIsHex && !isHexString(this.str)) { - return ELeft( - new InvalidParamType( - { - info: { - param: this.paramName, - value: this.str, - functionName: this.fnName, - }, - }, - '%s is not a valid hex string', - this.paramName - ) - ); - } - - return ERight(undefined); - } -} - -interface ExecuteJsValidatorProps { - code?: string; - ipfsId?: string; -} - -class ExecuteJsValidator implements ParamsValidator { - private fnName: string; - private readonly params: ExecuteJsValidatorProps; - - constructor(fnName: string, params: ExecuteJsValidatorProps) { - this.fnName = fnName; - this.params = params; - } - - validate(): IEither { - const { code, ipfsId } = this.params; - - // -- validate: either 'code' or 'ipfsId' must exists - if (!code && !ipfsId) { - return ELeft( - new ParamsMissingError( - { - info: { - functionName: this.fnName, - params: this.params, - }, - }, - 'You must pass either code or ipfsId' - ) - ); - } - - // -- validate: 'code' and 'ipfsId' can't exists at the same time - if (code && ipfsId) { - return ELeft( - new ParamsMissingError( - { - info: { - functionName: this.fnName, - params: this.params, - }, - }, - "You cannot have both 'code' and 'ipfs' at the same time" - ) - ); - } - - return ERight(undefined); - } -} - -class FileValidator implements ParamsValidator { - private readonly fnName: string; - private readonly file?: AcceptedFileType; - - constructor(fnName: string, file?: AcceptedFileType) { - this.fnName = fnName; - this.file = file; - } - - validate(): IEither { - if (!this.file) { - return ELeft( - new InvalidArgumentException( - { - info: { - functionName: this.fnName, - file: this.file, - }, - }, - 'You must pass file param' - ) - ); - } - - const allowedTypes = ['Blob', 'File', 'Uint8Array']; - if ( - !checkType({ - value: this.file, - allowedTypes, - paramName: 'file', - functionName: this.fnName, - }) - ) - return ELeft( - new InvalidArgumentException( - { - info: { - functionName: this.fnName, - file: this.file, - allowedTypes, - }, - }, - 'File param is not a valid Blob or File object' - ) - ); - - return ERight(undefined); - } -} - -export interface AuthMaterialValidatorProps { - chain?: string; - authSig?: AuthSig; - authContext?: AuthenticationContext; -} - -class AuthMaterialValidator implements ParamsValidator { - private readonly fnName: string; - private readonly authMaterial: AuthMaterialValidatorProps; - private readonly checkIfAuthSigRequiresChainParam: boolean; - - constructor( - fnName: string, - params: AuthMaterialValidatorProps, - checkIfAuthSigRequiresChainParam: boolean = false - ) { - this.fnName = fnName; - this.authMaterial = params; - this.checkIfAuthSigRequiresChainParam = checkIfAuthSigRequiresChainParam; - } - - validate(): IEither { - const { authSig } = this.authMaterial; - - if (authSig && !is(authSig, 'Object', 'authSig', this.fnName)) - return ELeft( - new InvalidParamType( - { - info: { - param: 'authSig', - value: authSig, - functionName: this.fnName, - }, - }, - 'authSig is not an object' - ) - ); - - if (this.checkIfAuthSigRequiresChainParam) { - if (!this.authMaterial.chain) - return ELeft( - new InvalidArgumentException( - { - info: { - functionName: this.fnName, - chain: this.authMaterial.chain, - }, - }, - 'You must pass chain param' - ) - ); - - if ( - authSig && - !checkIfAuthSigRequiresChainParam( - authSig, - this.authMaterial.chain, - this.fnName - ) - ) - return ELeft( - new InvalidParamType( - { - info: { - param: 'authSig', - value: authSig, - functionName: this.fnName, - }, - }, - 'authSig is not valid' - ) - ); - } - - return ERight(undefined); - } -} - -export interface AccessControlConditionsValidatorProps { - accessControlConditions?: AccessControlConditions; - evmContractConditions?: EvmContractConditions; - solRpcConditions?: SolRpcConditions; - unifiedAccessControlConditions?: UnifiedAccessControlConditions; -} - -class AccessControlConditionsValidator implements ParamsValidator { - private readonly fnName: string; - private readonly conditions: AccessControlConditionsValidatorProps; - - constructor(fnName: string, params: AccessControlConditionsValidatorProps) { - this.fnName = fnName; - this.conditions = params; - } - - validate(): IEither { - const { - accessControlConditions, - evmContractConditions, - solRpcConditions, - unifiedAccessControlConditions, - } = this.conditions; - - if ( - accessControlConditions && - !is( - accessControlConditions, - 'Array', - 'accessControlConditions', - this.fnName - ) - ) - return ELeft( - new InvalidParamType( - { - info: { - param: 'accessControlConditions', - value: accessControlConditions, - functionName: this.fnName, - }, - }, - '%s is not an array', - 'accessControlConditions' - ) - ); - if ( - evmContractConditions && - !is(evmContractConditions, 'Array', 'evmContractConditions', this.fnName) - ) - return ELeft( - new InvalidParamType( - { - info: { - param: 'evmContractConditions', - value: evmContractConditions, - functionName: this.fnName, - }, - }, - '%s is not an array', - 'evmContractConditions' - ) - ); - - if ( - solRpcConditions && - !is(solRpcConditions, 'Array', 'solRpcConditions', this.fnName) - ) - return ELeft( - new InvalidParamType( - { - info: { - param: 'solRpcConditions', - value: solRpcConditions, - functionName: this.fnName, - }, - }, - '%s is not an array', - 'solRpcConditions' - ) - ); - - if ( - unifiedAccessControlConditions && - !is( - unifiedAccessControlConditions, - 'Array', - 'unifiedAccessControlConditions', - this.fnName - ) - ) - return ELeft( - new InvalidParamType( - { - info: { - param: 'unifiedAccessControlConditions', - value: unifiedAccessControlConditions, - functionName: this.fnName, - }, - }, - '%s is not an array', - 'unifiedAccessControlConditions' - ) - ); - - if ( - !accessControlConditions && - !evmContractConditions && - !solRpcConditions && - !unifiedAccessControlConditions - ) - return ELeft( - new InvalidArgumentException( - { - info: { - functionName: this.fnName, - conditions: this.conditions, - }, - }, - 'You must pass either accessControlConditions, evmContractConditions, solRpcConditions or unifiedAccessControlConditions' - ) - ); - - if ( - accessControlConditions && - !isValidBooleanExpression(accessControlConditions) - ) - return ELeft( - new InvalidBooleanException( - { - info: { - functionName: this.fnName, - accessControlConditions, - }, - }, - 'Invalid boolean Access Control Conditions' - ) - ); - - if ( - evmContractConditions && - !isValidBooleanExpression(evmContractConditions) - ) - return ELeft( - new InvalidBooleanException( - { - info: { - functionName: this.fnName, - evmContractConditions, - }, - }, - 'Invalid boolean EVM Access Control Conditions' - ) - ); - - if (solRpcConditions && !isValidBooleanExpression(solRpcConditions)) - return ELeft( - new InvalidBooleanException( - { - info: { - functionName: this.fnName, - solRpcConditions, - }, - }, - 'Invalid boolean Solana Access Control Conditions' - ) - ); - - if ( - unifiedAccessControlConditions && - !isValidBooleanExpression(unifiedAccessControlConditions) - ) - return ELeft( - new InvalidBooleanException( - { - info: { - functionName: this.fnName, - unifiedAccessControlConditions, - }, - }, - 'Invalid boolean Unified Access Control Conditions' - ) - ); - - return ERight(undefined); - } -} diff --git a/packages/misc/tsconfig.lib.json b/packages/misc/tsconfig.lib.json deleted file mode 100644 index e85ef50f65..0000000000 --- a/packages/misc/tsconfig.lib.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "declaration": true, - "types": [] - }, - "include": ["**/*.ts"], - "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"] -} diff --git a/packages/misc/tsconfig.spec.json b/packages/misc/tsconfig.spec.json deleted file mode 100644 index a2f7dd30d7..0000000000 --- a/packages/misc/tsconfig.spec.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "module": "commonjs", - "types": ["jest", "node"], - "allowJs": true - }, - "include": ["jest.config.ts", "**/*.test.ts", "**/*.spec.ts", "**/*.d.ts"] -} diff --git a/packages/nacl/.babelrc b/packages/nacl/.babelrc deleted file mode 100644 index 158083d278..0000000000 --- a/packages/nacl/.babelrc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "presets": [ - [ - "@nx/web/babel", - { - "useBuiltIns": "usage" - } - ] - ] -} diff --git a/packages/nacl/.eslintrc.json b/packages/nacl/.eslintrc.json deleted file mode 100644 index 9d9c0db55b..0000000000 --- a/packages/nacl/.eslintrc.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "extends": ["../../.eslintrc.json"], - "ignorePatterns": ["!**/*"], - "overrides": [ - { - "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], - "rules": {} - }, - { - "files": ["*.ts", "*.tsx"], - "rules": {} - }, - { - "files": ["*.js", "*.jsx"], - "rules": {} - } - ] -} diff --git a/packages/nacl/README.md b/packages/nacl/README.md deleted file mode 100644 index 9e0511c571..0000000000 --- a/packages/nacl/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Quick Start - -re-export of https://www.npmjs.com/package/nacl - -### node.js / browser - -``` -yarn add @lit-protocol/nacl -``` diff --git a/packages/nacl/project.json b/packages/nacl/project.json deleted file mode 100644 index d1447cfbfa..0000000000 --- a/packages/nacl/project.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "nacl", - "$schema": "../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "packages/nacl/src", - "projectType": "library", - "targets": { - "build": { - "executor": "@nx/js:tsc", - "outputs": ["{options.outputPath}"], - "options": { - "outputPath": "dist/packages/nacl", - "main": "packages/nacl/src/index.ts", - "tsConfig": "packages/nacl/tsconfig.lib.json", - "assets": ["packages/nacl/*.md"], - "updateBuildableProjectDepsInPackageJson": true - } - }, - "lint": { - "executor": "@nx/linter:eslint", - "outputs": ["{options.outputFile}"], - "options": { - "lintFilePatterns": ["packages/nacl/**/*.ts"] - } - }, - "test": { - "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/nacl"], - "options": { - "jestConfig": "packages/nacl/jest.config.ts", - "passWithNoTests": true - } - }, - "testWatch": { - "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/nacl"], - "options": { - "jestConfig": "packages/nacl/jest.config.ts", - "passWithNoTests": true, - "watch": true - } - } - }, - "tags": [] -} diff --git a/packages/nacl/src/index.ts b/packages/nacl/src/index.ts deleted file mode 100644 index 00965a232d..0000000000 --- a/packages/nacl/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './lib/nacl'; diff --git a/packages/nacl/src/lib/nacl.spec.ts b/packages/nacl/src/lib/nacl.spec.ts deleted file mode 100644 index 6b2a39e9b3..0000000000 --- a/packages/nacl/src/lib/nacl.spec.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { nacl } from './nacl'; - -describe('nacl', () => { - it('should work', () => { - expect(Object.keys(nacl)).toEqual([ - 'lowlevel', - 'randomBytes', - 'secretbox', - 'scalarMult', - 'box', - 'sign', - 'hash', - 'verify', - 'setPRNG', - ]); - }); -}); diff --git a/packages/nacl/src/lib/nacl.ts b/packages/nacl/src/lib/nacl.ts deleted file mode 100644 index be2faaf584..0000000000 --- a/packages/nacl/src/lib/nacl.ts +++ /dev/null @@ -1,1331 +0,0 @@ -// @ts-nocheck -// Ported in 2014 by Dmitry Chestnykh and Devi Mandiri. -// Public domain. -// -// Implementation derived from TweetNaCl version 20140427. -// See for details: http://tweet_nacl.cr.yp.to/ - -var u64 = function (h, l) { - this.hi = h | (0 >>> 0); - this.lo = l | (0 >>> 0); -}; -var gf = function (init) { - var i, - r = new Float64Array(16); - if (init) for (i = 0; i < init.length; i++) r[i] = init[i]; - return r; -}; - -// Pluggable, initialized in high-level API below. -var randombytes = function (/* x, n */) { - throw new Error('no PRNG'); -}; - -var _0 = new Uint8Array(16); -var _9 = new Uint8Array(32); -_9[0] = 9; - -var gf0 = gf(), - gf1 = gf([1]), - _121665 = gf([0xdb41, 1]), - D = gf([ - 0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, - 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203, - ]), - D2 = gf([ - 0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, - 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406, - ]), - X = gf([ - 0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, - 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169, - ]), - Y = gf([ - 0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, - 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, - ]), - I = gf([ - 0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, - 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83, - ]); - -function L32(x, c) { - return (x << c) | (x >>> (32 - c)); -} - -function ld32(x, i) { - var u = x[i + 3] & 0xff; - u = (u << 8) | (x[i + 2] & 0xff); - u = (u << 8) | (x[i + 1] & 0xff); - return (u << 8) | (x[i + 0] & 0xff); -} - -function dl64(x, i) { - var h = (x[i] << 24) | (x[i + 1] << 16) | (x[i + 2] << 8) | x[i + 3]; - var l = (x[i + 4] << 24) | (x[i + 5] << 16) | (x[i + 6] << 8) | x[i + 7]; - return new u64(h, l); -} - -function st32(x, j, u) { - var i; - for (i = 0; i < 4; i++) { - x[j + i] = u & 255; - u >>>= 8; - } -} - -function ts64(x, i, u) { - x[i] = (u.hi >> 24) & 0xff; - x[i + 1] = (u.hi >> 16) & 0xff; - x[i + 2] = (u.hi >> 8) & 0xff; - x[i + 3] = u.hi & 0xff; - x[i + 4] = (u.lo >> 24) & 0xff; - x[i + 5] = (u.lo >> 16) & 0xff; - x[i + 6] = (u.lo >> 8) & 0xff; - x[i + 7] = u.lo & 0xff; -} - -function vn(x, xi, y, yi, n) { - var i, - d = 0; - for (i = 0; i < n; i++) d |= x[xi + i] ^ y[yi + i]; - return (1 & ((d - 1) >>> 8)) - 1; -} - -function crypto_verify_16(x, xi, y, yi) { - return vn(x, xi, y, yi, 16); -} - -function crypto_verify_32(x, xi, y, yi) { - return vn(x, xi, y, yi, 32); -} - -function core(out, inp, k, c, h) { - var w = new Uint32Array(16), - x = new Uint32Array(16), - y = new Uint32Array(16), - t = new Uint32Array(4); - var i, j, m; - - for (i = 0; i < 4; i++) { - x[5 * i] = ld32(c, 4 * i); - x[1 + i] = ld32(k, 4 * i); - x[6 + i] = ld32(inp, 4 * i); - x[11 + i] = ld32(k, 16 + 4 * i); - } - - for (i = 0; i < 16; i++) y[i] = x[i]; - - for (i = 0; i < 20; i++) { - for (j = 0; j < 4; j++) { - for (m = 0; m < 4; m++) t[m] = x[(5 * j + 4 * m) % 16]; - t[1] ^= L32((t[0] + t[3]) | 0, 7); - t[2] ^= L32((t[1] + t[0]) | 0, 9); - t[3] ^= L32((t[2] + t[1]) | 0, 13); - t[0] ^= L32((t[3] + t[2]) | 0, 18); - for (m = 0; m < 4; m++) w[4 * j + ((j + m) % 4)] = t[m]; - } - for (m = 0; m < 16; m++) x[m] = w[m]; - } - - if (h) { - for (i = 0; i < 16; i++) x[i] = (x[i] + y[i]) | 0; - for (i = 0; i < 4; i++) { - x[5 * i] = (x[5 * i] - ld32(c, 4 * i)) | 0; - x[6 + i] = (x[6 + i] - ld32(inp, 4 * i)) | 0; - } - for (i = 0; i < 4; i++) { - st32(out, 4 * i, x[5 * i]); - st32(out, 16 + 4 * i, x[6 + i]); - } - } else { - for (i = 0; i < 16; i++) st32(out, 4 * i, (x[i] + y[i]) | 0); - } -} - -function crypto_core_salsa20(out, inp, k, c) { - core(out, inp, k, c, false); - return 0; -} - -function crypto_core_hsalsa20(out, inp, k, c) { - core(out, inp, k, c, true); - return 0; -} - -var sigma = new Uint8Array([ - 101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107, -]); -// "expand 32-byte k" - -function crypto_stream_salsa20_xor(c, cpos, m, mpos, b, n, k) { - var z = new Uint8Array(16), - x = new Uint8Array(64); - var u, i; - if (!b) return 0; - for (i = 0; i < 16; i++) z[i] = 0; - for (i = 0; i < 8; i++) z[i] = n[i]; - while (b >= 64) { - crypto_core_salsa20(x, z, k, sigma); - for (i = 0; i < 64; i++) c[cpos + i] = (m ? m[mpos + i] : 0) ^ x[i]; - u = 1; - for (i = 8; i < 16; i++) { - u = (u + (z[i] & 0xff)) | 0; - z[i] = u & 0xff; - u >>>= 8; - } - b -= 64; - cpos += 64; - if (m) mpos += 64; - } - if (b > 0) { - crypto_core_salsa20(x, z, k, sigma); - for (i = 0; i < b; i++) c[cpos + i] = (m ? m[mpos + i] : 0) ^ x[i]; - } - return 0; -} - -function crypto_stream_salsa20(c, cpos, d, n, k) { - return crypto_stream_salsa20_xor(c, cpos, null, 0, d, n, k); -} - -function crypto_stream(c, cpos, d, n, k) { - var s = new Uint8Array(32); - crypto_core_hsalsa20(s, n, k, sigma); - return crypto_stream_salsa20(c, cpos, d, n.subarray(16), s); -} - -function crypto_stream_xor(c, cpos, m, mpos, d, n, k) { - var s = new Uint8Array(32); - crypto_core_hsalsa20(s, n, k, sigma); - return crypto_stream_salsa20_xor(c, cpos, m, mpos, d, n.subarray(16), s); -} - -function add1305(h, c) { - var j, - u = 0; - for (j = 0; j < 17; j++) { - u = (u + ((h[j] + c[j]) | 0)) | 0; - h[j] = u & 255; - u >>>= 8; - } -} - -var minusp = new Uint32Array([ - 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 252, -]); - -function crypto_onetimeauth(out, outpos, m, mpos, n, k) { - var s, i, j, u; - var x = new Uint32Array(17), - r = new Uint32Array(17), - h = new Uint32Array(17), - c = new Uint32Array(17), - g = new Uint32Array(17); - for (j = 0; j < 17; j++) r[j] = h[j] = 0; - for (j = 0; j < 16; j++) r[j] = k[j]; - r[3] &= 15; - r[4] &= 252; - r[7] &= 15; - r[8] &= 252; - r[11] &= 15; - r[12] &= 252; - r[15] &= 15; - - while (n > 0) { - for (j = 0; j < 17; j++) c[j] = 0; - for (j = 0; j < 16 && j < n; ++j) c[j] = m[mpos + j]; - c[j] = 1; - mpos += j; - n -= j; - add1305(h, c); - for (i = 0; i < 17; i++) { - x[i] = 0; - for (j = 0; j < 17; j++) - x[i] = - (x[i] + h[j] * (j <= i ? r[i - j] : (320 * r[i + 17 - j]) | 0)) | - 0 | - 0; - } - for (i = 0; i < 17; i++) h[i] = x[i]; - u = 0; - for (j = 0; j < 16; j++) { - u = (u + h[j]) | 0; - h[j] = u & 255; - u >>>= 8; - } - u = (u + h[16]) | 0; - h[16] = u & 3; - u = (5 * (u >>> 2)) | 0; - for (j = 0; j < 16; j++) { - u = (u + h[j]) | 0; - h[j] = u & 255; - u >>>= 8; - } - u = (u + h[16]) | 0; - h[16] = u; - } - - for (j = 0; j < 17; j++) g[j] = h[j]; - add1305(h, minusp); - s = -(h[16] >>> 7) | 0; - for (j = 0; j < 17; j++) h[j] ^= s & (g[j] ^ h[j]); - - for (j = 0; j < 16; j++) c[j] = k[j + 16]; - c[16] = 0; - add1305(h, c); - for (j = 0; j < 16; j++) out[outpos + j] = h[j]; - return 0; -} - -function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) { - var x = new Uint8Array(16); - crypto_onetimeauth(x, 0, m, mpos, n, k); - return crypto_verify_16(h, hpos, x, 0); -} - -function crypto_secretbox(c, m, d, n, k) { - var i; - if (d < 32) return -1; - crypto_stream_xor(c, 0, m, 0, d, n, k); - crypto_onetimeauth(c, 16, c, 32, d - 32, c); - for (i = 0; i < 16; i++) c[i] = 0; - return 0; -} - -function crypto_secretbox_open(m, c, d, n, k) { - var i; - var x = new Uint8Array(32); - if (d < 32) return -1; - crypto_stream(x, 0, 32, n, k); - if (crypto_onetimeauth_verify(c, 16, c, 32, d - 32, x) !== 0) return -1; - crypto_stream_xor(m, 0, c, 0, d, n, k); - for (i = 0; i < 32; i++) m[i] = 0; - return 0; -} - -function set25519(r, a) { - var i; - for (i = 0; i < 16; i++) r[i] = a[i] | 0; -} - -function car25519(o) { - var c; - var i; - for (i = 0; i < 16; i++) { - o[i] += 65536; - c = Math.floor(o[i] / 65536); - o[(i + 1) * (i < 15 ? 1 : 0)] += c - 1 + 37 * (c - 1) * (i === 15 ? 1 : 0); - o[i] -= c * 65536; - } -} - -function sel25519(p, q, b) { - var t, - c = ~(b - 1); - for (var i = 0; i < 16; i++) { - t = c & (p[i] ^ q[i]); - p[i] ^= t; - q[i] ^= t; - } -} - -function pack25519(o, n) { - var i, j, b; - var m = gf(), - t = gf(); - for (i = 0; i < 16; i++) t[i] = n[i]; - car25519(t); - car25519(t); - car25519(t); - for (j = 0; j < 2; j++) { - m[0] = t[0] - 0xffed; - for (i = 1; i < 15; i++) { - m[i] = t[i] - 0xffff - ((m[i - 1] >> 16) & 1); - m[i - 1] &= 0xffff; - } - m[15] = t[15] - 0x7fff - ((m[14] >> 16) & 1); - b = (m[15] >> 16) & 1; - m[14] &= 0xffff; - sel25519(t, m, 1 - b); - } - for (i = 0; i < 16; i++) { - o[2 * i] = t[i] & 0xff; - o[2 * i + 1] = t[i] >> 8; - } -} - -function neq25519(a, b) { - var c = new Uint8Array(32), - d = new Uint8Array(32); - pack25519(c, a); - pack25519(d, b); - return crypto_verify_32(c, 0, d, 0); -} - -function par25519(a) { - var d = new Uint8Array(32); - pack25519(d, a); - return d[0] & 1; -} - -function unpack25519(o, n) { - var i; - for (i = 0; i < 16; i++) o[i] = n[2 * i] + (n[2 * i + 1] << 8); - o[15] &= 0x7fff; -} - -function A(o, a, b) { - var i; - for (i = 0; i < 16; i++) o[i] = (a[i] + b[i]) | 0; -} - -function Z(o, a, b) { - var i; - for (i = 0; i < 16; i++) o[i] = (a[i] - b[i]) | 0; -} - -function M(o, a, b) { - var i, - j, - t = new Float64Array(31); - for (i = 0; i < 31; i++) t[i] = 0; - for (i = 0; i < 16; i++) { - for (j = 0; j < 16; j++) { - t[i + j] += a[i] * b[j]; - } - } - for (i = 0; i < 15; i++) { - t[i] += 38 * t[i + 16]; - } - for (i = 0; i < 16; i++) o[i] = t[i]; - car25519(o); - car25519(o); -} - -function S(o, a) { - M(o, a, a); -} - -function inv25519(o, i) { - var c = gf(); - var a; - for (a = 0; a < 16; a++) c[a] = i[a]; - for (a = 253; a >= 0; a--) { - S(c, c); - if (a !== 2 && a !== 4) M(c, c, i); - } - for (a = 0; a < 16; a++) o[a] = c[a]; -} - -function pow2523(o, i) { - var c = gf(); - var a; - for (a = 0; a < 16; a++) c[a] = i[a]; - for (a = 250; a >= 0; a--) { - S(c, c); - if (a !== 1) M(c, c, i); - } - for (a = 0; a < 16; a++) o[a] = c[a]; -} - -function crypto_scalarmult(q, n, p) { - var z = new Uint8Array(32); - var x = new Float64Array(80), - r, - i; - var a = gf(), - b = gf(), - c = gf(), - d = gf(), - e = gf(), - f = gf(); - for (i = 0; i < 31; i++) z[i] = n[i]; - z[31] = (n[31] & 127) | 64; - z[0] &= 248; - unpack25519(x, p); - for (i = 0; i < 16; i++) { - b[i] = x[i]; - d[i] = a[i] = c[i] = 0; - } - a[0] = d[0] = 1; - for (i = 254; i >= 0; --i) { - r = (z[i >>> 3] >>> (i & 7)) & 1; - sel25519(a, b, r); - sel25519(c, d, r); - A(e, a, c); - Z(a, a, c); - A(c, b, d); - Z(b, b, d); - S(d, e); - S(f, a); - M(a, c, a); - M(c, b, e); - A(e, a, c); - Z(a, a, c); - S(b, a); - Z(c, d, f); - M(a, c, _121665); - A(a, a, d); - M(c, c, a); - M(a, d, f); - M(d, b, x); - S(b, e); - sel25519(a, b, r); - sel25519(c, d, r); - } - for (i = 0; i < 16; i++) { - x[i + 16] = a[i]; - x[i + 32] = c[i]; - x[i + 48] = b[i]; - x[i + 64] = d[i]; - } - var x32 = x.subarray(32); - var x16 = x.subarray(16); - inv25519(x32, x32); - M(x16, x16, x32); - pack25519(q, x16); - return 0; -} - -function crypto_scalarmult_base(q, n) { - return crypto_scalarmult(q, n, _9); -} - -function crypto_box_keypair(y, x) { - randombytes(x, 32); - return crypto_scalarmult_base(y, x); -} - -function crypto_box_beforenm(k, y, x) { - var s = new Uint8Array(32); - crypto_scalarmult(s, x, y); - return crypto_core_hsalsa20(k, _0, s, sigma); -} - -var crypto_box_afternm = crypto_secretbox; -var crypto_box_open_afternm = crypto_secretbox_open; - -function crypto_box(c, m, d, n, y, x) { - var k = new Uint8Array(32); - crypto_box_beforenm(k, y, x); - return crypto_box_afternm(c, m, d, n, k); -} - -function crypto_box_open(m, c, d, n, y, x) { - var k = new Uint8Array(32); - crypto_box_beforenm(k, y, x); - return crypto_box_open_afternm(m, c, d, n, k); -} - -function add64() { - var a = 0, - b = 0, - c = 0, - d = 0, - m16 = 65535, - l, - h, - i; - for (i = 0; i < arguments.length; i++) { - l = arguments[i].lo; - h = arguments[i].hi; - a += l & m16; - b += l >>> 16; - c += h & m16; - d += h >>> 16; - } - - b += a >>> 16; - c += b >>> 16; - d += c >>> 16; - - return new u64((c & m16) | (d << 16), (a & m16) | (b << 16)); -} - -function shr64(x, c) { - return new u64(x.hi >>> c, (x.lo >>> c) | (x.hi << (32 - c))); -} - -function xor64() { - var l = 0, - h = 0, - i; - for (i = 0; i < arguments.length; i++) { - l ^= arguments[i].lo; - h ^= arguments[i].hi; - } - return new u64(h, l); -} - -function R(x, c) { - var h, - l, - c1 = 32 - c; - if (c < 32) { - h = (x.hi >>> c) | (x.lo << c1); - l = (x.lo >>> c) | (x.hi << c1); - } else if (c < 64) { - h = (x.lo >>> c) | (x.hi << c1); - l = (x.hi >>> c) | (x.lo << c1); - } - return new u64(h, l); -} - -function Ch(x, y, z) { - var h = (x.hi & y.hi) ^ (~x.hi & z.hi), - l = (x.lo & y.lo) ^ (~x.lo & z.lo); - return new u64(h, l); -} - -function Maj(x, y, z) { - var h = (x.hi & y.hi) ^ (x.hi & z.hi) ^ (y.hi & z.hi), - l = (x.lo & y.lo) ^ (x.lo & z.lo) ^ (y.lo & z.lo); - return new u64(h, l); -} - -function Sigma0(x) { - return xor64(R(x, 28), R(x, 34), R(x, 39)); -} -function Sigma1(x) { - return xor64(R(x, 14), R(x, 18), R(x, 41)); -} -function sigma0(x) { - return xor64(R(x, 1), R(x, 8), shr64(x, 7)); -} -function sigma1(x) { - return xor64(R(x, 19), R(x, 61), shr64(x, 6)); -} - -var K = [ - new u64(0x428a2f98, 0xd728ae22), - new u64(0x71374491, 0x23ef65cd), - new u64(0xb5c0fbcf, 0xec4d3b2f), - new u64(0xe9b5dba5, 0x8189dbbc), - new u64(0x3956c25b, 0xf348b538), - new u64(0x59f111f1, 0xb605d019), - new u64(0x923f82a4, 0xaf194f9b), - new u64(0xab1c5ed5, 0xda6d8118), - new u64(0xd807aa98, 0xa3030242), - new u64(0x12835b01, 0x45706fbe), - new u64(0x243185be, 0x4ee4b28c), - new u64(0x550c7dc3, 0xd5ffb4e2), - new u64(0x72be5d74, 0xf27b896f), - new u64(0x80deb1fe, 0x3b1696b1), - new u64(0x9bdc06a7, 0x25c71235), - new u64(0xc19bf174, 0xcf692694), - new u64(0xe49b69c1, 0x9ef14ad2), - new u64(0xefbe4786, 0x384f25e3), - new u64(0x0fc19dc6, 0x8b8cd5b5), - new u64(0x240ca1cc, 0x77ac9c65), - new u64(0x2de92c6f, 0x592b0275), - new u64(0x4a7484aa, 0x6ea6e483), - new u64(0x5cb0a9dc, 0xbd41fbd4), - new u64(0x76f988da, 0x831153b5), - new u64(0x983e5152, 0xee66dfab), - new u64(0xa831c66d, 0x2db43210), - new u64(0xb00327c8, 0x98fb213f), - new u64(0xbf597fc7, 0xbeef0ee4), - new u64(0xc6e00bf3, 0x3da88fc2), - new u64(0xd5a79147, 0x930aa725), - new u64(0x06ca6351, 0xe003826f), - new u64(0x14292967, 0x0a0e6e70), - new u64(0x27b70a85, 0x46d22ffc), - new u64(0x2e1b2138, 0x5c26c926), - new u64(0x4d2c6dfc, 0x5ac42aed), - new u64(0x53380d13, 0x9d95b3df), - new u64(0x650a7354, 0x8baf63de), - new u64(0x766a0abb, 0x3c77b2a8), - new u64(0x81c2c92e, 0x47edaee6), - new u64(0x92722c85, 0x1482353b), - new u64(0xa2bfe8a1, 0x4cf10364), - new u64(0xa81a664b, 0xbc423001), - new u64(0xc24b8b70, 0xd0f89791), - new u64(0xc76c51a3, 0x0654be30), - new u64(0xd192e819, 0xd6ef5218), - new u64(0xd6990624, 0x5565a910), - new u64(0xf40e3585, 0x5771202a), - new u64(0x106aa070, 0x32bbd1b8), - new u64(0x19a4c116, 0xb8d2d0c8), - new u64(0x1e376c08, 0x5141ab53), - new u64(0x2748774c, 0xdf8eeb99), - new u64(0x34b0bcb5, 0xe19b48a8), - new u64(0x391c0cb3, 0xc5c95a63), - new u64(0x4ed8aa4a, 0xe3418acb), - new u64(0x5b9cca4f, 0x7763e373), - new u64(0x682e6ff3, 0xd6b2b8a3), - new u64(0x748f82ee, 0x5defb2fc), - new u64(0x78a5636f, 0x43172f60), - new u64(0x84c87814, 0xa1f0ab72), - new u64(0x8cc70208, 0x1a6439ec), - new u64(0x90befffa, 0x23631e28), - new u64(0xa4506ceb, 0xde82bde9), - new u64(0xbef9a3f7, 0xb2c67915), - new u64(0xc67178f2, 0xe372532b), - new u64(0xca273ece, 0xea26619c), - new u64(0xd186b8c7, 0x21c0c207), - new u64(0xeada7dd6, 0xcde0eb1e), - new u64(0xf57d4f7f, 0xee6ed178), - new u64(0x06f067aa, 0x72176fba), - new u64(0x0a637dc5, 0xa2c898a6), - new u64(0x113f9804, 0xbef90dae), - new u64(0x1b710b35, 0x131c471b), - new u64(0x28db77f5, 0x23047d84), - new u64(0x32caab7b, 0x40c72493), - new u64(0x3c9ebe0a, 0x15c9bebc), - new u64(0x431d67c4, 0x9c100d4c), - new u64(0x4cc5d4be, 0xcb3e42b6), - new u64(0x597f299c, 0xfc657e2a), - new u64(0x5fcb6fab, 0x3ad6faec), - new u64(0x6c44198c, 0x4a475817), -]; - -function crypto_hashblocks(x, m, n) { - var z = [], - b = [], - a = [], - w = [], - t, - i, - j; - - for (i = 0; i < 8; i++) z[i] = a[i] = dl64(x, 8 * i); - - var pos = 0; - while (n >= 128) { - for (i = 0; i < 16; i++) w[i] = dl64(m, 8 * i + pos); - for (i = 0; i < 80; i++) { - for (j = 0; j < 8; j++) b[j] = a[j]; - t = add64(a[7], Sigma1(a[4]), Ch(a[4], a[5], a[6]), K[i], w[i % 16]); - b[7] = add64(t, Sigma0(a[0]), Maj(a[0], a[1], a[2])); - b[3] = add64(b[3], t); - for (j = 0; j < 8; j++) a[(j + 1) % 8] = b[j]; - if (i % 16 === 15) { - for (j = 0; j < 16; j++) { - w[j] = add64( - w[j], - w[(j + 9) % 16], - sigma0(w[(j + 1) % 16]), - sigma1(w[(j + 14) % 16]) - ); - } - } - } - - for (i = 0; i < 8; i++) { - a[i] = add64(a[i], z[i]); - z[i] = a[i]; - } - - pos += 128; - n -= 128; - } - - for (i = 0; i < 8; i++) ts64(x, 8 * i, z[i]); - return n; -} - -var iv = new Uint8Array([ - 0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae, 0x85, 0x84, - 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94, 0xf8, 0x2b, 0xa5, 0x4f, - 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51, 0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, - 0xd1, 0x9b, 0x05, 0x68, 0x8c, 0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, - 0xfb, 0x41, 0xbd, 0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79, -]); - -function crypto_hash(out, m, n) { - var h = new Uint8Array(64), - x = new Uint8Array(256); - var i, - b = n; - - for (i = 0; i < 64; i++) h[i] = iv[i]; - - crypto_hashblocks(h, m, n); - n %= 128; - - for (i = 0; i < 256; i++) x[i] = 0; - for (i = 0; i < n; i++) x[i] = m[b - n + i]; - x[n] = 128; - - n = 256 - 128 * (n < 112 ? 1 : 0); - x[n - 9] = 0; - ts64(x, n - 8, new u64((b / 0x20000000) | 0, b << 3)); - crypto_hashblocks(h, x, n); - - for (i = 0; i < 64; i++) out[i] = h[i]; - - return 0; -} - -function add(p, q) { - var a = gf(), - b = gf(), - c = gf(), - d = gf(), - e = gf(), - f = gf(), - g = gf(), - h = gf(), - t = gf(); - - Z(a, p[1], p[0]); - Z(t, q[1], q[0]); - M(a, a, t); - A(b, p[0], p[1]); - A(t, q[0], q[1]); - M(b, b, t); - M(c, p[3], q[3]); - M(c, c, D2); - M(d, p[2], q[2]); - A(d, d, d); - Z(e, b, a); - Z(f, d, c); - A(g, d, c); - A(h, b, a); - - M(p[0], e, f); - M(p[1], h, g); - M(p[2], g, f); - M(p[3], e, h); -} - -function cswap(p, q, b) { - var i; - for (i = 0; i < 4; i++) { - sel25519(p[i], q[i], b); - } -} - -function pack(r, p) { - var tx = gf(), - ty = gf(), - zi = gf(); - inv25519(zi, p[2]); - M(tx, p[0], zi); - M(ty, p[1], zi); - pack25519(r, ty); - r[31] ^= par25519(tx) << 7; -} - -function scalarmult(p, q, s) { - var b, i; - set25519(p[0], gf0); - set25519(p[1], gf1); - set25519(p[2], gf1); - set25519(p[3], gf0); - for (i = 255; i >= 0; --i) { - b = (s[(i / 8) | 0] >> (i & 7)) & 1; - cswap(p, q, b); - add(q, p); - add(p, p); - cswap(p, q, b); - } -} - -function scalarbase(p, s) { - var q = [gf(), gf(), gf(), gf()]; - set25519(q[0], X); - set25519(q[1], Y); - set25519(q[2], gf1); - M(q[3], X, Y); - scalarmult(p, q, s); -} - -function crypto_sign_keypair(pk, sk, seeded) { - var d = new Uint8Array(64); - var p = [gf(), gf(), gf(), gf()]; - var i; - - if (!seeded) randombytes(sk, 32); - crypto_hash(d, sk, 32); - d[0] &= 248; - d[31] &= 127; - d[31] |= 64; - - scalarbase(p, d); - pack(pk, p); - - for (i = 0; i < 32; i++) sk[i + 32] = pk[i]; - return 0; -} - -var L = new Float64Array([ - 0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, - 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10, -]); - -function modL(r, x) { - var carry, i, j, k; - for (i = 63; i >= 32; --i) { - carry = 0; - for (j = i - 32, k = i - 12; j < k; ++j) { - x[j] += carry - 16 * x[i] * L[j - (i - 32)]; - carry = Math.floor((x[j] + 128) / 256); - x[j] -= carry * 256; - } - x[j] += carry; - x[i] = 0; - } - carry = 0; - for (j = 0; j < 32; j++) { - x[j] += carry - (x[31] >> 4) * L[j]; - carry = x[j] >> 8; - x[j] &= 255; - } - for (j = 0; j < 32; j++) x[j] -= carry * L[j]; - for (i = 0; i < 32; i++) { - x[i + 1] += x[i] >> 8; - r[i] = x[i] & 255; - } -} - -function reduce(r) { - var x = new Float64Array(64), - i; - for (i = 0; i < 64; i++) x[i] = r[i]; - for (i = 0; i < 64; i++) r[i] = 0; - modL(r, x); -} - -// Note: difference from C - smlen returned, not passed as argument. -function crypto_sign(sm, m, n, sk) { - var d = new Uint8Array(64), - h = new Uint8Array(64), - r = new Uint8Array(64); - var i, - j, - x = new Float64Array(64); - var p = [gf(), gf(), gf(), gf()]; - - crypto_hash(d, sk, 32); - d[0] &= 248; - d[31] &= 127; - d[31] |= 64; - - var smlen = n + 64; - for (i = 0; i < n; i++) sm[64 + i] = m[i]; - for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i]; - - crypto_hash(r, sm.subarray(32), n + 32); - reduce(r); - scalarbase(p, r); - pack(sm, p); - - for (i = 32; i < 64; i++) sm[i] = sk[i]; - crypto_hash(h, sm, n + 64); - reduce(h); - - for (i = 0; i < 64; i++) x[i] = 0; - for (i = 0; i < 32; i++) x[i] = r[i]; - for (i = 0; i < 32; i++) { - for (j = 0; j < 32; j++) { - x[i + j] += h[i] * d[j]; - } - } - - modL(sm.subarray(32), x); - return smlen; -} - -function unpackneg(r, p) { - var t = gf(), - chk = gf(), - num = gf(), - den = gf(), - den2 = gf(), - den4 = gf(), - den6 = gf(); - - set25519(r[2], gf1); - unpack25519(r[1], p); - S(num, r[1]); - M(den, num, D); - Z(num, num, r[2]); - A(den, r[2], den); - - S(den2, den); - S(den4, den2); - M(den6, den4, den2); - M(t, den6, num); - M(t, t, den); - - pow2523(t, t); - M(t, t, num); - M(t, t, den); - M(t, t, den); - M(r[0], t, den); - - S(chk, r[0]); - M(chk, chk, den); - if (neq25519(chk, num)) M(r[0], r[0], I); - - S(chk, r[0]); - M(chk, chk, den); - if (neq25519(chk, num)) return -1; - - if (par25519(r[0]) === p[31] >> 7) Z(r[0], gf0, r[0]); - - M(r[3], r[0], r[1]); - return 0; -} - -function crypto_sign_open(m, sm, n, pk) { - var i; - var t = new Uint8Array(32), - h = new Uint8Array(64); - var p = [gf(), gf(), gf(), gf()], - q = [gf(), gf(), gf(), gf()]; - - if (n < 64) return -1; - - if (unpackneg(q, pk)) return -1; - - for (i = 0; i < n; i++) m[i] = sm[i]; - for (i = 0; i < 32; i++) m[i + 32] = pk[i]; - crypto_hash(h, m, n); - reduce(h); - scalarmult(p, q, h); - - scalarbase(q, sm.subarray(32)); - add(p, q); - pack(t, p); - - n -= 64; - if (crypto_verify_32(sm, 0, t, 0)) { - for (i = 0; i < n; i++) m[i] = 0; - return -1; - } - - for (i = 0; i < n; i++) m[i] = sm[i + 64]; - return n; -} - -var crypto_secretbox_KEYBYTES = 32, - crypto_secretbox_NONCEBYTES = 24, - crypto_secretbox_ZEROBYTES = 32, - crypto_secretbox_BOXZEROBYTES = 16, - crypto_scalarmult_BYTES = 32, - crypto_scalarmult_SCALARBYTES = 32, - crypto_box_PUBLICKEYBYTES = 32, - crypto_box_SECRETKEYBYTES = 32, - crypto_box_BEFORENMBYTES = 32, - crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES, - crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES, - crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES, - crypto_sign_BYTES = 64, - crypto_sign_PUBLICKEYBYTES = 32, - crypto_sign_SECRETKEYBYTES = 64, - crypto_sign_SEEDBYTES = 32, - crypto_hash_BYTES = 64; - -var _nacl = { - lowlevel: {}, -}; - -_nacl.lowlevel = { - crypto_core_hsalsa20: crypto_core_hsalsa20, - crypto_stream_xor: crypto_stream_xor, - crypto_stream: crypto_stream, - crypto_stream_salsa20_xor: crypto_stream_salsa20_xor, - crypto_stream_salsa20: crypto_stream_salsa20, - crypto_onetimeauth: crypto_onetimeauth, - crypto_onetimeauth_verify: crypto_onetimeauth_verify, - crypto_verify_16: crypto_verify_16, - crypto_verify_32: crypto_verify_32, - crypto_secretbox: crypto_secretbox, - crypto_secretbox_open: crypto_secretbox_open, - crypto_scalarmult: crypto_scalarmult, - crypto_scalarmult_base: crypto_scalarmult_base, - crypto_box_beforenm: crypto_box_beforenm, - crypto_box_afternm: crypto_box_afternm, - crypto_box: crypto_box, - crypto_box_open: crypto_box_open, - crypto_box_keypair: crypto_box_keypair, - crypto_hash: crypto_hash, - crypto_sign: crypto_sign, - crypto_sign_keypair: crypto_sign_keypair, - crypto_sign_open: crypto_sign_open, - - crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES, - crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES, - crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES, - crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES, - crypto_scalarmult_BYTES: crypto_scalarmult_BYTES, - crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES, - crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES, - crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES, - crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES, - crypto_box_NONCEBYTES: crypto_box_NONCEBYTES, - crypto_box_ZEROBYTES: crypto_box_ZEROBYTES, - crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES, - crypto_sign_BYTES: crypto_sign_BYTES, - crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES, - crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES, - crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES, - crypto_hash_BYTES: crypto_hash_BYTES, - - gf: gf, - D: D, - L: L, - pack25519: pack25519, - unpack25519: unpack25519, - M: M, - A: A, - S: S, - Z: Z, - pow2523: pow2523, - add: add, - set25519: set25519, - modL: modL, - scalarmult: scalarmult, - scalarbase: scalarbase, -}; - -/* High-level API */ - -function checkLengths(k, n) { - if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size'); - if (n.length !== crypto_secretbox_NONCEBYTES) - throw new Error('bad nonce size'); -} - -function checkBoxLengths(pk, sk) { - if (pk.length !== crypto_box_PUBLICKEYBYTES) - throw new Error('bad public key size'); - if (sk.length !== crypto_box_SECRETKEYBYTES) - throw new Error('bad secret key size'); -} - -function checkArrayTypes() { - for (var i = 0; i < arguments.length; i++) { - if (!(arguments[i] instanceof Uint8Array)) - throw new TypeError('unexpected type, use Uint8Array'); - } -} - -function cleanup(arr) { - for (var i = 0; i < arr.length; i++) arr[i] = 0; -} - -_nacl.randomBytes = function (n) { - var b = new Uint8Array(n); - randombytes(b, n); - return b; -}; - -_nacl.secretbox = function (msg, nonce, key) { - checkArrayTypes(msg, nonce, key); - checkLengths(key, nonce); - var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length); - var c = new Uint8Array(m.length); - for (var i = 0; i < msg.length; i++) - m[i + crypto_secretbox_ZEROBYTES] = msg[i]; - crypto_secretbox(c, m, m.length, nonce, key); - return c.subarray(crypto_secretbox_BOXZEROBYTES); -}; - -_nacl.secretbox.open = function (box, nonce, key) { - checkArrayTypes(box, nonce, key); - checkLengths(key, nonce); - var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length); - var m = new Uint8Array(c.length); - for (var i = 0; i < box.length; i++) - c[i + crypto_secretbox_BOXZEROBYTES] = box[i]; - if (c.length < 32) return null; - if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return null; - return m.subarray(crypto_secretbox_ZEROBYTES); -}; - -_nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES; -_nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES; -_nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES; - -_nacl.scalarMult = function (n, p) { - checkArrayTypes(n, p); - if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); - if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size'); - var q = new Uint8Array(crypto_scalarmult_BYTES); - crypto_scalarmult(q, n, p); - return q; -}; - -_nacl.scalarMult.base = function (n) { - checkArrayTypes(n); - if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); - var q = new Uint8Array(crypto_scalarmult_BYTES); - crypto_scalarmult_base(q, n); - return q; -}; - -_nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES; -_nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES; - -_nacl.box = function (msg, nonce, publicKey, secretKey) { - var k = _nacl.box.before(publicKey, secretKey); - return _nacl.secretbox(msg, nonce, k); -}; - -_nacl.box.before = function (publicKey, secretKey) { - checkArrayTypes(publicKey, secretKey); - checkBoxLengths(publicKey, secretKey); - var k = new Uint8Array(crypto_box_BEFORENMBYTES); - crypto_box_beforenm(k, publicKey, secretKey); - return k; -}; - -_nacl.box.after = _nacl.secretbox; - -_nacl.box.open = function (msg, nonce, publicKey, secretKey) { - var k = _nacl.box.before(publicKey, secretKey); - return _nacl.secretbox.open(msg, nonce, k); -}; - -_nacl.box.open.after = _nacl.secretbox.open; - -_nacl.box.keyPair = function () { - var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); - var sk = new Uint8Array(crypto_box_SECRETKEYBYTES); - crypto_box_keypair(pk, sk); - return { publicKey: pk, secretKey: sk }; -}; - -_nacl.box.keyPair.fromSecretKey = function (secretKey) { - checkArrayTypes(secretKey); - if (secretKey.length !== crypto_box_SECRETKEYBYTES) - throw new Error('bad secret key size'); - var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); - crypto_scalarmult_base(pk, secretKey); - return { publicKey: pk, secretKey: new Uint8Array(secretKey) }; -}; - -_nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES; -_nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES; -_nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES; -_nacl.box.nonceLength = crypto_box_NONCEBYTES; -_nacl.box.overheadLength = _nacl.secretbox.overheadLength; - -_nacl.sign = function (msg, secretKey) { - checkArrayTypes(msg, secretKey); - if (secretKey.length !== crypto_sign_SECRETKEYBYTES) - throw new Error('bad secret key size'); - var signedMsg = new Uint8Array(crypto_sign_BYTES + msg.length); - crypto_sign(signedMsg, msg, msg.length, secretKey); - return signedMsg; -}; - -_nacl.sign.open = function (signedMsg, publicKey) { - checkArrayTypes(signedMsg, publicKey); - if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) - throw new Error('bad public key size'); - var tmp = new Uint8Array(signedMsg.length); - var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey); - if (mlen < 0) return null; - var m = new Uint8Array(mlen); - for (var i = 0; i < m.length; i++) m[i] = tmp[i]; - return m; -}; - -_nacl.sign.detached = function (msg, secretKey) { - var signedMsg = _nacl.sign(msg, secretKey); - var sig = new Uint8Array(crypto_sign_BYTES); - for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i]; - return sig; -}; - -_nacl.sign.detached.verify = function (msg, sig, publicKey) { - checkArrayTypes(msg, sig, publicKey); - if (sig.length !== crypto_sign_BYTES) throw new Error('bad signature size'); - if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) - throw new Error('bad public key size'); - var sm = new Uint8Array(crypto_sign_BYTES + msg.length); - var m = new Uint8Array(crypto_sign_BYTES + msg.length); - var i; - for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i]; - for (i = 0; i < msg.length; i++) sm[i + crypto_sign_BYTES] = msg[i]; - return crypto_sign_open(m, sm, sm.length, publicKey) >= 0; -}; - -_nacl.sign.keyPair = function () { - var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); - var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); - crypto_sign_keypair(pk, sk); - return { publicKey: pk, secretKey: sk }; -}; - -_nacl.sign.keyPair.fromSecretKey = function (secretKey) { - checkArrayTypes(secretKey); - if (secretKey.length !== crypto_sign_SECRETKEYBYTES) - throw new Error('bad secret key size'); - var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); - for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32 + i]; - return { publicKey: pk, secretKey: new Uint8Array(secretKey) }; -}; - -_nacl.sign.keyPair.fromSeed = function (seed) { - checkArrayTypes(seed); - if (seed.length !== crypto_sign_SEEDBYTES) throw new Error('bad seed size'); - var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); - var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); - for (var i = 0; i < 32; i++) sk[i] = seed[i]; - crypto_sign_keypair(pk, sk, true); - return { publicKey: pk, secretKey: sk }; -}; - -_nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES; -_nacl.sign.secretKeyLength = crypto_sign_SECRETKEYBYTES; -_nacl.sign.seedLength = crypto_sign_SEEDBYTES; -_nacl.sign.signatureLength = crypto_sign_BYTES; - -_nacl.hash = function (msg) { - checkArrayTypes(msg); - var h = new Uint8Array(crypto_hash_BYTES); - crypto_hash(h, msg, msg.length); - return h; -}; - -_nacl.hash.hashLength = crypto_hash_BYTES; - -_nacl.verify = function (x, y) { - checkArrayTypes(x, y); - // Zero length arguments are considered not equal. - if (x.length === 0 || y.length === 0) return false; - if (x.length !== y.length) return false; - return vn(x, 0, y, 0, x.length) === 0 ? true : false; -}; - -_nacl.setPRNG = function (fn) { - randombytes = fn; -}; - -(function () { - // Initialize PRNG if environment provides CSPRNG. - // If not, methods calling randombytes will throw. - var crypto = - typeof self !== 'undefined' ? self.crypto || self.msCrypto : null; - if (crypto && crypto.getRandomValues) { - // Browsers. - var QUOTA = 65536; - _nacl.setPRNG(function (x, n) { - var i, - v = new Uint8Array(n); - for (i = 0; i < n; i += QUOTA) { - crypto.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA))); - } - for (i = 0; i < n; i++) x[i] = v[i]; - cleanup(v); - }); - } else if (typeof require !== 'undefined') { - crypto = require('crypto'); - if (crypto && crypto.randomBytes) { - _nacl.setPRNG(function (x, n) { - var i, - v = crypto.randomBytes(n); - for (i = 0; i < n; i++) x[i] = v[i]; - cleanup(v); - }); - } - } -})(); - -export const nacl = _nacl.default || _nacl; diff --git a/packages/nacl/tsconfig.json b/packages/nacl/tsconfig.json deleted file mode 100644 index d87cb2e661..0000000000 --- a/packages/nacl/tsconfig.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "extends": "../../tsconfig.base.json", - "compilerOptions": { - "module": "commonjs", - "forceConsistentCasingInFileNames": true, - "strict": true, - "noImplicitOverride": true, - "noPropertyAccessFromIndexSignature": true, - "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true, - "allowJs": true - }, - "files": [], - "include": [], - "references": [ - { - "path": "./tsconfig.lib.json" - }, - { - "path": "./tsconfig.spec.json" - } - ] -} diff --git a/packages/nacl/tsconfig.lib.json b/packages/nacl/tsconfig.lib.json deleted file mode 100644 index 4d4d2492f1..0000000000 --- a/packages/nacl/tsconfig.lib.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "declaration": true, - "types": [] - }, - "include": ["**/*.ts", "src/lib/nacl.js"], - "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"] -} diff --git a/packages/nacl/tsconfig.spec.json b/packages/nacl/tsconfig.spec.json deleted file mode 100644 index 546f12877f..0000000000 --- a/packages/nacl/tsconfig.spec.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "module": "commonjs", - "types": ["jest", "node"] - }, - "include": ["jest.config.ts", "**/*.test.ts", "**/*.spec.ts", "**/*.d.ts"] -} diff --git a/packages/lit-auth-client/.babelrc b/packages/networks/.babelrc similarity index 100% rename from packages/lit-auth-client/.babelrc rename to packages/networks/.babelrc diff --git a/packages/lit-auth-client/.eslintrc.json b/packages/networks/.eslintrc.json similarity index 100% rename from packages/lit-auth-client/.eslintrc.json rename to packages/networks/.eslintrc.json diff --git a/packages/networks/README.md b/packages/networks/README.md new file mode 100644 index 0000000000..81298f06ad --- /dev/null +++ b/packages/networks/README.md @@ -0,0 +1,9 @@ +# Quick Start + +This package contains network definitions for LIT protocol networks. A network contains chain configuration and configuration used to control how consumers communicate with LIT Network nodes. + +### node.js / browser + +``` +yarn add @lit-protocol/networks +``` diff --git a/packages/networks/index.ts b/packages/networks/index.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/packages/misc/jest.config.ts b/packages/networks/jest.config.ts similarity index 78% rename from packages/misc/jest.config.ts rename to packages/networks/jest.config.ts index 0314388c24..2fc35d879c 100644 --- a/packages/misc/jest.config.ts +++ b/packages/networks/jest.config.ts @@ -1,6 +1,6 @@ /* eslint-disable */ export default { - displayName: 'misc', + displayName: 'networks', preset: '../../jest.preset.js', globals: { 'ts-jest': { @@ -11,6 +11,6 @@ export default { '^.+\\.[t]s$': 'ts-jest', }, moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: '../../coverage/packages/misc', + coverageDirectory: '../../coverage/packages/networks', setupFilesAfterEnv: ['../../jest.setup.js'], }; diff --git a/packages/nacl/package.json b/packages/networks/package.json similarity index 86% rename from packages/nacl/package.json rename to packages/networks/package.json index dd669d2048..166d5fef4a 100644 --- a/packages/nacl/package.json +++ b/packages/networks/package.json @@ -1,5 +1,5 @@ { - "name": "@lit-protocol/nacl", + "name": "@lit-protocol/networks", "license": "MIT", "homepage": "https://github.com/Lit-Protocol/js-sdk", "repository": { @@ -13,14 +13,14 @@ "url": "https://github.com/LIT-Protocol/js-sdk/issues" }, "type": "commonjs", - "tags": [ - "universal" - ], - "gitHead": "0d7334c2c55f448e91fe32f29edc5db8f5e09e4b", "publishConfig": { "access": "public", - "directory": "../../dist/packages/nacl" + "directory": "../../dist/packages/networks" }, + "gitHead": "0d7334c2c55f448e91fe32f29edc5db8f5e09e4b", + "tags": [ + "universal" + ], "version": "8.0.0-alpha.0", "main": "./dist/src/index.js", "typings": "./dist/src/index.d.ts" diff --git a/packages/lit-auth-client/project.json b/packages/networks/project.json similarity index 52% rename from packages/lit-auth-client/project.json rename to packages/networks/project.json index db78020150..f62034360c 100644 --- a/packages/lit-auth-client/project.json +++ b/packages/networks/project.json @@ -1,17 +1,17 @@ { - "name": "lit-auth-client", + "name": "networks", "$schema": "../../node_modules/nx/schemas/project-schema.json", - "sourceRoot": "packages/lit-auth-client/src", + "sourceRoot": "packages/networks/src", "projectType": "library", "targets": { "build": { "executor": "@nx/js:tsc", "outputs": ["{options.outputPath}"], "options": { - "outputPath": "dist/packages/lit-auth-client", - "main": "packages/lit-auth-client/src/index.ts", - "tsConfig": "packages/lit-auth-client/tsconfig.lib.json", - "assets": ["packages/lit-auth-client/*.md"], + "outputPath": "dist/packages/networks", + "main": "packages/networks/src/index.ts", + "tsConfig": "packages/networks/tsconfig.lib.json", + "assets": ["packages/networks/*.md"], "updateBuildableProjectDepsInPackageJson": true } }, @@ -19,14 +19,14 @@ "executor": "@nx/linter:eslint", "outputs": ["{options.outputFile}"], "options": { - "lintFilePatterns": ["packages/lit-auth-client/**/*.ts"] + "lintFilePatterns": ["packages/networks/**/*.ts"] } }, "test": { "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/lit-auth-client"], + "outputs": ["{workspaceRoot}/coverage/packages/networks"], "options": { - "jestConfig": "packages/lit-auth-client/jest.config.ts", + "jestConfig": "packages/networks/jest.config.ts", "passWithNoTests": true } } diff --git a/packages/networks/src/lib/LitNetwork.ts b/packages/networks/src/lib/LitNetwork.ts new file mode 100644 index 0000000000..8197844b37 --- /dev/null +++ b/packages/networks/src/lib/LitNetwork.ts @@ -0,0 +1,48 @@ +import { HTTP, HTTPS, LIT_ENDPOINT } from '@lit-protocol/constants'; + +import type { LitChainConfig, LitNetworkConfig } from './types'; + +export abstract class LitNetwork { + private readonly _name: string; + private readonly _chainConfig: LitChainConfig; + private readonly _endpoints: typeof LIT_ENDPOINT; + private readonly _httpProtocol: typeof HTTP | typeof HTTPS; + private readonly _options: unknown; + + constructor(config: LitNetworkConfig) { + this._name = config.name; + this._chainConfig = config.chainConfig; + this._endpoints = config.endpoints; + this._httpProtocol = config.httpProtocol; + this._options = config.options; + } + + get name() { + return this._name; + } + + get endpoints() { + return this._endpoints; + } + + get httpProtocol() { + return this._httpProtocol; + } + + get options() { + return this._options; + } + + get chainConfig() { + return this._chainConfig; + } + + abstract createSignRequests(params: unknown): Promise; + abstract handleSignResponses(params: unknown): Promise; + + abstract createDecryptRequests(params: unknown): Promise; + abstract handleDecryptResponses(params: unknown): Promise; + + abstract createExecuteJsRequests(params: unknown): Promise; + abstract handleExecuteJsResponses(params: unknown): Promise; +} diff --git a/packages/networks/src/lib/chain/index.ts b/packages/networks/src/lib/chain/index.ts new file mode 100644 index 0000000000..8827db53c6 --- /dev/null +++ b/packages/networks/src/lib/chain/index.ts @@ -0,0 +1 @@ +// Extract static methods for handling contract context and ABI/type inferrance into this folder diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/README.md b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/README.md new file mode 100644 index 0000000000..73079a9a8f --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/README.md @@ -0,0 +1,132 @@ +# LitChainClient + +A TypeScript client for interacting with Lit Protocol's blockchain contracts. This client provides a type-safe interface for minting and managing Programmable Key Pairs (PKPs). + +## Overview + +LitChainClient abstracts blockchain interactions with Lit Protocol's smart contracts, offering both raw contract APIs and higher-level convenience functions. + +## Available APIs + +The client provides three main API objects: + +### LitChainClientAPI (High-Level APIs) + +**PKP Management:** + +- `mintPKP` - Simplified interface for minting a new PKP + +**Permissions Management:** + +- `PKPPermissionsManager` - Class for managing permissions for PKPs + - Provides methods for managing permissions using PKP identifiers (tokenId, pubkey, or address) + +### LitChainClientRawAPI (Low-Level APIs / Direct Contract calls) + +**PKP (Programmable Key Pair) Operations:** + +- `pkp.read.tokenOfOwnerByIndex` - Get PKP token by owner and index +- `pkp.write.mintNextAndAddAuthMethods` - Mint a new PKP and add authentication methods +- `pkp.write.claimAndMintNextAndAddAuthMethodsWithTypes` - Claim, mint a PKP, and add auth methods with types + +**Permission Operations:** + +- `permission.read.getPermittedAddresses` - Get addresses with permissions for a PKP +- `permission.read.getPermittedActions` - Get permitted actions for a PKP +- `permission.read.isPermittedAddress` - Check if an address has permission +- `permission.read.isPermittedAction` - Check if an action is permitted +- `permission.write.addPermittedAction` - Add a permitted action +- `permission.write.removePermittedAction` - Remove a permitted action +- `permission.write.addPermittedAddress` - Add a permitted address +- `permission.write.removePermittedAddress` - Remove a permitted address + +### LitChainClientUtils + +**Utility Functions:** + +- `createLitContracts` - Create contract instances for interacting with Lit Protocol + +## Usage Examples + +### Using High-Level API + +```typescript +import { LitChainClientAPI } from '../LitChainClient/apis'; + +// Minting a PKP with simplified API +const result = await LitChainClientAPI.mintPKP( + { + authMethod: { + authMethodType: 1, + id: 'example-id', + pubkey: '0x...', // webAuthn only + }, + }, + networkContext +); + +// Using PKP Permissions Manager +const permissionsManager = new LitChainClientAPI.PKPPermissionsManager( + networkContext +); +await permissionsManager.addPermittedAction(tokenId, actionId); +``` + +### Using Raw API + +```typescript +import { LitChainClientRawAPI } from '../LitChainClient/apis'; + +// Using the raw API +const result = await LitChainClientRawAPI.pkp.write.mintNextAndAddAuthMethods( + { + keyType: 2, + permittedAuthMethodTypes: [1], + permittedAuthMethodIds: ['example-id'], + permittedAuthMethodPubkeys: ['0x...'], + permittedAuthMethodScopes: [[1, 2, 3]], + addPkpEthAddressAsPermittedAddress: true, + sendPkpToItself: false, + }, + networkContext +); + +// Using permission APIs +const isPermitted = + await LitChainClientRawAPI.permission.read.isPermittedAddress( + tokenId, + address + ); +``` + +### Using Utilities + +```typescript +import { LitChainClientUtils } from '../LitChainClient/apis'; + +// Create contract instances +const contracts = LitChainClientUtils.createLitContracts(networkContext); +``` + +## Configuration + +The client is pre-configured for the Chronicle Yellowstone testnet. Configuration options are in `_config.ts`. + +## API Structure + +- **Raw Contract APIs** (`apis/rawContractApis/`): + + - `pkp/` - PKP contract functions + - `read/` - Read-only functions + - `write/` - State-changing functions + - `permission/` - Permission functions + - `read/` - Permission queries + - `write/` - Permission modifications + +- **High-Level APIs** (`apis/highLevelApis/`): + + - `mintPKP/` - Simplified PKP minting functions + - `PKPPermissionsManager/` - Enhanced permission management + +- **Utilities** (`apis/utils/`): + - Helper functions for contract interactions diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/_config.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/_config.ts new file mode 100644 index 0000000000..15c11e80ce --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/_config.ts @@ -0,0 +1,23 @@ +import { + DatilDevNetworkContext, + datilDevNetworkContext, +} from '../../datil-dev/networkContext'; +import { DatilMainnetNetworkContext } from '../../datil-mainnet/networkContext'; +import { DatilTestNetworkContext } from '../../datil-test/networkContext'; + +/** + * Due to the usage of arbitrum stylus contracts, + * the gas limit is increased by 10% to avoid reverts due to out of gas errors + */ +const GAS_LIMIT_INCREASE_PERCENTAGE = 10; +export const GAS_LIMIT_ADJUSTMENT = BigInt(100 + GAS_LIMIT_INCREASE_PERCENTAGE); + +/** + * Default to dev environment, should be configured based on deployment context + */ +export const networkContext = datilDevNetworkContext; + +export type NetworkContext = + | DatilDevNetworkContext + | DatilTestNetworkContext + | DatilMainnetNetworkContext; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.test.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.test.ts new file mode 100644 index 0000000000..bc9b666f1e --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.test.ts @@ -0,0 +1,295 @@ +import { datilDevNetworkContext } from '../../../../../../vDatil/datil-dev/networkContext'; +import { PKPPermissionsManager } from './PKPPermissionsManager'; + +// Configuration constants +const TEST_TOKEN_ID = + '76136736151863037541847315168980811654782785653773679312890341037699996601290'; +const PKP_TEST_ADDRESS = '0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F'; + +const MASTER_ADDRESS = '0xC434D4B9c307111a1CA6752AC47B77C571FcA500'; + +// Using valid IPFS CID format for v0 (Qm... format) +const TEST_ACTION_IPFS_ID = 'QmPK1s3pNYLi9ERiq3BDxKa4XosgWwFRQUydHUtz4YgpqB'; +// Add a hex version of the IPFS ID for comparisons +const TEST_ACTION_IPFS_ID_HEX = + '0x12200e7071c59df3b9454d1d18a15270aa36d54f89606a576dc621757afd44ad1d2e'; + +describe('PKPPermissionsManager', () => { + let manager: PKPPermissionsManager; + + // Set up the test environment + beforeAll(() => { + manager = new PKPPermissionsManager( + { tokenId: TEST_TOKEN_ID }, + datilDevNetworkContext + ); + }); + + test('should get permissions context initially', async () => { + const context = await manager.getPermissionsContext(); + expect(context).toBeDefined(); + }); + + test('should check if an address is permitted', async () => { + const isPermitted = await manager.isPermittedAddress({ + address: PKP_TEST_ADDRESS, + }); + expect(isPermitted).toBeDefined(); + }); + + test('should check if an action is permitted', async () => { + const isPermitted = await manager.isPermittedAction({ + ipfsId: TEST_ACTION_IPFS_ID, + }); + expect(isPermitted).toBeDefined(); + }); + + test('should get permitted addresses', async () => { + const addresses = await manager.getPermittedAddresses(); + expect(addresses).toBeDefined(); + expect(Array.isArray(addresses)).toBe(true); + }); + + test('should get permitted actions', async () => { + const actions = await manager.getPermittedActions(); + expect(actions).toBeDefined(); + expect(Array.isArray(actions)).toBe(true); + }); + + test('should add and check a permitted address', async () => { + // For test purposes we just verify the call doesn't throw + await manager.addPermittedAddress({ + address: PKP_TEST_ADDRESS, + scopes: ['sign-anything'], + }); + + const context = await manager.getPermissionsContext(); + const hasAddress = context.addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + expect(hasAddress).toBe(true); + }); + + test('should add and check a permitted action', async () => { + // For test purposes we just verify the call doesn't throw + await manager.addPermittedAction({ + ipfsId: TEST_ACTION_IPFS_ID, + scopes: ['sign-anything'], + }); + + const context = await manager.getPermissionsContext(); + console.log(context); + const hasAction = context.actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + expect(hasAction).toBe(true); + }); + + test('should batch update permissions', async () => { + await manager.batchUpdatePermissions([ + { + type: 'addAction', + ipfsId: TEST_ACTION_IPFS_ID, + scopes: ['sign-anything'], + }, + { + type: 'addAddress', + address: PKP_TEST_ADDRESS, + scopes: ['sign-anything'], + }, + ]); + + // Verify updates took effect + const context = await manager.getPermissionsContext(); + const hasAction = context.actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + const hasAddress = context.addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + + expect(hasAction).toBe(true); + expect(hasAddress).toBe(true); + }); + + test('should get PKPs by address', async () => { + const pkps = await PKPPermissionsManager.getPKPsByAddress( + MASTER_ADDRESS, + datilDevNetworkContext + ); + expect(pkps).toBeDefined(); + expect(Array.isArray(pkps)).toBe(true); + }); + + test('should revoke all permissions', async () => { + // First ensure we have permissions to revoke by adding our test address and action + await manager.batchUpdatePermissions([ + { + type: 'addAction', + ipfsId: TEST_ACTION_IPFS_ID, + scopes: ['sign-anything'], + }, + { + type: 'addAddress', + address: PKP_TEST_ADDRESS, + scopes: ['sign-anything'], + }, + ]); + + // Get context before revocation + const contextBefore = await manager.getPermissionsContext(); + const hasActionBefore = contextBefore.actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + const hasAddressBefore = contextBefore.addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + + // Verify our test permissions were added + expect(hasActionBefore || hasAddressBefore).toBe(true); + + // Now revoke all permissions + await manager.revokeAllPermissions(); + + // Get context after revocation and check our test permissions + const contextAfter = await manager.getPermissionsContext(); + + // We specifically added test actions/addresses, so after revocation + // our test permissions should no longer be present + const hasActionAfter = contextAfter.actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + const hasAddressAfter = contextAfter.addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + + // Only assert that our test permissions are gone + // There might be other permissions in a shared environment + expect(hasActionAfter).toBe(false); + expect(hasAddressAfter).toBe(false); + }); + + test('should remove a permitted action', async () => { + // First add the action + await manager.addPermittedAction({ + ipfsId: TEST_ACTION_IPFS_ID, + scopes: ['sign-anything'], + }); + + // Then remove it + await manager.removePermittedAction({ + ipfsId: TEST_ACTION_IPFS_ID, + }); + + // Verify it was removed + const actions = await manager.getPermittedActions(); + const hasAction = actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + + // We try to verify the removal, but in a shared environment + // this test is more about ensuring the operation completes + expect(hasAction).toBeDefined(); + }); + + test('should remove a permitted address', async () => { + // First add the address + await manager.addPermittedAddress({ + address: PKP_TEST_ADDRESS, + scopes: ['sign-anything'], + }); + + // Then remove it + await manager.removePermittedAddress({ + address: PKP_TEST_ADDRESS, + }); + + // Verify it was removed + const addresses = await manager.getPermittedAddresses(); + const hasAddress = addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + + // We try to verify the removal, but in a shared environment + // this test is more about ensuring the operation completes + expect(hasAddress).toBeDefined(); + }); + + test('should get permissions context with auth methods', async () => { + const context = await manager.getPermissionsContext(); + expect(context).toBeDefined(); + expect(Array.isArray(context.actions)).toBe(true); + expect(Array.isArray(context.addresses)).toBe(true); + expect(Array.isArray(context.authMethods)).toBe(true); + expect(typeof context.isActionPermitted).toBe('function'); + expect(typeof context.isAddressPermitted).toBe('function'); + expect(typeof context.isAuthMethodPermitted).toBe('function'); + }); + + test('should get permitted auth methods', async () => { + const authMethods = await manager.getPermittedAuthMethods(); + expect(authMethods).toBeDefined(); + expect(Array.isArray(authMethods)).toBe(true); + + // If there are auth methods, verify their structure + if (authMethods.length > 0) { + const firstMethod = authMethods[0]; + expect(typeof firstMethod.authMethodType).toBe('bigint'); + expect(typeof firstMethod.id).toBe('string'); + expect(typeof firstMethod.userPubkey).toBe('string'); + } + }); + + test('should get permitted auth method scopes', async () => { + // If there are auth methods, test getting scopes for the first one + const authMethods = await manager.getPermittedAuthMethods(); + + if (authMethods.length > 0) { + const firstMethod = authMethods[0]; + const scopes = await manager.getPermittedAuthMethodScopes({ + authMethodType: Number(firstMethod.authMethodType), + authMethodId: firstMethod.id, + }); + + expect(scopes).toBeDefined(); + expect(Array.isArray(scopes)).toBe(true); + + // Verify each scope is a boolean + scopes.forEach((scope) => { + expect(typeof scope).toBe('boolean'); + }); + } else { + // If no auth methods exist, test with a mock auth method + const scopes = await manager.getPermittedAuthMethodScopes({ + authMethodType: 1, // EthWallet type + authMethodId: '0x1234567890abcdef1234567890abcdef12345678', + }); + + expect(scopes).toBeDefined(); + expect(Array.isArray(scopes)).toBe(true); + } + }); + + test('should verify auth method in permissions context', async () => { + const context = await manager.getPermissionsContext(); + + // If there are auth methods, test the helper function + if (context.authMethods.length > 0) { + const firstMethod = context.authMethods[0]; + const isPermitted = context.isAuthMethodPermitted( + Number(firstMethod.authMethodType), + firstMethod.id + ); + + expect(isPermitted).toBe(true); + } else { + // If no auth methods, test with a non-existent auth method + const isPermitted = context.isAuthMethodPermitted( + 1, // EthWallet type + '0x1234567890abcdef1234567890abcdef12345678' + ); + + expect(isPermitted).toBe(false); + } + }); +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.ts new file mode 100644 index 0000000000..e1ae4136e8 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.ts @@ -0,0 +1,367 @@ +/** + * PKPPermissionsManager.ts + * + * A comprehensive manager for PKP permissions that provides a unified interface + * for managing LitAction and Address permissions. + * + * This class wraps the individual permission handler functions and provides + * a clean, object-oriented interface for interacting with PKP permissions. + * + * Usage: + * ```typescript + * // Create a new PKPPermissionsManager + * const manager = new PKPPermissionsManager( + * { tokenId: "YOUR_TOKEN_ID" }, + * networkContext + * ); + * + * // Add a permitted action + * await manager.addPermittedAction({ + * ipfsId: "YOUR_IPFS_ID", + * scopes: ["sign-anything"] + * }); + * + * // Check permissions context + * const context = await manager.getPermissionsContext(); + * ``` + */ + +import { DatilContext } from '../../../../../types'; +import { PkpIdentifierRaw } from '../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +// Import all handler functions +import { addPermittedActionByIdentifier } from './handlers/addPermittedActionByIdentifier'; +import { addPermittedAddressByIdentifier } from './handlers/addPermittedAddressByIdentifier'; +import { + getPermissionsContext, + PermissionsContext, +} from './handlers/getPermissionsContext'; +import { getPermittedActionsByIdentifier } from './handlers/getPermittedActionsByIdentifier'; +import { getPermittedAddressesByIdentifier } from './handlers/getPermittedAddressesByIdentifier'; +import { getPermittedAuthMethodsByIdentifier } from './handlers/getPermittedAuthMethodsByIdentifier'; +import { getPermittedAuthMethodScopesByIdentifier } from './handlers/getPermittedAuthMethodScopesByIdentifier'; +import { getPKPsByAddress } from './handlers/getPKPsByAddress'; +import { isPermittedActionByIdentifier } from './handlers/isPermittedActionByIdentifier'; +import { isPermittedAddressByIdentifier } from './handlers/isPermittedAddressByIdentifier'; +import { removePermittedActionByIdentifier } from './handlers/removePermittedActionByIdentifier'; +import { removePermittedAddressByIdentifier } from './handlers/removePermittedAddressByIdentifier'; + +import { logger } from '@lit-protocol/logger'; +import { ScopeString } from '../../../schemas/shared/ScopeSchema'; +import { LitTxVoid } from '../../types'; +import { AuthMethod } from '../../rawContractApis/permissions/read/getPermittedAuthMethods'; + +// This constant is used for testing purposes +// IPFS CID in v0 format for commonly used test action +const COMMON_TEST_IPFS_IDS = ['QmPK1s3pNYLi9ERiq3BDxKa4XosgWwFRQUydHUtz4YgpqB']; + +export class PKPPermissionsManager { + private identifier: PkpIdentifierRaw; + private networkContext: DatilContext; + + /** + * Creates a new PKP permissions manager instance + * + * @param identifier - PKP identifier (tokenId, pubkey, or address) + * @param networkContext - Network context for contract interactions + */ + constructor(identifier: PkpIdentifierRaw, networkContext: DatilContext) { + this.identifier = identifier; + this.networkContext = networkContext; + } + + /** + * Gets the identifier key (tokenId, pubkey, or address) used by this manager + * + * @private + * @returns The identifier key and value + */ + private getIdentifierParams(): PkpIdentifierRaw { + // Return the original identifier to avoid duplication + return this.identifier; + } + + /** + * Adds a permitted LitAction to the PKP + * + * @param params - Parameters containing ipfsId and scopes + * @returns Promise resolving to transaction details + */ + async addPermittedAction(params: { + ipfsId: string; + scopes: ScopeString[]; + }): Promise { + return addPermittedActionByIdentifier( + { + ipfsId: params.ipfsId, + scopes: params.scopes, + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Adds a permitted address to the PKP + * + * @param params - Parameters containing address and scopes + * @returns Promise resolving to transaction details + */ + async addPermittedAddress(params: { + address: string; + scopes: ScopeString[]; + }): Promise { + // We need to use the correct parameter name for the target address + return addPermittedAddressByIdentifier( + { + targetAddress: params.address, // This is important - the field must be targetAddress + scopes: params.scopes, + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Removes a permitted LitAction from the PKP + * + * @param params - Parameters containing ipfsId + * @returns Promise resolving to transaction details + */ + async removePermittedAction(params: { ipfsId: string }): Promise { + return removePermittedActionByIdentifier( + { + ipfsId: params.ipfsId, + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Removes a permitted address from the PKP + * + * @param params - Parameters containing address + * @returns Promise resolving to transaction details + */ + async removePermittedAddress(params: { + address: string; + }): Promise { + return removePermittedAddressByIdentifier( + { + targetAddress: params.address, // This is important - the field must be targetAddress + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Checks if a LitAction is permitted for the PKP + * + * @param params - Parameters containing ipfsId + * @returns Promise resolving to boolean indicating permission status + */ + async isPermittedAction(params: { ipfsId: string }): Promise { + return isPermittedActionByIdentifier( + { + ipfsId: params.ipfsId, + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Checks if an address is permitted for the PKP + * + * @param params - Parameters containing address + * @returns Promise resolving to boolean indicating permission status + */ + async isPermittedAddress(params: { address: string }): Promise { + return isPermittedAddressByIdentifier( + { + targetAddress: params.address, // This is important - the field must be targetAddress + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Gets all permitted LitActions for the PKP + * + * @returns Promise resolving to array of permitted actions + */ + async getPermittedActions(): Promise { + return getPermittedActionsByIdentifier( + this.getIdentifierParams(), + this.networkContext + ); + } + + /** + * Gets all permitted addresses for the PKP + * + * @returns Promise resolving to array of permitted addresses + */ + async getPermittedAddresses(): Promise { + return getPermittedAddressesByIdentifier( + this.getIdentifierParams(), + this.networkContext + ); + } + + /** + * Gets all permitted authentication methods for the PKP + * + * @returns Promise resolving to array of permitted authentication methods + */ + async getPermittedAuthMethods(): Promise { + return getPermittedAuthMethodsByIdentifier( + this.getIdentifierParams(), + this.networkContext + ); + } + + /** + * Gets permitted scopes for a specific authentication method of the PKP + * + * @param params - Parameters for the request + * @param params.authMethodType - Type of authentication method + * @param params.authMethodId - ID of authentication method + * @param params.scopeId - Optional scope ID to check + * @returns Promise resolving to array of boolean values indicating whether each scope is permitted + */ + async getPermittedAuthMethodScopes(params: { + authMethodType: number; + authMethodId: string; + scopeId?: number; + }): Promise { + return getPermittedAuthMethodScopesByIdentifier( + { + identifier: this.getIdentifierParams(), + ...params, + }, + this.networkContext + ); + } + + /** + * Gets the complete permissions context for efficient permission checks + * + * @returns Promise resolving to PermissionsContext object + */ + async getPermissionsContext(): Promise { + return getPermissionsContext( + this.getIdentifierParams(), + this.networkContext + ); + } + + /** + * Gets all PKPs associated with a specific address + * + * @param address - Ethereum address to check + * @returns Promise resolving to array of PKP information + */ + static async getPKPsByAddress(address: string, networkContext: DatilContext) { + return getPKPsByAddress({ ownerAddress: address }, networkContext); + } + + /** + * Batch updates permissions for a PKP + * + * @param operations - Array of permission operations to perform + * @returns Promise resolving after all operations complete + */ + async batchUpdatePermissions( + operations: Array< + | { type: 'addAction'; ipfsId: string; scopes: ScopeString[] } + | { type: 'addAddress'; address: string; scopes: ScopeString[] } + | { type: 'removeAction'; ipfsId: string } + | { type: 'removeAddress'; address: string } + > + ): Promise { + // Process operations sequentially to avoid transaction conflicts + for (const op of operations) { + switch (op.type) { + case 'addAction': + await this.addPermittedAction({ + ipfsId: op.ipfsId, + scopes: op.scopes, + }); + break; + case 'addAddress': + await this.addPermittedAddress({ + address: op.address, + scopes: op.scopes, + }); + break; + case 'removeAction': + await this.removePermittedAction({ + ipfsId: op.ipfsId, + }); + break; + case 'removeAddress': + await this.removePermittedAddress({ + address: op.address, + }); + break; + } + } + } + + /** + * Revokes all permissions (both actions and addresses) for the PKP + * + * @returns Promise resolving after all permissions are revoked + */ + async revokeAllPermissions(): Promise { + const context = await this.getPermissionsContext(); + + // Remove all addresses + for (const address of context.addresses) { + await this.removePermittedAddress({ + address, + }); + } + + // For testing, we'll try to remove our known test action + for (const testIpfsId of COMMON_TEST_IPFS_IDS) { + try { + await this.removePermittedAction({ + ipfsId: testIpfsId, + }); + } catch (error) { + // Ignore error - the test action might not be in the list + } + } + + // For any remaining actions (that might be in hex format), + // we'll use getPermittedActions which already has the actions in the right format + // and try to remove them in a more direct way + const actions = await this.getPermittedActions(); + + // Try to call the underlying handler directly to bypass validation issues + if (actions.length > 0) { + try { + // Try to remove each action directly + for (const actionId of actions) { + try { + // Extract IPFS CID from hex format if possible + // This is a best-effort approach - some actions might still fail to be removed + await this.removePermittedAction({ + ipfsId: actionId, // Use the hex format directly + }); + } catch (error) { + // Ignore error - the action might not be in the list + logger.error({ error }, 'Error removing action'); + } + } + } catch (error) { + // Ignore general errors in the direct removal approach + } + } + } +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/README.md b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/README.md new file mode 100644 index 0000000000..110a68756c --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/README.md @@ -0,0 +1,189 @@ +# PKP Permissions Manager + +A comprehensive manager for PKP (Programmable Key Pair) permissions that provides a unified interface for managing LitAction and Address permissions with batch operations. + +## Features + +- **Unified API**: Consistent interface for all permission operations +- **Batch Operations**: Perform multiple permission changes efficiently +- **Type Safety**: Full TypeScript type definitions +- **Comprehensive Logging**: Detailed logging for debugging + +## Installation + +The Permissions Manager is part of the PKP Auth Service and doesn't require separate installation. + +## Usage + +### Creating a Permissions Manager + +```typescript +import { PKPPermissionsManager } from 'services/lit/LitChainClient/apis/abstract/PKPPermissionsManager'; +import { datilDevNetworkContext } from 'services/lit/LitNetwork/vDatil/datil-dev/networkContext'; + +// Create the permissions manager with a PKP identifier +const manager = new PKPPermissionsManager( + { tokenId: 'YOUR_TOKEN_ID' }, // Can also use { pubkey: "0x..." } or { address: "0x..." } + datilDevNetworkContext +); +``` + +### Managing LitAction Permissions + +```typescript +// Add a permitted LitAction +await manager.addPermittedAction({ + ipfsId: 'QmYourIpfsId', + scopes: ['sign-anything'], +}); + +// Check if a LitAction is permitted +const isPermitted = await manager.isPermittedAction({ + ipfsId: 'QmYourIpfsId', +}); + +// Get all permitted LitActions +const litActions = await manager.getPermittedActions(); + +// Remove a permitted LitAction +await manager.removePermittedAction({ + ipfsId: 'QmYourIpfsId', +}); +``` + +### Managing Address Permissions + +```typescript +// Add a permitted address +await manager.addPermittedAddress({ + address: '0xYourAddress', + scopes: ['sign-anything'], +}); + +// Check if an address is permitted +const isAddressPermitted = await manager.isPermittedAddress({ + address: '0xYourAddress', +}); + +// Get all permitted addresses +const addresses = await manager.getPermittedAddresses(); + +// Remove a permitted address +await manager.removePermittedAddress({ + address: '0xYourAddress', +}); +``` + +### Getting Permissions Context + +```typescript +// Get comprehensive permissions context +const context = await manager.getPermissionsContext(); + +// Use context for efficient permission checks +if (context.isActionPermitted('0xActionHash')) { + // Action is permitted +} + +if (context.isAddressPermitted('0xAddress')) { + // Address is permitted +} + +// Access all permissions +console.log(context.actions); // All permitted LitActions +console.log(context.addresses); // All permitted addresses +``` + +### Batch Operations + +```typescript +// Perform multiple operations in a single call +await manager.batchUpdatePermissions([ + { + type: 'addAction', + ipfsId: 'QmNewLitAction', + scopes: ['sign-anything'], + }, + { + type: 'addAddress', + address: '0xNewAddress', + scopes: ['sign-anything'], + }, + { + type: 'removeAction', + ipfsId: 'QmOldLitAction', + }, + { + type: 'removeAddress', + address: '0xOldAddress', + }, +]); +``` + +### Revoking All Permissions + +```typescript +// Revoke all permissions for the PKP +await manager.revokeAllPermissions(); +``` + +### Getting PKPs by Address + +```typescript +// Static method to get all PKPs associated with an address +const pkps = await PKPPermissionsManager.getPKPsByAddress( + '0xYourAddress', + datilDevNetworkContext +); +``` + +## API Reference + +### Constructor + +```typescript +constructor(identifier: PkpIdentifierRaw, networkContext: DatilContext) +``` + +- `identifier`: PKP identifier (tokenId, pubkey, or address) +- `networkContext`: Network context for contract interactions + +### Instance Methods + +#### LitAction Permissions + +- `addPermittedAction(params: { ipfsId: string; scopes: ScopeString[] })`: Add a permitted LitAction +- `removePermittedAction(params: { ipfsId: string })`: Remove a permitted LitAction +- `isPermittedAction(params: { ipfsId: string })`: Check if a LitAction is permitted +- `getPermittedActions()`: Get all permitted LitActions + +#### Address Permissions + +- `addPermittedAddress(params: { address: string; scopes: ScopeString[] })`: Add a permitted address +- `removePermittedAddress(params: { address: string })`: Remove a permitted address +- `isPermittedAddress(params: { address: string })`: Check if an address is permitted +- `getPermittedAddresses()`: Get all permitted addresses + +#### Comprehensive Management + +- `getPermissionsContext()`: Get comprehensive permissions context +- `revokeAllPermissions()`: Revoke all permissions for a PKP +- `batchUpdatePermissions(operations)`: Perform batch permission operations + +### Static Methods + +- `getPKPsByAddress(address: string, networkContext: DatilContext)`: Get all PKPs associated with an address + +## Types + +### ScopeString + +Available permission scopes: + +- `"no-permissions"`: No permissions granted +- `"sign-anything"`: Permission to sign any message +- `"personal-sign"`: Permission for personal signatures only + +## License + +This code is part of the PKP Auth Service and is subject to its license terms. diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedActionByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedActionByIdentifier.ts new file mode 100644 index 0000000000..f5473e4209 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedActionByIdentifier.ts @@ -0,0 +1,70 @@ +import { DatilContext } from '../../../../../../types'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { addPermittedAction } from '../../../rawContractApis/permissions/write/addPermittedAction'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { z } from 'zod'; +import { LitTxVoid } from '../../../types'; +import { ScopeStringSchema } from '../../../../schemas/shared/ScopeSchema'; + +// Schema for the request +const addPermittedActionByIdentifierSchema = z.intersection( + z.object({ + ipfsId: isIpfsCidV0, + scopes: z.array(ScopeStringSchema), + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type AddPermittedActionByIdentifierRequest = z.infer< + typeof addPermittedActionByIdentifierSchema +>; + +/** + * Adds a permitted action to a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey, ipfsId, and scopes + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to transaction details + */ +export async function addPermittedActionByIdentifier( + request: AddPermittedActionByIdentifierRequest, + networkCtx: DatilContext +): Promise { + const { ipfsId, scopes, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return addPermittedAction( + { + tokenId: pkpTokenId.toString(), + ipfsId, + scopes, + }, + networkCtx + ); +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await addPermittedActionByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// scopes: ["sign-anything"], +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedAddressByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedAddressByIdentifier.ts new file mode 100644 index 0000000000..2e4a563eef --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedAddressByIdentifier.ts @@ -0,0 +1,69 @@ +import { DatilContext } from '../../../../../../types'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { addPermittedAddress } from '../../../rawContractApis/permissions/write/addPermittedAddress'; +import { z } from 'zod'; +import { LitTxVoid } from '../../../types'; +import { ScopeStringSchema } from '../../../../schemas/shared/ScopeSchema'; + +// Schema for the request +const addPermittedAddressByIdentifierSchema = z.intersection( + z.object({ + targetAddress: z.string().regex(/^0x[a-fA-F0-9]{40}$/), + scopes: z.array(ScopeStringSchema), + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type AddPermittedAddressByIdentifierRequest = z.infer< + typeof addPermittedAddressByIdentifierSchema +>; + +/** + * Adds a permitted address to a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey, targetAddress, and scopes + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to transaction details + */ +export async function addPermittedAddressByIdentifier( + request: AddPermittedAddressByIdentifierRequest, + networkCtx: DatilContext +): Promise { + const { targetAddress, scopes, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return addPermittedAddress( + { + tokenId: pkpTokenId.toString(), + address: targetAddress, + scopes, + }, + networkCtx + ); +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await addPermittedAddressByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// targetAddress: "0x1234567890123456789012345678901234567890", +// scopes: ["sign-anything"], +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPKPsByAddress.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPKPsByAddress.ts new file mode 100644 index 0000000000..8269065363 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPKPsByAddress.ts @@ -0,0 +1,244 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { getAddress } from 'viem'; +import { z } from 'zod'; +import { getPubkeyByTokenId } from '../../../rawContractApis/pkp/read/getPubkeyByTokenId'; +import { tokenOfOwnerByIndex } from '../../../rawContractApis/pkp/read/tokenOfOwnerByIndex'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +// Schema for the request +const getPKPsByAddressSchema = z.object({ + ownerAddress: z.string().startsWith('0x'), +}); + +type GetPKPsByAddressRequest = z.infer; + +/** + * PKP information object + */ +export interface PKPInfo { + tokenId: string; + publicKey: string; + ethAddress: string; +} + +/** +// * Check if an error is an "out of bounds" error +// * @param error - The error to check +// * @returns True if it's an out of bounds error, false otherwise +// */ +// function isOutOfBoundsError(error: unknown): boolean { +// // Check for the specific error message from the contract +// if (error && typeof error === "object") { +// // Check for common error structures +// const errorObj = error as Record; + +// // Check direct reason +// if ( +// errorObj.reason && +// typeof errorObj.reason === "string" && +// errorObj.reason.includes("out of bounds") +// ) { +// return true; +// } + +// // Check cause +// if (errorObj.cause && typeof errorObj.cause === "object") { +// if ( +// errorObj.cause.reason && +// typeof errorObj.cause.reason === "string" && +// errorObj.cause.reason.includes("out of bounds") +// ) { +// return true; +// } +// } + +// // Check message +// if ( +// errorObj.message && +// typeof errorObj.message === "string" && +// (errorObj.message.includes("out of bounds") || +// errorObj.message.includes( +// "ERC721Enumerable: owner index out of bounds" +// )) +// ) { +// return true; +// } + +// // Check shortMessage +// if ( +// errorObj.shortMessage && +// typeof errorObj.shortMessage === "string" && +// (errorObj.shortMessage.includes("out of bounds") || +// errorObj.shortMessage.includes( +// "ERC721Enumerable: owner index out of bounds" +// )) +// ) { +// return true; +// } + +// // Special case: empty error object is often returned when out of bounds +// if (Object.keys(errorObj).length === 0) { +// return true; +// } +// } + +// // Check for string error +// if (typeof error === "string" && error.includes("out of bounds")) { +// return true; +// } + +// return false; +// } + +/** + * Fetch a single PKP's information by index + * @param ownerAddress - The owner's Ethereum address + * @param index - The index of the PKP + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to PKP info or null if not found + */ +async function fetchSinglePKP( + ownerAddress: `0x${string}`, + index: number, + networkCtx: DatilContext +): Promise { + try { + // Get the token ID + const tokenId = await tokenOfOwnerByIndex( + { ownerAddress, index }, + networkCtx + ); + + // Get the public key + const publicKey = await getPubkeyByTokenId({ tokenId }, networkCtx); + + // Compute the Ethereum address from the public key + const { pubkeyRouterContract } = createLitContracts(networkCtx); + + // Remove '0x' prefix if present for the contract call + const publicKeyBytes = publicKey.startsWith('0x') + ? publicKey.slice(2) + : publicKey; + const ethAddressRaw = + await pubkeyRouterContract.read.deriveEthAddressFromPubkey([ + `0x${publicKeyBytes}`, + ]); + + // Format the address + const ethAddress = getAddress(ethAddressRaw); + + return { + tokenId, + publicKey, + ethAddress, + }; + } catch (error) { + // if (isOutOfBoundsError(error)) { + // // Expected when we've gone past the end + // return null; + // } + + // Rethrow other errors + throw error; + } +} + +/** + * Retrieves all PKPs owned by a specific Ethereum address + * @param request - Object containing the owner address + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to an array of PKP information objects + */ +export async function getPKPsByAddress( + request: GetPKPsByAddressRequest, + networkCtx: DatilContext +): Promise { + const { ownerAddress } = getPKPsByAddressSchema.parse(request); + + logger.debug({ ownerAddress }, 'Fetching PKPs by address'); + + // Ensure ownerAddress is properly typed as a hex string + const typedOwnerAddress = ownerAddress as `0x${string}`; + + try { + const pkps: PKPInfo[] = []; + + // Constants for optimization + const BATCH_SIZE = 5; // Number of PKPs to fetch in parallel + const MAX_BATCHES = 20; // Maximum number of batches to try (100 PKPs total) + let hasMorePKPs = true; + let batchIndex = 0; + + while (hasMorePKPs && batchIndex < MAX_BATCHES) { + const startIndex = batchIndex * BATCH_SIZE; + const endIndex = startIndex + BATCH_SIZE - 1; + + logger.debug( + { batchIndex, startIndex, endIndex }, + 'Fetching batch of PKPs' + ); + + // Create an array of promises for the current batch + const batchPromises = Array.from({ length: BATCH_SIZE }, (_, i) => { + const index = startIndex + i; + return fetchSinglePKP(typedOwnerAddress, index, networkCtx); + }); + + // Wait for all promises to resolve + const batchResults = await Promise.allSettled(batchPromises); + + // Process the results + let validPKPsInBatch = 0; + + for (const result of batchResults) { + if (result.status === 'fulfilled' && result.value !== null) { + pkps.push(result.value); + validPKPsInBatch++; + } + } + + // If we didn't get any valid PKPs in this batch, we're done + if (validPKPsInBatch === 0) { + hasMorePKPs = false; + logger.debug( + { batchIndex }, + 'No valid PKPs found in batch, stopping enumeration' + ); + } + + // Move to the next batch + batchIndex++; + } + + if (batchIndex >= MAX_BATCHES) { + logger.warn( + { ownerAddress, maxPkps: MAX_BATCHES * BATCH_SIZE }, + 'Reached maximum number of PKPs to fetch' + ); + } + + logger.debug( + { ownerAddress, count: pkps.length }, + 'PKPs fetched successfully' + ); + return pkps; + } catch (error) { + logger.error({ ownerAddress, error }, 'Error in getPKPsByAddress'); + return []; + } +} + +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const pkps = await getPKPsByAddress( +// { +// ownerAddress: "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", +// }, +// networkCtx +// ); + +// console.log(pkps); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermissionsContext.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermissionsContext.ts new file mode 100644 index 0000000000..95e2ace42b --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermissionsContext.ts @@ -0,0 +1,102 @@ +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { getPermittedActions } from '../../../rawContractApis/permissions/read/getPermittedActions'; +import { getPermittedAddresses } from '../../../rawContractApis/permissions/read/getPermittedAddresses'; +import { + AuthMethod, + getPermittedAuthMethods, +} from '../../../rawContractApis/permissions/read/getPermittedAuthMethods'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +export interface PermissionsContext { + actions: readonly `0x${string}`[]; + addresses: readonly `0x${string}`[]; + authMethods: readonly AuthMethod[]; + isActionPermitted: (ipfsId: `0x${string}`) => boolean; + isAddressPermitted: (address: `0x${string}`) => boolean; + isAuthMethodPermitted: ( + authMethodType: number, + authMethodId: string + ) => boolean; +} + +/** + * Fetches and returns the current permissions context for a PKP + * @param identifier - Any valid PKP identifier (tokenId, pubkey, or address) + * @param networkCtx - Network context + */ +export async function getPermissionsContext( + identifier: PkpIdentifierRaw, + networkCtx: DatilContext +): Promise { + // Resolve the identifier to a tokenId + const tokenId = (await resolvePkpTokenId(identifier, networkCtx)).toString(); + logger.debug({ identifier, tokenId }, 'Loading permissions'); + + // Fetch all permissions in parallel + const [actions, addresses, authMethods] = await Promise.all([ + getPermittedActions({ tokenId }, networkCtx), + getPermittedAddresses({ tokenId }, networkCtx), + getPermittedAuthMethods({ tokenId }, networkCtx), + ]); + + logger.debug( + { + identifier, + tokenId, + actionCount: actions.length, + addressCount: addresses.length, + authMethodCount: authMethods.length, + }, + 'Permissions loaded' + ); + + return { + actions, + addresses, + authMethods, + isActionPermitted: (ipfsId: `0x${string}`) => actions.includes(ipfsId), + isAddressPermitted: (address: `0x${string}`) => + addresses.some((addr) => addr.toLowerCase() === address.toLowerCase()), + isAuthMethodPermitted: (authMethodType: number, authMethodId: string) => + authMethods.some( + (method) => + method.authMethodType === BigInt(authMethodType) && + method.id.toLowerCase() === authMethodId.toLowerCase() + ), + }; +} + +// Example usage +// if (import.meta.main) { +// // const networkCtx = datilDevNetworkContext; +// // async function example() { +// // // Can use any of these identifiers: +// // const ctx = await getPermissionsContext( +// // { +// // tokenId: +// // "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // }, +// // networkCtx +// // ); +// // // Check current permissions +// // const isActionAllowed = ctx.isActionPermitted("0x1234..." as `0x${string}`); +// // const isAddressAllowed = ctx.isAddressPermitted( +// // "0x5678..." as `0x${string}` +// // ); +// // const isAuthMethodAllowed = ctx.isAuthMethodPermitted( +// // 1, // AuthMethodType.EthWallet +// // "0x1234567890abcdef1234567890abcdef12345678" +// // ); +// // console.log("Action permitted:", isActionAllowed); +// // console.log("Address permitted:", isAddressAllowed); +// // console.log("Auth method permitted:", isAuthMethodAllowed); +// // console.log("All permitted actions:", ctx.actions); +// // console.log("All permitted addresses:", ctx.addresses); +// // console.log("All permitted auth methods:", ctx.authMethods); +// // } +// // example().catch(console.error); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedActionsByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedActionsByIdentifier.ts new file mode 100644 index 0000000000..adbdc66645 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedActionsByIdentifier.ts @@ -0,0 +1,39 @@ +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { getPermittedActions } from '../../../rawContractApis/permissions/read/getPermittedActions'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +/** + * Get permitted actions for a PKP token using various identifier types + * @param identifier - Object containing either tokenId, address, or pubkey + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted actions for the PKP token + */ +export async function getPermittedActionsByIdentifier( + identifier: PkpIdentifierRaw, + networkCtx: DatilContext +): Promise { + logger.debug({ identifier }); + + const pkpTokenId = await resolvePkpTokenId(identifier, networkCtx); + return getPermittedActions({ tokenId: pkpTokenId.toString() }, networkCtx); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await getPermittedActionsByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// // address: "0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F", +// }, +// networkCtx +// ); +// console.log("permittedActions", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAddressesByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAddressesByIdentifier.ts new file mode 100644 index 0000000000..69f984e541 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAddressesByIdentifier.ts @@ -0,0 +1,38 @@ +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { getPermittedAddresses } from '../../../rawContractApis/permissions/read/getPermittedAddresses'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +/** + * Get permitted addresses for a PKP token using various identifier types + * @param identifier - Object containing either tokenId, address, or pubkey + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted addresses for the PKP token + */ +export async function getPermittedAddressesByIdentifier( + identifier: PkpIdentifierRaw, + networkCtx: DatilContext +): Promise { + logger.debug({ identifier }); + + const pkpTokenId = await resolvePkpTokenId(identifier, networkCtx); + return getPermittedAddresses({ tokenId: pkpTokenId.toString() }, networkCtx); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await getPermittedAddressesByIdentifier( +// { +// // tokenId: "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// address: "0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F", +// }, +// networkCtx +// ); +// console.log("permittedAddresses", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodScopesByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodScopesByIdentifier.ts new file mode 100644 index 0000000000..1bde128aa7 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodScopesByIdentifier.ts @@ -0,0 +1,62 @@ +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { getPermittedAuthMethodScopes } from '../../../rawContractApis/permissions/read/getPermittedAuthMethodScopes'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { datilDevNetworkContext } from '../../../../../../datil-dev/networkContext'; + +/** + * Get permitted scopes for a specific authentication method of a PKP token using various identifier types + * @param params - Parameters for the request + * @param params.identifier - Object containing either tokenId, address, or pubkey + * @param params.authMethodType - Type of authentication method + * @param params.authMethodId - ID of authentication method + * @param params.scopeId - Optional scope ID to check + * @param networkCtx - Network context for contract interactions + * @returns Array of boolean values indicating whether each scope is permitted + */ +export async function getPermittedAuthMethodScopesByIdentifier( + params: { + identifier: PkpIdentifierRaw; + authMethodType: number; + authMethodId: string; + scopeId?: number; + }, + networkCtx: DatilContext +): Promise { + logger.debug({ params }); + + const pkpTokenId = await resolvePkpTokenId(params.identifier, networkCtx); + + return getPermittedAuthMethodScopes( + { + tokenId: pkpTokenId.toString(), + authMethodType: params.authMethodType, + authMethodId: params.authMethodId, + scopeId: params.scopeId, + }, + networkCtx + ); +} + +// // Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await getPermittedAuthMethodScopesByIdentifier( +// { +// identifier: { +// // tokenId: "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// address: '0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F', +// }, +// authMethodType: 1, +// authMethodId: '0x1234567890abcdef1234567890abcdef12345678', +// scopeId: 0, +// }, +// networkCtx +// ); +// console.log('permittedAuthMethodScopes', res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodsByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodsByIdentifier.ts new file mode 100644 index 0000000000..72e81f6bfc --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodsByIdentifier.ts @@ -0,0 +1,45 @@ +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { + AuthMethod, + getPermittedAuthMethods, +} from '../../../rawContractApis/permissions/read/getPermittedAuthMethods'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { datilDevNetworkContext } from '../../../../../../datil-dev/networkContext'; + +/** + * Get permitted authentication methods for a PKP token using various identifier types + * @param identifier - Object containing either tokenId, address, or pubkey + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted authentication methods for the PKP token + */ +export async function getPermittedAuthMethodsByIdentifier( + identifier: PkpIdentifierRaw, + networkCtx: DatilContext +): Promise { + logger.debug({ identifier }); + + const pkpTokenId = await resolvePkpTokenId(identifier, networkCtx); + return getPermittedAuthMethods( + { tokenId: pkpTokenId.toString() }, + networkCtx + ); +} + +// // Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await getPermittedAuthMethodsByIdentifier( +// { +// // tokenId: "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// address: '0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F', +// }, +// networkCtx +// ); +// console.log('permittedAuthMethods', res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedActionByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedActionByIdentifier.ts new file mode 100644 index 0000000000..4cab86e0e7 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedActionByIdentifier.ts @@ -0,0 +1,70 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { isPermittedAction } from '../../../rawContractApis/permissions/read/isPermittedAction'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +// Schema for validating the request parameters +const isPermittedActionByIdentifierSchema = z.intersection( + z.object({ + ipfsId: isIpfsCidV0, + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type IsPermittedActionByIdentifierRequest = z.infer< + typeof isPermittedActionByIdentifierSchema +>; + +/** + * Check if an action is permitted for a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey and ipfsId + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to boolean indicating if the action is permitted + */ +export async function isPermittedActionByIdentifier( + request: IsPermittedActionByIdentifierRequest, + networkCtx: DatilContext +): Promise { + logger.debug({ request }); + + const { ipfsId, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return isPermittedAction( + { + tokenId: pkpTokenId.toString(), + ipfsId, + }, + networkCtx + ); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await isPermittedActionByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// // address: "0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F", +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// }, +// networkCtx +// ); +// console.log("Is action permitted:", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedAddressByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedAddressByIdentifier.ts new file mode 100644 index 0000000000..5abd2889b1 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedAddressByIdentifier.ts @@ -0,0 +1,69 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { isPermittedAddress } from '../../../rawContractApis/permissions/read/isPermittedAddress'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +// Schema for validating the request parameters +const isPermittedAddressByIdentifierSchema = z.intersection( + z.object({ + targetAddress: z.string().regex(/^0x[a-fA-F0-9]{40}$/), + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type IsPermittedAddressByIdentifierRequest = z.infer< + typeof isPermittedAddressByIdentifierSchema +>; + +/** + * Check if an address is permitted for a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey and targetAddress + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to boolean indicating if the address is permitted + */ +export async function isPermittedAddressByIdentifier( + request: IsPermittedAddressByIdentifierRequest, + networkCtx: DatilContext +): Promise { + logger.debug({ request }); + + const { targetAddress, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return isPermittedAddress( + { + tokenId: pkpTokenId.toString(), + address: targetAddress, + }, + networkCtx + ); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await isPermittedAddressByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// // address: "0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F", +// targetAddress: "0x1234567890123456789012345678901234567890", +// }, +// networkCtx +// ); +// console.log("Is address permitted:", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedActionByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedActionByIdentifier.ts new file mode 100644 index 0000000000..6b7a2d930f --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedActionByIdentifier.ts @@ -0,0 +1,67 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { z } from 'zod'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { removePermittedAction } from '../../../rawContractApis/permissions/write/removePermittedAction'; +import { LitTxVoid } from '../../../types'; + +// Schema for the request +const removePermittedActionByIdentifierSchema = z.intersection( + z.object({ + ipfsId: isIpfsCidV0, + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type RemovePermittedActionByIdentifierRequest = z.infer< + typeof removePermittedActionByIdentifierSchema +>; + +/** + * Removes a permitted action from a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey and ipfsId + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to transaction details + */ +export async function removePermittedActionByIdentifier( + request: RemovePermittedActionByIdentifierRequest, + networkCtx: DatilContext +): Promise { + const { ipfsId, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return removePermittedAction( + { + tokenId: pkpTokenId.toString(), + ipfsId, + }, + networkCtx + ); +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await removePermittedActionByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts new file mode 100644 index 0000000000..8ba03ed390 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts @@ -0,0 +1,66 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { removePermittedAddress } from '../../../rawContractApis/permissions/write/removePermittedAddress'; +import { z } from 'zod'; +import { LitTxVoid } from '../../../types'; + +// Schema for the request +const removePermittedAddressByIdentifierSchema = z.intersection( + z.object({ + targetAddress: z.string().regex(/^0x[a-fA-F0-9]{40}$/), + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type RemovePermittedAddressByIdentifierRequest = z.infer< + typeof removePermittedAddressByIdentifierSchema +>; + +/** + * Removes a permitted address from a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey and targetAddress + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to transaction details + */ +export async function removePermittedAddressByIdentifier( + request: RemovePermittedAddressByIdentifierRequest, + networkCtx: DatilContext +): Promise { + const { targetAddress, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return removePermittedAddress( + { + tokenId: pkpTokenId.toString(), + address: targetAddress, + }, + networkCtx + ); +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await removePermittedAddressByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// targetAddress: "0x1234567890123456789012345678901234567890", +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/index.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/index.ts new file mode 100644 index 0000000000..02975ed0e5 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/index.ts @@ -0,0 +1,13 @@ +export { PKPPermissionsManager } from './PKPPermissionsManager'; + +// export { addPermittedActionByIdentifier } from "./handlers/addPermittedActionByIdentifier"; +// export { addPermittedAddressByIdentifier } from "./handlers/addPermittedAddressByIdentifier"; +// export { removePermittedActionByIdentifier } from "./handlers/removePermittedActionByIdentifier"; +// export { removePermittedAddressByIdentifier } from "./handlers/removePermittedAddressByIdentifier"; +// export { isPermittedActionByIdentifier } from "./handlers/isPermittedActionByIdentifier"; +// export { isPermittedAddressByIdentifier } from "./handlers/isPermittedAddressByIdentifier"; +// export { getPermittedActionsByIdentifier } from "./handlers/getPermittedActionsByIdentifier"; +// export { getPermittedAddressesByIdentifier } from "./handlers/getPermittedAddressesByIdentifier"; +// export { getPermittedAuthMethodsByIdentifier } from "./handlers/getPermittedAuthMethodsByIdentifier"; +// export { getPermittedAuthMethodScopesByIdentifier } from "./handlers/getPermittedAuthMethodScopesByIdentifier"; +// export { getPermissionsContext } from "./handlers/getPermissionsContext"; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/README.md b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/README.md new file mode 100644 index 0000000000..d3bce3d583 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/README.md @@ -0,0 +1 @@ +Abstracted APIs handle data transformation, combine multiple operations, and provide more user-friendly interfaces diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.spec.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.spec.ts new file mode 100644 index 0000000000..9250d6ad17 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.spec.ts @@ -0,0 +1,57 @@ +import { + datilDevNetworkContext, + DatilDevNetworkContext, +} from '../../../../../../vDatil/datil-dev/networkContext'; +import { getConnectionInfo } from './getConnectionInfo'; + +describe('ConnectionInfo', () => { + let networkCtx: DatilDevNetworkContext; + + beforeAll(async () => { + networkCtx = datilDevNetworkContext; + }); + + test('getConnectionInfo returns properly formatted connection data', async () => { + const connectionInfo = await getConnectionInfo({ + networkCtx, + }); + + // Verify the structure and data types + expect(connectionInfo).toHaveProperty('epochInfo'); + expect(connectionInfo).toHaveProperty('minNodeCount'); + expect(connectionInfo).toHaveProperty('bootstrapUrls'); + + // Verify the epochInfo structure + expect(connectionInfo.epochInfo).toHaveProperty('epochLength'); + expect(connectionInfo.epochInfo).toHaveProperty('number'); + expect(connectionInfo.epochInfo).toHaveProperty('endTime'); + expect(connectionInfo.epochInfo).toHaveProperty('retries'); + expect(connectionInfo.epochInfo).toHaveProperty('timeout'); + + // Verify data types and ranges + expect(connectionInfo.minNodeCount).toBeGreaterThanOrEqual(1); + expect(connectionInfo.bootstrapUrls.length).toBeGreaterThanOrEqual( + connectionInfo.minNodeCount + ); + + // Verify that all URLs start with http:// or https:// + connectionInfo.bootstrapUrls.forEach((url) => { + expect(url.startsWith('http://') || url.startsWith('https://')).toBe( + true + ); + }); + }); + + test('getConnectionInfo applies custom protocol when provided', async () => { + const customProtocol = 'https://'; + const connectionInfo = await getConnectionInfo({ + networkCtx, + nodeProtocol: customProtocol, + }); + + // Verify that all URLs use the custom protocol + connectionInfo.bootstrapUrls.forEach((url) => { + expect(url.startsWith(customProtocol)).toBe(true); + }); + }); +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.ts new file mode 100644 index 0000000000..4344a1ac1c --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.ts @@ -0,0 +1,98 @@ +import { DatilContext } from '../../../../../types'; +import { GetActiveUnkickedValidatorStructsAndCountsTransformed } from '../../../schemas/GetActiveUnkickedValidatorStructsAndCountsSchema'; +import { getActiveUnkickedValidatorStructsAndCounts } from '../../rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts'; + +/** + * Interface representing the structure of connection information + */ +interface ConnectionInfo { + epochInfo: { + epochLength: number; + number: number; + endTime: number; + retries: number; + timeout: number; + }; + minNodeCount: number; + bootstrapUrls: string[]; +} + +/** + * Interface for the parameters of getConnectionInfo function + */ +interface GetConnectionInfoParams { + networkCtx: DatilContext; + nodeProtocol?: string | null; +} + +/** + * Retrieves the connection information for a network. + * + * This high-level API builds on the raw contract API to provide formatted connection + * information including epoch details, minimum node count, and bootstrap URLs with + * proper protocol prefixes. + * + * @param params - Parameters for retrieving connection information + * @param params.networkCtx - The network context for the contract + * @param [params.nodeProtocol] - Optional protocol for the network node (HTTP or HTTPS) + * + * @returns An object containing the epoch information, minimum node count and an array of bootstrap URLs + * + * @throws Error if the minimum node count is not set or if the active validator set does not meet the threshold + */ +export async function getConnectionInfo({ + networkCtx, + nodeProtocol, +}: GetConnectionInfoParams): Promise { + // Get the validated data from the raw contract API + const validatedData = await getActiveUnkickedValidatorStructsAndCounts( + networkCtx + ); + + const { epochInfo, minNodeCount, validatorURLs } = + validatedData as GetActiveUnkickedValidatorStructsAndCountsTransformed; + + // Verify minimum node count + if (!minNodeCount) { + throw new Error('❌ Minimum validator count is not set'); + } + + // Verify validator set meets the minimum threshold + if (validatorURLs.length < Number(minNodeCount)) { + throw new Error( + `❌ Active validator set does not meet the threshold. Required: ${minNodeCount} but got: ${validatorURLs.length}` + ); + } + + // Transform the URLs to bootstrap URLs based on the provided protocol + // Note: validatorURLs from the schema are already processed with the network's httpProtocol + // but we can override that with the nodeProtocol parameter if provided + const bootstrapUrls = nodeProtocol + ? validatorURLs.map((url: string) => { + // Extract the hostname and port from the URL (remove any existing protocol) + const urlWithoutProtocol = url.replace(/^https?:\/\//, ''); + return `${nodeProtocol}${urlWithoutProtocol}`; + }) + : validatorURLs; + + return { + epochInfo, + minNodeCount: Number(minNodeCount), + bootstrapUrls, + }; +} + +/** + * Self-executable script for testing the getConnectionInfo function + * + * Usage: bun run src/services/lit/LitNetwork/vDatil/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.ts + */ +// if (import.meta.main) { +// // Use the development network context for testing +// const results = await getConnectionInfo({ +// networkCtx: datilDevNetworkContext, +// }); + +// console.log('Connection Info Results:'); +// console.log(JSON.stringify(results, null, 2)); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/MintPKPSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/MintPKPSchema.ts new file mode 100644 index 0000000000..ec78ff24ed --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/MintPKPSchema.ts @@ -0,0 +1,31 @@ +import { isHex, toBytes, toHex } from 'viem'; +import { z } from 'zod'; +import { AuthMethodSchema } from '../../../schemas/shared/AuthMethodSchema'; +import { ScopeSchemaRaw } from '../../../schemas/shared/ScopeSchema'; + +export const MintPKPSchema = z + .object({ + authMethod: AuthMethodSchema, + scopes: z.array(ScopeSchemaRaw), + pubkey: z.string().optional(), + customAuthMethodId: z.string().optional(), + }) + .transform((data) => { + // If no customAuthMethodId provided, return data as-is + if (!data.customAuthMethodId) { + return data; + } + + // Convert customAuthMethodId to hex if not already in hex format + const hexAuthMethodId = isHex(data.customAuthMethodId) + ? data.customAuthMethodId + : toHex(toBytes(data.customAuthMethodId)); + + // Return data with transformed customAuthMethodId + return { + ...data, + customAuthMethodId: hexAuthMethodId, + }; + }); + +export type MintPKPRequest = z.input; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.spec.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.spec.ts new file mode 100644 index 0000000000..31c4e842f3 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.spec.ts @@ -0,0 +1,78 @@ +import { + datilDevNetworkContext, + DatilDevNetworkContext, +} from '../../../../../../vDatil/datil-dev/networkContext'; +import { mintPKP } from './mintPKP'; + +describe('mintPKP', () => { + let networkCtx: DatilDevNetworkContext; + + beforeAll(() => { + networkCtx = datilDevNetworkContext; + }); + + test('should mint PKP with customAuthMethodId and return correct data format', async () => { + const res = await mintPKP( + { + authMethod: { + authMethodType: 1, + accessToken: '0x', + }, + scopes: ['sign-anything'], + customAuthMethodId: 'app-id-xxx:user-id-yyy', + }, + networkCtx + ); + + // Check response structure + expect(res).toHaveProperty('hash'); + expect(res).toHaveProperty('receipt'); + expect(res).toHaveProperty('data'); + expect(res.data).toHaveProperty('tokenId'); + expect(res.data).toHaveProperty('pubkey'); + expect(res.data).toHaveProperty('ethAddress'); + + // Verify data types + expect(typeof res.data.tokenId).toBe('bigint'); + expect(typeof res.data.pubkey).toBe('string'); + expect(typeof res.data.ethAddress).toBe('string'); + expect(res.data.pubkey).toMatch(/^0x/); + expect(res.data.ethAddress).toMatch(/^0x/); + }); + + test('show auto-convert native authMethod to authMethodId when customAuthMethodId is omitted', async () => { + const eoaAuthSig = { + sig: '', + derivedVia: 'web3.eth.personal.sign', + signedMessage: + 'I am creating an account to use Lit Protocol at 2022-04-12T09:23:31.290Z', + address: '0x7e7763BE1379Bb48AFEE4F5c232Fb67D7c03947F', + }; + + const res = await mintPKP( + { + authMethod: { + authMethodType: 1, + accessToken: JSON.stringify(eoaAuthSig), + }, + scopes: ['sign-anything'], + }, + networkCtx + ); + + // Find relevant events in decoded logs + const permittedAuthMethodScopeAddedEvent = res.decodedLogs.find( + (log) => log.eventName === 'PermittedAuthMethodScopeAdded' + ); + const permittedAuthMethodAddedEvent = res.decodedLogs.find( + (log) => log.eventName === 'PermittedAuthMethodAdded' + ); + + expect(permittedAuthMethodScopeAddedEvent?.args['id']).toBe( + '0x4cb822e6f51d9723f22b9374c4ef7d41ae2b1a5463738516aeb117ff387ba51a' + ); + expect(permittedAuthMethodAddedEvent?.args['id']).toBe( + '0x4cb822e6f51d9723f22b9374c4ef7d41ae2b1a5463738516aeb117ff387ba51a' + ); + }); +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.ts new file mode 100644 index 0000000000..fc94f84fae --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.ts @@ -0,0 +1,69 @@ +import { Hex } from 'viem'; +import { logger } from '@lit-protocol/logger'; +import { getAuthIdByAuthMethod } from '@lit-protocol/auth'; +import { DatilContext } from '../../../../../types'; +import { PKPData } from '../../../schemas/shared/PKPDataSchema'; +import { mintNextAndAddAuthMethods } from '../../rawContractApis/pkp/write/mintNextAndAddAuthMethods'; +import { LitTxRes } from '../../types'; +import { MintPKPRequest, MintPKPSchema } from './MintPKPSchema'; + +/** + * authMethod + * * authMethodType - you should be getting this directly from the authenticator + * + * scopes + * * no-permissions - This scope allows no permissions + * * sign-anything - This scope allows signing any data + * * personal-sign - This scope only allows signing messages using the EIP-191 scheme + * which prefixes "Ethereum Signed Message" to the data to be signed. + * This prefix prevents creating signatures that can be used for transactions. + * + * pubkey + * * Only apply to WebAuthn. Otherwise, default to '0x' + * + * customAuthMethodId + * * This field is usually used by the dApp owner to identify the user - eg. app-id-xxx:user-id-yyy + * + * ```ts + * const customAuthMethod = { + * authMethodType: 89989, + * authMethodId: 'app-id-xxx:user-id-yyy', + * accessToken: 'xxx', + * }; + * ``` + */ +export const mintPKP = async ( + request: MintPKPRequest, + networkCtx: DatilContext +): Promise> => { + const validatedRequest = MintPKPSchema.parse(request); + + logger.debug({ validatedRequest }); + + let _authMethodId: Hex; + + if (validatedRequest.customAuthMethodId) { + _authMethodId = validatedRequest.customAuthMethodId as Hex; + } else { + // Generate the authMethodId automatically from the auth method + const authMethodId = await getAuthIdByAuthMethod( + validatedRequest.authMethod + ); + _authMethodId = authMethodId as Hex; + } + + const tx = await mintNextAndAddAuthMethods( + { + keyType: 2, + permittedAuthMethodTypes: [validatedRequest.authMethod.authMethodType], + permittedAuthMethodIds: [_authMethodId], + permittedAuthMethodPubkeys: [validatedRequest.pubkey || '0x'], + permittedAuthMethodScopes: [validatedRequest.scopes], + addPkpEthAddressAsPermittedAddress: true, + sendPkpToItself: true, + }, + networkCtx + ); + + return tx; +}; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/index.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/index.ts new file mode 100644 index 0000000000..c17c7f13e2 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/index.ts @@ -0,0 +1,81 @@ +// ==================== Imports ==================== +import { claimAndMintNextAndAddAuthMethodsWithTypes } from './rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes'; +import { mintNextAndAddAuthMethods } from './rawContractApis/pkp/write/mintNextAndAddAuthMethods'; +import { tokenOfOwnerByIndex } from './rawContractApis/pkp/read/tokenOfOwnerByIndex'; +import { getPermittedAddresses } from './rawContractApis/permissions/read/getPermittedAddresses'; +import { getPermittedActions } from './rawContractApis/permissions/read/getPermittedActions'; +import { isPermittedAddress } from './rawContractApis/permissions/read/isPermittedAddress'; +import { isPermittedAction } from './rawContractApis/permissions/read/isPermittedAction'; +import { addPermittedAction } from './rawContractApis/permissions/write/addPermittedAction'; +import { removePermittedAction } from './rawContractApis/permissions/write/removePermittedAction'; +import { addPermittedAddress } from './rawContractApis/permissions/write/addPermittedAddress'; +import { removePermittedAddress } from './rawContractApis/permissions/write/removePermittedAddress'; +import { createLitContracts } from './utils/createLitContracts'; + +// High-level APIs +import { mintPKP } from './highLevelApis/mintPKP/mintPKP'; +import { PKPPermissionsManager } from './highLevelApis/PKPPermissionsManager'; + +// Define type for utils to avoid TypeScript serialization error +type UtilsType = { + createLitContracts: typeof createLitContracts; +}; + +// ==================== Exports ==================== +// ========== Treeshakable ========== +// Individual exports allow better tree-shaking +// export { claimAndMintNextAndAddAuthMethodsWithTypes } from "./rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes"; +// export { mintNextAndAddAuthMethods } from "./rawContractApis/pkp/write/mintNextAndAddAuthMethods"; +// export { tokenOfOwnerByIndex } from "./rawContractApis/pkp/read/tokenOfOwnerByIndex"; +// export { getPermittedAddresses } from "./rawContractApis/permissions/read/getPermittedAddresses"; +// export { getPermittedActions } from "./rawContractApis/permissions/read/getPermittedActions"; +// export { isPermittedAddress } from "./rawContractApis/permissions/read/isPermittedAddress"; +// export { isPermittedAction } from "./rawContractApis/permissions/read/isPermittedAction"; +// export { addPermittedAction } from "./rawContractApis/permissions/write/addPermittedAction"; +// export { removePermittedAction } from "./rawContractApis/permissions/write/removePermittedAction"; +// export { addPermittedAddress } from "./rawContractApis/permissions/write/addPermittedAddress"; +// export { removePermittedAddress } from "./rawContractApis/permissions/write/removePermittedAddress"; +// export { createLitContracts } from "./utils/createLitContracts"; + +// High-level APIs +// export { mintPKP } from "./highLevelApis/mintPKP/mintPKP"; +// export { PKPPermissionsManager } from "./highLevelApis/PKPPermissionsManager"; + +// ========== Convenience API ========== +export const rawApi = { + pkp: { + read: { + tokenOfOwnerByIndex, + }, + write: { + claimAndMintNextAndAddAuthMethodsWithTypes, + mintNextAndAddAuthMethods, + }, + }, + permission: { + read: { + getPermittedAddresses, + isPermittedAddress, + getPermittedActions, + isPermittedAction, + }, + write: { + addPermittedAction, + removePermittedAction, + addPermittedAddress, + removePermittedAddress, + }, + }, +}; + +export const api = { + // PKP Management + mintPKP, + + // Permissions Management + PKPPermissionsManager, +}; + +export const utils: UtilsType = { + createLitContracts, +}; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/README.md b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/README.md new file mode 100644 index 0000000000..9dcda6e047 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/README.md @@ -0,0 +1 @@ +Raw APIs provide direct, unmodified access to smart contract functions diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedActions.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedActions.ts new file mode 100644 index 0000000000..2a9ed87295 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedActions.ts @@ -0,0 +1,51 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const getPermittedActionsSchema = z.object({ + tokenId: toBigInt, +}); + +type GetPermittedActionsRequest = z.input; +type ValidatedGetPermittedActionsRequest = z.output< + typeof getPermittedActionsSchema +>; + +/** + * Get permitted actions for a PKP token + * @param request - Object containing tokenId + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted actions for the PKP token + */ +export async function getPermittedActions( + request: GetPermittedActionsRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedGetPermittedActionsRequest = + getPermittedActionsSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + const res = await pkpPermissionsContract.read.getPermittedActions([ + validatedRequest.tokenId, + ]); + + return res; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await getPermittedActions( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); +// console.log("permittedActions", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAddresses.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAddresses.ts new file mode 100644 index 0000000000..8067c4c014 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAddresses.ts @@ -0,0 +1,51 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const getPermittedAddressesSchema = z.object({ + tokenId: toBigInt, +}); + +type GetPermittedAddressesRequest = z.input; +type ValidatedGetPermittedAddressesRequest = z.output< + typeof getPermittedAddressesSchema +>; + +/** + * Get permitted addresses for a PKP token + * @param request - Object containing tokenId + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted addresses for the PKP token + */ +export async function getPermittedAddresses( + request: GetPermittedAddressesRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedGetPermittedAddressesRequest = + getPermittedAddressesSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + const res = await pkpPermissionsContract.read.getPermittedAddresses([ + validatedRequest.tokenId, + ]); + + return res; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await getPermittedAddresses( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); +// console.log("permittedAddresses", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethodScopes.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethodScopes.ts new file mode 100644 index 0000000000..b14b44fbd1 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethodScopes.ts @@ -0,0 +1,65 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { datilDevNetworkContext } from '../../../../../../datil-dev/networkContext'; + +const getPermittedAuthMethodScopesSchema = z.object({ + tokenId: toBigInt, + authMethodType: z.number(), + authMethodId: z.string(), + scopeId: z.number().optional(), +}); + +type GetPermittedAuthMethodScopesRequest = z.input< + typeof getPermittedAuthMethodScopesSchema +>; +type ValidatedGetPermittedAuthMethodScopesRequest = z.output< + typeof getPermittedAuthMethodScopesSchema +>; + +/** + * Get permitted scopes for a specific authentication method of a PKP token + * @param request - Object containing tokenId, authMethodType, authMethodId, and optional scopeId + * @param networkCtx - Network context for contract interactions + * @returns Array of boolean values indicating whether each scope is permitted + */ +export async function getPermittedAuthMethodScopes( + request: GetPermittedAuthMethodScopesRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedGetPermittedAuthMethodScopesRequest = + getPermittedAuthMethodScopesSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + const res = await pkpPermissionsContract.read.getPermittedAuthMethodScopes([ + validatedRequest.tokenId, + BigInt(validatedRequest.authMethodType), + validatedRequest.authMethodId as `0x${string}`, + validatedRequest.scopeId !== undefined + ? BigInt(validatedRequest.scopeId) + : BigInt(0), + ]); + + return res; +} + +// // Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await getPermittedAuthMethodScopes( +// { +// tokenId: +// '76136736151863037541847315168980811654782785653773679312890341037699996601290', +// authMethodType: 1, +// authMethodId: '0x1234567890abcdef1234567890abcdef12345678', +// // scopeId: 0, +// }, +// networkCtx +// ); +// console.log('permittedAuthMethodScopes', res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethods.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethods.ts new file mode 100644 index 0000000000..5753aac05c --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethods.ts @@ -0,0 +1,61 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { datilDevNetworkContext } from '../../../../../../datil-dev/networkContext'; + +const getPermittedAuthMethodsSchema = z.object({ + tokenId: toBigInt, +}); + +type GetPermittedAuthMethodsRequest = z.input< + typeof getPermittedAuthMethodsSchema +>; +type ValidatedGetPermittedAuthMethodsRequest = z.output< + typeof getPermittedAuthMethodsSchema +>; + +// Define the auth method return type +export interface AuthMethod { + authMethodType: bigint; + id: `0x${string}`; + userPubkey: `0x${string}`; +} + +/** + * Get permitted authentication methods for a PKP token + * @param request - Object containing tokenId + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted authentication methods for the PKP token + */ +export async function getPermittedAuthMethods( + request: GetPermittedAuthMethodsRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedGetPermittedAuthMethodsRequest = + getPermittedAuthMethodsSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + const res = await pkpPermissionsContract.read.getPermittedAuthMethods([ + validatedRequest.tokenId, + ]); + + return res; +} + +// // Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await getPermittedAuthMethods( +// { +// tokenId: +// '76136736151863037541847315168980811654782785653773679312890341037699996601290', +// }, +// networkCtx +// ); +// console.log('permittedAuthMethods', res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAction.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAction.ts new file mode 100644 index 0000000000..86617dad2d --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAction.ts @@ -0,0 +1,63 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { ipfsCidV0ToHex } from '../../../../../../../shared/utils/transformers/ipfsCidV0ToHex'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const isPermittedActionSchema = z + .object({ + ipfsId: isIpfsCidV0, + tokenId: toBigInt, + }) + .transform((data) => { + return { + ...data, + ipfsId: ipfsCidV0ToHex(data.ipfsId), + }; + }); + +type IsPermittedActionRequest = z.input; +type ValidatedIsPermittedActionRequest = z.output< + typeof isPermittedActionSchema +>; + +/** + * Checks if an action is permitted for a PKP token + * @param request - Object containing tokenId and ipfsId + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to boolean indicating if the action is permitted + */ +export async function isPermittedAction( + request: IsPermittedActionRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedIsPermittedActionRequest = + isPermittedActionSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + + return pkpPermissionsContract.read.isPermittedAction([ + validatedRequest.tokenId, + validatedRequest.ipfsId, + ]); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await isPermittedAction( +// { +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); + +// console.log("Is action permitted:", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAddress.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAddress.ts new file mode 100644 index 0000000000..8839ae3b09 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAddress.ts @@ -0,0 +1,57 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const isPermittedAddressSchema = z.object({ + tokenId: toBigInt, + address: z + .string() + .regex(/^0x[a-fA-F0-9]{40}$/) + .transform((val): `0x${string}` => val as `0x${string}`), +}); + +type IsPermittedAddressRequest = z.input; +type ValidatedIsPermittedAddressRequest = z.output< + typeof isPermittedAddressSchema +>; + +/** + * Checks if an address is permitted for a PKP token + * @param request - Object containing tokenId and address + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to boolean indicating if the address is permitted + */ +export async function isPermittedAddress( + request: IsPermittedAddressRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedIsPermittedAddressRequest = + isPermittedAddressSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + + return pkpPermissionsContract.read.isPermittedAddress([ + validatedRequest.tokenId, + validatedRequest.address, + ]); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await isPermittedAddress( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// address: "0x1234567890123456789012345678901234567890", +// }, +// networkCtx +// ); + +// console.log("Is address permitted:", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/utils/pubkeyToTokenId.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/utils/pubkeyToTokenId.ts new file mode 100644 index 0000000000..d4ad7ce410 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/utils/pubkeyToTokenId.ts @@ -0,0 +1,31 @@ +import { hexToBigInt, keccak256, toBytes } from 'viem'; + +/** + * Convert a public key to a token ID + * @param pubkey - The public key to convert + * @returns The token ID + * + * NOTE: code converted from: + * https://github.com/LIT-Protocol/lit-assets/blob/167d6908acc09c0aebdb6909f703b83921da4400/rust/lit-node/lit-node/src/utils/web.rs#L788-L802 + */ +export function pubkeyToTokenId(pubkey: string): bigint { + let pubkeyBytes: Uint8Array; + try { + pubkeyBytes = toBytes(pubkey); + } catch (e) { + throw new Error( + `Conversion error: ${e instanceof Error ? e.message : String(e)}` + ); + } + + if (pubkeyBytes.length !== 65) { + throw new Error( + `Invalid pubkey length. Expected 65 bytes, got ${pubkeyBytes.length}` + ); + } + + // this is what the original code did, but it returns a hex string instead of a bigint + // const tokenId = toHex(keccak256(pubkeyBytes)); + const tokenId = hexToBigInt(keccak256(pubkeyBytes)); + return tokenId; +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/utils/resolvePkpTokenId.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/utils/resolvePkpTokenId.ts new file mode 100644 index 0000000000..f6a6f23bbe --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/utils/resolvePkpTokenId.ts @@ -0,0 +1,117 @@ +/** + * Utility for resolving PKP token IDs from various input types (pubkey, address, or direct tokenId) + * This module provides a consistent way to obtain PKP token IDs regardless of the input format. + */ + +import { DatilContext } from '../../../../../../types'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { isEthAddress } from '../../../../../../../shared/utils/z-validate'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { pubkeyToTokenId } from './pubkeyToTokenId'; + +// Input validation schema +export const PkpIdentifierSchema = z.discriminatedUnion('field', [ + z + .object({ + field: z.literal('tokenId'), + tokenId: toBigInt, + }) + .strict(), + z + .object({ + field: z.literal('address'), + address: isEthAddress, + }) + .strict(), + z + .object({ + field: z.literal('pubkey'), + pubkey: z.string(), + }) + .strict(), +]); + +// Helper type to ensure only one property exists +type ExactlyOne = { + [K in keyof T]: Record & Partial, never>>; +}[keyof T]; + +// Raw input type that ensures only one identifier is provided +export type PkpIdentifierRaw = ExactlyOne<{ + tokenId: string | number | bigint; + address: string; + pubkey: string; +}>; + +/** + * Resolves a PKP token ID from various input types + * @param identifier - Object containing exactly one of: tokenId, address, or pubkey + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to the PKP token ID as bigint + * @throws Error if unable to resolve token ID or if input is invalid + */ +export async function resolvePkpTokenId( + identifier: PkpIdentifierRaw, + networkCtx?: DatilContext +): Promise { + // Check for multiple fields + const providedFields = Object.keys(identifier); + if (providedFields.length !== 1) { + throw new Error( + `Invalid identifier: exactly one of tokenId, address, or pubkey must be provided. Found: ${providedFields.join( + ', ' + )}` + ); + } + + // Determine the field type and validate input + const validatedInput = PkpIdentifierSchema.parse({ + field: + 'tokenId' in identifier + ? 'tokenId' + : 'address' in identifier + ? 'address' + : 'pubkey' in identifier + ? 'pubkey' + : (() => { + throw new Error( + 'Invalid identifier: must provide tokenId, address, or pubkey' + ); + })(), + ...identifier, + }); + + logger.debug({ validatedInput }); + + // Handle direct token ID + if (validatedInput.field === 'tokenId') { + return validatedInput.tokenId; + } + + // Handle pubkey + if (validatedInput.field === 'pubkey') { + return pubkeyToTokenId(validatedInput.pubkey); + } + + // Handle address (requires network context) + if (validatedInput.field === 'address') { + if (!networkCtx) { + throw new Error('Network context required for address resolution'); + } + + const { pubkeyRouterContract } = createLitContracts(networkCtx); + const pkpTokenId = await pubkeyRouterContract.read.ethAddressToPkpId([ + validatedInput.address as `0x${string}`, + ]); + + if (!pkpTokenId) { + throw new Error('PKP token ID not found for address'); + } + + return pkpTokenId; + } + + throw new Error('Unable to resolve PKP token ID'); +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAction.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAction.ts new file mode 100644 index 0000000000..2bc6b59fdf --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAction.ts @@ -0,0 +1,70 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { ipfsCidV0ToHex } from '../../../../../../../shared/utils/transformers/ipfsCidV0ToHex'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { ScopeSchemaRaw } from '../../../../schemas/shared/ScopeSchema'; +import { LitTxVoid } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +const addPermittedActionSchema = z + .object({ + ipfsId: isIpfsCidV0, + tokenId: toBigInt, + scopes: z.array(ScopeSchemaRaw), + }) + .transform((data) => { + return { + ...data, + ipfsId: ipfsCidV0ToHex(data.ipfsId), + }; + }); + +type AddPermittedActionRequest = z.input; +type ValidatedAddPermittedActionRequest = z.output< + typeof addPermittedActionSchema +>; + +export async function addPermittedAction( + request: AddPermittedActionRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedAddPermittedActionRequest = + addPermittedActionSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + const hash = await callWithAdjustedOverrides( + pkpPermissionsContract, + 'addPermittedAction', + [validatedRequest.tokenId, validatedRequest.ipfsId, validatedRequest.scopes] + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + return { hash, receipt, decodedLogs }; +} + +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await addPermittedAction( +// { +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// scopes: ["sign-anything"], +// }, +// networkCtx +// ); + +// console.log(res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAddress.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAddress.ts new file mode 100644 index 0000000000..52d0e1985e --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAddress.ts @@ -0,0 +1,77 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { ScopeSchemaRaw } from '../../../../schemas/shared/ScopeSchema'; +import { LitTxVoid } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +const addPermittedAddressSchema = z.object({ + tokenId: toBigInt, + address: z + .string() + .regex(/^0x[a-fA-F0-9]{40}$/) + .transform((val): `0x${string}` => val as `0x${string}`), + scopes: z.array(ScopeSchemaRaw), +}); + +type AddPermittedAddressRequest = z.input; +type ValidatedAddPermittedAddressRequest = z.output< + typeof addPermittedAddressSchema +>; + +/** + * Adds a permitted address to a PKP token + * @param request - Object containing tokenId, address and scopes + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to transaction details + */ +export async function addPermittedAddress( + request: AddPermittedAddressRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedAddPermittedAddressRequest = + addPermittedAddressSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + pkpPermissionsContract.write.addPermittedAddress; + + const hash = await callWithAdjustedOverrides( + pkpPermissionsContract, + 'addPermittedAddress', + [ + validatedRequest.tokenId, + validatedRequest.address, + validatedRequest.scopes, + ] + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + return { hash, receipt, decodedLogs }; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await addPermittedAddress( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// address: "0x1234567890123456789012345678901234567890", +// scopes: ["sign-anything"], +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAction.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAction.ts new file mode 100644 index 0000000000..e93cdf6ebc --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAction.ts @@ -0,0 +1,74 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { ipfsCidV0ToHex } from '../../../../../../../shared/utils/transformers/ipfsCidV0ToHex'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { LitTxVoid } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +const removePermittedActionSchema = z + .object({ + ipfsId: isIpfsCidV0, + tokenId: toBigInt, + }) + .transform((data) => { + return { + ...data, + ipfsId: ipfsCidV0ToHex(data.ipfsId), + }; + }); + +type RemovePermittedActionRequest = z.input; +type ValidatedRemovePermittedActionRequest = z.output< + typeof removePermittedActionSchema +>; + +/** + * Removes a permitted action from a PKP token + * @param request - Object containing tokenId and ipfsId + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to transaction details + */ +export async function removePermittedAction( + request: RemovePermittedActionRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedRemovePermittedActionRequest = + removePermittedActionSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + const hash = await callWithAdjustedOverrides( + pkpPermissionsContract, + 'removePermittedAction', + [validatedRequest.tokenId, validatedRequest.ipfsId] + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + return { hash, receipt, decodedLogs }; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await removePermittedAction( +// { +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts new file mode 100644 index 0000000000..3c789d8c52 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts @@ -0,0 +1,70 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { LitTxVoid } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +const removePermittedAddressSchema = z.object({ + tokenId: toBigInt, + address: z + .string() + .regex(/^0x[a-fA-F0-9]{40}$/) + .transform((val): `0x${string}` => val as `0x${string}`), +}); + +type RemovePermittedAddressRequest = z.input< + typeof removePermittedAddressSchema +>; +type ValidatedRemovePermittedAddressRequest = z.output< + typeof removePermittedAddressSchema +>; + +/** + * Removes a permitted address from a PKP token + * @param request - Object containing tokenId and address + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to transaction details + */ +export async function removePermittedAddress( + request: RemovePermittedAddressRequest, + networkCtx: DatilContext +): Promise { + const validatedRequest: ValidatedRemovePermittedAddressRequest = + removePermittedAddressSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + const hash = await callWithAdjustedOverrides( + pkpPermissionsContract, + 'removePermittedAddress', + [validatedRequest.tokenId, validatedRequest.address] + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + return { hash, receipt, decodedLogs }; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const res = await removePermittedAddress( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// address: "0x1234567890123456789012345678901234567890", +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/read/getPubkeyByTokenId.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/read/getPubkeyByTokenId.ts new file mode 100644 index 0000000000..b6b77801e6 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/read/getPubkeyByTokenId.ts @@ -0,0 +1,42 @@ +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +// Schema for the request +const getPubkeyByTokenIdSchema = z.object({ + tokenId: z.string(), +}); + +type GetPubkeyByTokenIdRequest = z.infer; + +/** + * Retrieves the public key associated with a PKP token ID + * @param request - Object containing the token ID + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to the public key as a string + */ +export async function getPubkeyByTokenId( + request: GetPubkeyByTokenIdRequest, + networkCtx: DatilContext +): Promise { + const { tokenId } = getPubkeyByTokenIdSchema.parse(request); + + logger.debug({ tokenId }, 'Fetching public key by token ID'); + + // Create contract instances + const { pubkeyRouterContract } = createLitContracts(networkCtx); + + // Convert tokenId to bigint for contract call + const tokenIdBigInt = BigInt(tokenId); + + // Call the contract to get the public key + const result = await pubkeyRouterContract.read.getPubkey([tokenIdBigInt]); + + // Ensure the result is a string + const publicKey = result.toString(); + + logger.debug({ tokenId, publicKey }, 'Public key fetched'); + + return publicKey; +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/read/tokenOfOwnerByIndex.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/read/tokenOfOwnerByIndex.ts new file mode 100644 index 0000000000..37a6dee042 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/read/tokenOfOwnerByIndex.ts @@ -0,0 +1,68 @@ +// import { datilDevNetworkContext } from "services/lit/LitNetwork/vDatil/datil-dev/networkContext"; +import { DatilContext } from '../../../../../../types'; +import { logger } from '@lit-protocol/logger'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +// Schema for the request +const tokenOfOwnerByIndexSchema = z.object({ + ownerAddress: z.string().startsWith('0x'), + index: z.number().int().nonnegative(), +}); + +type TokenOfOwnerByIndexRequest = z.infer; + +/** + * Retrieves a PKP token ID owned by a specific address at a given index + * @param request - Object containing owner address and index + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to the token ID as a string + */ +export async function tokenOfOwnerByIndex( + request: TokenOfOwnerByIndexRequest, + networkCtx: DatilContext +): Promise { + const { ownerAddress, index } = tokenOfOwnerByIndexSchema.parse(request); + + logger.debug({ ownerAddress, index }, 'Fetching token of owner by index'); + + // Create contract instances + const { pkpNftContract } = createLitContracts(networkCtx); + // Convert index to bigint for contract call + const indexBigInt = BigInt(index); + + // Ensure ownerAddress is properly typed as a hex string + const typedOwnerAddress = ownerAddress as `0x${string}`; + // Call the contract to get the token ID + try { + const result = await pkpNftContract.read.tokenOfOwnerByIndex([ + typedOwnerAddress, + indexBigInt, + ]); + // Convert the result to a string + const tokenId = result.toString(); + + logger.debug( + { ownerAddress, index, tokenId }, + 'Token of owner by index fetched' + ); + + return tokenId; + } catch (e) { + throw new Error('Error fetching token of owner by index'); + } +} + +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; + +// const tokenId = await tokenOfOwnerByIndex( +// { +// ownerAddress: "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", +// index: 0, +// }, +// networkCtx +// ); + +// console.log(tokenId); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.spec.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.spec.ts new file mode 100644 index 0000000000..90c82a244a --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.spec.ts @@ -0,0 +1,109 @@ +import { + datilDevNetworkContext, + DatilDevNetworkContext, +} from '../../../../../../../vDatil/datil-dev/networkContext'; +import { CallExecutionError, ContractFunctionRevertedError } from 'viem'; +import { claimAndMint } from './claimAndMint'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { ClaimAndMintSchema } from '../../../../schemas/ClaimAndMintSchema'; + +describe('LitChainClient', () => { + let networkCtx: DatilDevNetworkContext; + + beforeAll(async () => { + networkCtx = datilDevNetworkContext; + }); + + test('claimAndMint', async () => { + try { + const tx = await claimAndMint( + { + derivedKeyId: + '4d90d864b5f6adb1dd8ef5fbfc3d7ca74f6dd973f8c52ce12f8ce61aa6a1dfa4', + signatures: [ + { + r: '0xcc544fa05678fddff726ec2070bf0c4d2862e35f26ab74baede84dfdf117c841', + s: '0x2286aef0cd151175c63116cd622df3ea7bb8113982525ac07c0bd50d33ee7136', + v: 27, + }, + { + r: '0x7b2bbef14e4e277abe1ebb16e6803a4192c7157f2a7e190c6651b27d2b8eb98b', + s: '0x149d547cc36f1b996afa799c854fbe8776290864d22677e57f4fbbfac952f728', + v: 28, + }, + { + r: '0x59459b3830a4f5b365270a7cf559a8a4a8c90f348a68544e64fac3ed22190ad3', + s: '0x4d2bf3d3a9520fa205a60b6031aea84c5fe788fb5198a4a453fb9e20acb05488', + v: 28, + }, + ], + }, + networkCtx + ); + + console.log(tx); + + expect(tx.receipt.logs.length).toBeGreaterThan(0); + expect(tx.hash).toBeDefined(); + expect(tx.decodedLogs.length).toBeGreaterThan(0); + } catch (error) { + console.log(error); + + console.warn(`❗️If you want to pass this test then you need to generate a new unique keyId eg. + const res = await devEnv.litNodeClient.executeJs({ + authContext: getEoaAuthContext(devEnv, alice), + code: \`(async () => { + Lit.Actions.claimKey({keyId: "my-very-unique-key-id"}); + })();\`, + }); + `); + + const reason = ( + (error as CallExecutionError).cause as ContractFunctionRevertedError + ).reason; + expect(reason).toBe('PubkeyRouter: pubkey already has routing data'); + } + }); + + test('simulate claimAndMint', async () => { + const validatedRequest = ClaimAndMintSchema.parse({ + derivedKeyId: + 'fa9c79fc322d407c2b1f9e1589edd444c95bbadf4baf1f3a2863d33ee1ff7ab4', + signatures: [ + { + r: '0x87446889e5e551d88e968788d4f9651adcff0d2f4188ea9a27fe5d2436ddea9b', + s: '0x132ff3bdb078365c83bb5d24ee2c05408155b24234b39b962c8321a82d0c1f7f', + v: 27, + }, + { + r: '0xb15a8ed3a10f919301307ef463a72d40079c163107f43393cbf65701c73902de', + s: '0x20a4f1469c935363ac9cea5a7c5b65ffbd8f37c5d48be5c2e15966c9bbddde06', + v: 27, + }, + { + r: '0x97dee43dfbf3be22bc530e5322b33bf6a571d15c234e3d2251207d6c888bf140', + s: '0x7cfab33b2d4a9140089d2f0a4178b5fad0725fef4b6335741684f99715539bd1', + v: 27, + }, + ], + }); + const { derivedKeyId, signatures } = validatedRequest; + const { pkpNftContract, publicClient, stakingContract, walletClient } = + createLitContracts(networkCtx); + + const mintCost = await pkpNftContract.read.mintCost(); + + const result = await publicClient.simulateContract({ + address: pkpNftContract.address, + abi: pkpNftContract.abi, + functionName: 'claimAndMint', + args: [2n, derivedKeyId, signatures, stakingContract.address], + value: mintCost, + account: walletClient.account!, + }); + + expect(result.result).toBe( + 39540774701362869188416741706549054806716702330527798538695592469657559009284n + ); + }); +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.ts new file mode 100644 index 0000000000..f760511183 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.ts @@ -0,0 +1,50 @@ +import { DatilContext } from '../../../../../../../VDatil/types'; +import { + ClaimAndMintRaw, + ClaimAndMintSchema, +} from '../../../../schemas/ClaimAndMintSchema'; +import { + PKPData, + PKPDataSchema, +} from '../../../../schemas/shared/PKPDataSchema'; +import { LitTxRes } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +export async function claimAndMint( + request: ClaimAndMintRaw, + networkCtx: DatilContext +): Promise> { + const validatedRequest = ClaimAndMintSchema.parse(request); + + const { derivedKeyId, signatures } = validatedRequest; + + const { pkpNftContract, publicClient, stakingContract, walletClient } = + createLitContracts(networkCtx); + + // Get mint cost + const mintCost = await pkpNftContract.read.mintCost(); + const ECDSA_SECP256K1 = 2n; + + const hash = await callWithAdjustedOverrides( + pkpNftContract, + 'claimAndMint', + [ECDSA_SECP256K1, derivedKeyId, signatures, stakingContract.address], + { + account: walletClient.account!, + chain: walletClient.chain!, + value: mintCost, + } + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + const args = decodedLogs.find((log) => log.eventName === 'PKPMinted')?.args; + + const data = PKPDataSchema.parse(args); + + return { hash, receipt, decodedLogs, data }; +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.spec.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.spec.ts new file mode 100644 index 0000000000..be9a5e2dd7 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.spec.ts @@ -0,0 +1,66 @@ +import { + datilDevNetworkContext, + DatilDevNetworkContext, +} from '../../../../../../../vDatil/datil-dev/networkContext'; +import { CallExecutionError, ContractFunctionRevertedError } from 'viem'; +import { claimAndMintNextAndAddAuthMethodsWithTypes } from './claimAndMintNextAndAddAuthMethodsWithTypes'; + +describe('LitChainClient', () => { + let networkCtx: DatilDevNetworkContext; + + beforeAll(async () => { + networkCtx = datilDevNetworkContext; + }); + + test('claimAndMintNextAndAddAuthMethodsWithTypes', async () => { + try { + const tx = await claimAndMintNextAndAddAuthMethodsWithTypes( + { + derivedKeyId: + '62439a75ed81afa9366245c9107c413315a141b27129bd6340a9a7f9e63898a9', + signatures: [ + { + r: '0x08b8b9092f0e0a312b00be491382658ac18b3d6cb42c08a17b73eeeb92d7ac54', + s: '0x06da29df3f35b9db99cbfd20ebee83226777ebe52163f6cfe31baa25c829eb8a', + v: 27, + }, + { + r: '0x630e08a6feca8bc5d4078d87d8e846a7945bf0a8251d33f282a705ffedfce159', + s: '0x762fb3380187746975241f2441cf7579053517826ebf6baa798c820db565956f', + v: 28, + }, + { + r: '0x3757d04ea285fe52ec9efde9ae71d9f7113822ed7f34e112f5fbf4350c5161cc', + s: '0x027884f5fc8fb0079a4ce9d2c1021874ce36c3d1eca5a8832f85a5abcf9f50af', + v: 28, + }, + ], + authMethodType: 1, + authMethodId: '0x', + authMethodPubkey: '0x', + }, + networkCtx + ); + + console.log(tx); + + expect(tx.receipt.logs.length).toBeGreaterThan(0); + expect(tx.hash).toBeDefined(); + expect(tx.decodedLogs.length).toBeGreaterThan(0); + } catch (error) { + console.warn(`❗️If you want to pass this test then you need to generate a new unique keyId eg. +const res = await devEnv.litNodeClient.executeJs({ + authContext: getEoaAuthContext(devEnv, alice), + code: \`(async () => { + Lit.Actions.claimKey({keyId: "my-very-unique-key-id"}); + })();\`, +}); + `); + + const reason = ( + (error as CallExecutionError).cause as ContractFunctionRevertedError + ).reason; + expect(reason).toBe('PubkeyRouter: pubkey already has routing data'); + } + }); +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.ts new file mode 100644 index 0000000000..b20a8e69cf --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.ts @@ -0,0 +1,90 @@ +import { DatilContext } from '../../../../../../../VDatil/types'; +import { + ClaimRequestRaw, + ClaimRequestSchema, +} from '../../../../schemas/ClaimRequestSchema'; +import { + PKPData, + PKPDataSchema, +} from '../../../../schemas/shared/PKPDataSchema'; +import { LitTxRes } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +/** + * Claims and mints a PKP using derived key ID and signatures, then adds authentication methods. + * + * @param {ClaimRequestRaw} request - The request object containing PKP claiming parameters + * @param {string} request.derivedKeyId - The derived key ID for claiming + * @param {Signature[]} request.signatures - Array of signatures required for claiming + * @param {number} request.authMethodType - The type of authentication method to add + * @param {string} request.authMethodId - The ID of the authentication method + * @param {string} request.authMethodPubkey - The public key of the authentication method + * + * @returns {Promise} Object containing transaction hash, receipt, and decoded logs + */ +export async function claimAndMintNextAndAddAuthMethodsWithTypes( + request: ClaimRequestRaw, + networkCtx: DatilContext +): Promise> { + const validatedRequest = ClaimRequestSchema.parse(request); + const { pkpHelperContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + // Get mint cost + const mintCost = await pkpNftContract.read.mintCost(); + const ECDSA_SECP256K1 = 2n; + + const AUTH_METHOD_SCOPE = { + SIGN_ANYTHING: 1n, + PERSONAL_SIGN: 2n, + } as const; + + const claimMaterial = { + keyType: ECDSA_SECP256K1, + derivedKeyId: validatedRequest.derivedKeyId, + signatures: validatedRequest.signatures, + }; + + const authMethodData = { + keyType: ECDSA_SECP256K1, + permittedIpfsCIDs: [], + permittedIpfsCIDScopes: [], + permittedAddresses: [], + permittedAddressScopes: [], + permittedAuthMethodTypes: [validatedRequest.authMethodType], + permittedAuthMethodIds: [validatedRequest.authMethodId], + permittedAuthMethodPubkeys: [validatedRequest.authMethodPubkey], + permittedAuthMethodScopes: [[AUTH_METHOD_SCOPE.SIGN_ANYTHING]], + addPkpEthAddressAsPermittedAddress: true, + sendPkpToItself: true, + }; + + const hash = await callWithAdjustedOverrides( + pkpHelperContract, + 'claimAndMintNextAndAddAuthMethodsWithTypes', + [claimMaterial, authMethodData], + { + account: walletClient.account!, + chain: walletClient.chain!, + value: mintCost, + } + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + // { + // eventName: "PKPMinted", + // args: { + // tokenId: 46617443650351102737177954764827728186501111543181803171452029133339804161639n, + // pubkey: "0x045fb12df3d5c8482ab64f7cef10b7c44f9a55256e14ffe8bebe0c526279daa8379fd576b5ea5d26bc0b0973a1260138dfce3951b83378414acf8fe02fea299ccf", + // }, + // }, + const args = decodedLogs.find((log) => log.eventName === 'PKPMinted')?.args; + + const data = PKPDataSchema.parse(args); + + return { hash, receipt, decodedLogs, data }; +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.spec.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.spec.ts new file mode 100644 index 0000000000..20624fad20 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.spec.ts @@ -0,0 +1,39 @@ +import { + datilDevNetworkContext, + DatilDevNetworkContext, +} from '../../../../../../../vDatil/datil-dev/networkContext'; +import { mintNextAndAddAuthMethods } from './mintNextAndAddAuthMethods'; + +describe('LitChainClient', () => { + let networkCtx: DatilDevNetworkContext; + + beforeAll(async () => { + networkCtx = datilDevNetworkContext; + }); + + test('mintNextAndAddAuthMethods', async () => { + const tx = await mintNextAndAddAuthMethods( + { + keyType: 2, + permittedAuthMethodTypes: [2], + permittedAuthMethodIds: [ + '170d13600caea2933912f39a0334eca3d22e472be203f937c4bad0213d92ed71', + ], + permittedAuthMethodPubkeys: ['0x'], + permittedAuthMethodScopes: [[1]], + addPkpEthAddressAsPermittedAddress: true, + sendPkpToItself: true, + }, + networkCtx + ); + + console.log(tx); + + expect(tx.receipt.logs.length).toBeGreaterThan(0); + expect(tx.hash).toBeDefined(); + expect(tx.decodedLogs.length).toBeGreaterThan(0); + expect(tx.data.tokenId).toBeDefined(); + expect(tx.data.pubkey).toMatch(/^0x/); + expect(tx.data.ethAddress).toMatch(/^0x/); + }); +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.ts new file mode 100644 index 0000000000..528fe594be --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.ts @@ -0,0 +1,75 @@ +import { DatilContext } from '../../../../../../types'; +import { + MintRequestRaw, + MintRequestSchema, +} from '../../../../schemas/MintRequestSchema'; +import { + PKPData, + PKPDataSchema, +} from '../../../../schemas/shared/PKPDataSchema'; +import { LitTxRes } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +/** + * Mints a new Programmable Key Pair (PKP) with specified authentication methods. + * + * @param {MintRequestRaw} request - The request object containing PKP minting parameters + * @param {number} request.keyType - The type of key to mint + * @param {number[]} request.permittedAuthMethodTypes - Array of permitted authentication method types + * @param {string[]} request.permittedAuthMethodIds - Array of permitted authentication method IDs + * @param {string[]} request.permittedAuthMethodPubkeys - Array of permitted authentication method public keys + * @param {string[][]} request.permittedAuthMethodScopes - Array of scopes for each authentication method + * @param {boolean} request.addPkpEthAddressAsPermittedAddress - Whether to add the PKP's Ethereum address as a permitted address + * @param {boolean} request.sendPkpToItself - Whether to send the PKP to itself + * + * @returns {Promise} Object containing transaction hash, receipt, and decoded logs + */ +export async function mintNextAndAddAuthMethods( + request: MintRequestRaw, + networkCtx: DatilContext +): Promise> { + const validatedRequest = MintRequestSchema.parse(request); + + const { pkpHelperContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + const mintCost = await pkpNftContract.read.mintCost(); + + const hash = await callWithAdjustedOverrides( + pkpHelperContract, + 'mintNextAndAddAuthMethods', + [ + validatedRequest.keyType, + validatedRequest.permittedAuthMethodTypes, + validatedRequest.permittedAuthMethodIds, + validatedRequest.permittedAuthMethodPubkeys, + validatedRequest.permittedAuthMethodScopes, + validatedRequest.addPkpEthAddressAsPermittedAddress, + validatedRequest.sendPkpToItself, + ], + { + account: walletClient.account!, + chain: walletClient.chain!, + value: mintCost, + } + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + // { + // eventName: "PKPMinted", + // args: { + // tokenId: 46617443650351102737177954764827728186501111543181803171452029133339804161639n, + // pubkey: "0x045fb12df3d5c8482ab64f7cef10b7c44f9a55256e14ffe8bebe0c526279daa8379fd576b5ea5d26bc0b0973a1260138dfce3951b83378414acf8fe02fea299ccf", + // }, + // }, + const args = decodedLogs.find((log) => log.eventName === 'PKPMinted')?.args; + + const data = PKPDataSchema.parse(args); + + return { hash, receipt, decodedLogs, data }; +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.spec.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.spec.ts new file mode 100644 index 0000000000..c5aebad2fd --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.spec.ts @@ -0,0 +1,35 @@ +import { + datilDevNetworkContext, + DatilDevNetworkContext, +} from '../../../../../../vDatil/datil-dev/networkContext'; +import { getActiveUnkickedValidatorStructsAndCounts } from './getActiveUnkickedValidatorStructsAndCounts'; + +describe('LitChainClient', () => { + let networkCtx: DatilDevNetworkContext; + + beforeAll(async () => { + networkCtx = datilDevNetworkContext; + }); + + // Expected output: + // { + // epochInfo: { + // epochLength: 300, + // number: 31316, + // endTime: 1740008064, + // retries: 0, + // timeout: 60, + // }, + // minNodeCount: 2, + // validatorURLs: [ "https://15.235.83.220:7470", "https://15.235.83.220:7472", "https://15.235.83.220:7471" ], + // } + test('getActiveUnkickedValidatorStructsAndCounts', async () => { + const res = await getActiveUnkickedValidatorStructsAndCounts(networkCtx); + console.log(res); + expect(res.minNodeCount).toBeGreaterThanOrEqual(2); + expect(res.epochInfo.epochLength).toBeGreaterThan(0); + expect(res.validatorURLs.length).toBeGreaterThanOrEqual( + Number(res.minNodeCount) + ); + }); +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.ts new file mode 100644 index 0000000000..064cabe101 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.ts @@ -0,0 +1,42 @@ +import { DatilContext } from '../../../../../types'; +import { GetActiveUnkickedValidatorStructsAndCountsSchema } from '../../../schemas/GetActiveUnkickedValidatorStructsAndCountsSchema'; +import { createLitContracts } from '../../utils/createLitContracts'; + +export async function getActiveUnkickedValidatorStructsAndCounts( + networkCtx: DatilContext +) { + const { stakingContract } = createLitContracts(networkCtx); + + const res = + await stakingContract.read.getActiveUnkickedValidatorStructsAndCounts(); + + const validatedRes = + GetActiveUnkickedValidatorStructsAndCountsSchema.parse(res); + + const transformedRes = { + ...validatedRes, + validatorURLs: validatedRes.validatorURLs.map( + (url) => networkCtx.httpProtocol + url + ), + }; + + return transformedRes; +} + +// Expected output: +// { +// epochInfo: { +// epochLength: 300, +// number: 34144, +// endTime: 1741198445, +// retries: 0, +// timeout: 60, +// }, +// minNodeCount: 2, +// validatorURLs: [ "https://15.235.83.220:7470", "https://15.235.83.220:7472", "https://15.235.83.220:7471" ], +// } +// if (import.meta.main) { +// const networkCtx = datilDevNetworkContext; +// const res = await getActiveUnkickedValidatorStructsAndCounts(networkCtx); +// console.log(res); +// } diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/types.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/types.ts new file mode 100644 index 0000000000..93e6450ab7 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/types.ts @@ -0,0 +1,15 @@ +import { Hex, TransactionReceipt } from 'viem'; +import { DecodedLog } from './utils/decodeLogs'; + +export type LitTxRes = { + hash: Hex; + receipt: TransactionReceipt; + decodedLogs: DecodedLog[]; + data: T; +}; + +export type LitTxVoid = { + hash: Hex; + receipt: TransactionReceipt; + decodedLogs: DecodedLog[]; +}; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/archived/callWithSeparatedSteps.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/archived/callWithSeparatedSteps.ts new file mode 100644 index 0000000000..2140c7500d --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/archived/callWithSeparatedSteps.ts @@ -0,0 +1,115 @@ +import { + Chain, + Hash, + PublicClient, + WalletClient, + encodeFunctionData, +} from 'viem'; +import { GAS_LIMIT_ADJUSTMENT } from '../../../_config'; + +/** + * Similar to {@link callWithAdjustedOverrides}, key difference is that it allows granular processing and not strongly-typed. + * That's because we are not using an instance of a contract, so we can't infer the correct types. + * + * @param {object} params - The parameters for the contract interaction + * @param {object} params.abi - The ABI of the contract + * @param {string} params.address - The address of the contract + * @param {string} params.functionName - The name of the function to call + * @param {any[]} params.args - The arguments to pass to the function + * @param {object} params.overrides - Optional transaction overrides (e.g. value) + * @param {object} params.clients - The viem clients needed for the transaction + * @param {object} params.clients.publicClient - The public client for reading from the chain + * @param {object} params.clients.walletClient - The wallet client for sending transactions + * @param {object} params.chain - The chain configuration + * + * @example + const hash = await callWithSeparatedSteps({ + abi: [parseAbiItem(abiStringSignature)], + address: contractAddress, + functionName: "mintNextAndAddAuthMethods", + args: [ + validatedRequest.keyType, + validatedRequest.permittedAuthMethodTypes, + validatedRequest.permittedAuthMethodIds, + validatedRequest.permittedAuthMethodPubkeys, + validatedRequest.permittedAuthMethodScopes, + validatedRequest.addPkpEthAddressAsPermittedAddress, + // validatedRequest.sendPkpToItself, + ], + chain: networkCtx.chainConfig.chain, + clients: { + publicClient, + walletClient, + }, + overrides: { + value: mintCost, + }, + }); + * + * @returns {Promise} The transaction hash + */ + +export async function callWithSeparatedSteps({ + abi, + address, + functionName, + args, + overrides = {}, + clients: { publicClient, walletClient }, + chain, +}: { + abi: any[]; + address: `0x${string}`; + functionName: string; + args: any[]; + overrides?: { + value?: bigint; + gas?: bigint; + [key: string]: any; + }; + clients: { + publicClient: PublicClient; + walletClient: WalletClient; + }; + chain: Chain; +}): Promise { + // Step 1: Encode function data + const encodedData = encodeFunctionData({ + abi, + functionName, + args, + }); + + console.log('encodedData:', encodedData); + + // Step 2: Estimate gas + const estimatedGas = await publicClient.estimateGas({ + account: walletClient.account!, + to: address, + data: encodedData, + value: overrides.value || 0n, + }); + + console.log('estimatedGas:', estimatedGas); + + // Apply gas adjustment for Arbitrum Stylus contracts + const adjustedGas = + (estimatedGas * BigInt(GAS_LIMIT_ADJUSTMENT)) / BigInt(100); + + console.log('adjustedGas:', adjustedGas); + + // Step 3: Send transaction + const hash = await walletClient.sendTransaction({ + account: walletClient.account!, + to: address, + data: encodedData, + value: overrides.value || 0n, + gas: adjustedGas, + chain, + ...overrides, + }); + + console.log('hash:', hash); + + return hash; +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/callWithAdjustedOverrides.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/callWithAdjustedOverrides.ts new file mode 100644 index 0000000000..a22967a7d0 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/callWithAdjustedOverrides.ts @@ -0,0 +1,57 @@ +import { Hash } from 'viem'; +import { GAS_LIMIT_ADJUSTMENT } from '../../_config'; + +/** + * Strongly-typed wrapper around viem's `writeContract` that adjusts gas overrides for Arbitrum Stylus contracts + * NOTE: It must use an instance of a contract (from `getContract` viem function) so that we can infer the correct types + * @param contract The contract instance to call + * @param methodName The name of the contract method to call + * @param args The arguments to pass to the contract method + * @param overrides Optional transaction overrides (e.g. value, gasLimit) + * @returns A Promise that resolves to the transaction hash + */ +export async function callWithAdjustedOverrides< + TContract extends { + write: Record Promise>; + estimateGas: Record Promise>; + }, + TMethodName extends keyof TContract['write'], + TFunction extends TContract['write'][TMethodName], + TArgs extends Parameters[0] +>( + contract: TContract, + methodName: TMethodName & string, + args: TArgs, + overrides?: Parameters[1] +): Promise { + // Get the write function from the contract + const writeFunction = contract.write[methodName]; + if (!writeFunction) { + throw new Error(`Method ${methodName} not found on contract`); + } + + if (!overrides?.gas) { + // Otherwise estimate and adjust gas + const estimatedGas = await contract.estimateGas[methodName]( + args, + overrides + ); + + const adjustedGas = + (estimatedGas * BigInt(GAS_LIMIT_ADJUSTMENT)) / BigInt(100); + overrides = { + ...overrides, + gas: adjustedGas, + }; + } + + // For contract methods that expect array arguments, we need to pass the first array argument + // This handles cases where the contract method expects [arg1, arg2, ...] but we pass [[arg1, arg2, ...]] + const contractArgs = + Array.isArray(args) && args.length === 1 && Array.isArray(args[0]) + ? args[0] + : args; + + // Call the contract method with the provided arguments and overrides + return writeFunction(contractArgs, overrides); +} diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/createLitContracts.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/createLitContracts.ts new file mode 100644 index 0000000000..5a81b9e338 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/createLitContracts.ts @@ -0,0 +1,213 @@ +import { chronicleYellowstone } from '../../../../../shared/chains/yellowstone.ts'; +import { DatilContext } from '../../../../../VDatil/types'; +import { + createPublicClient, + createWalletClient, + getContract, + http, + PublicClient, + WalletClient, + Hex, +} from 'viem'; +import { privateKeyToAccount } from 'viem/accounts'; + +interface CreateLitContractsOptions { + publicClient?: PublicClient; +} + +// ============================================================================================================================================= +// ❗️ These types are required to fix the following error +// ERROR: The inferred type of this node exceeds the maximum length the compiler will serialize. An explicit type annotation is needed.ts(7056) +// If you could fix this WITHOUT breaking this code apart, or without setting the tsconfig's "declaration" to false, please do fix this. 🙏 +// ============================================================================================================================================= + +// Import the network context to get the contract data type +import { datilDevNetworkContext } from '../../../../../VDatil/datil-dev/networkContext'; + +// Extract just the ContractData type, and you can use this type for variables that will eventually hold contract data +let futureContractData = datilDevNetworkContext.chainConfig.contractData; + +const pkpNftContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.PKPNFT.methods.claimAndMint, + futureContractData.PKPNFT.methods.mintCost, + futureContractData.PKPNFT.methods.tokenOfOwnerByIndex, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const pkpHelperContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.PKPHelper.methods + .claimAndMintNextAndAddAuthMethodsWithTypes, + futureContractData.PKPHelper.methods.mintNextAndAddAuthMethods, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const stakingContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.Staking.methods + .getActiveUnkickedValidatorStructsAndCounts, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const pkpPermissionsContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.PKPPermissions.methods.addPermittedAction, + futureContractData.PKPPermissions.methods.addPermittedAddress, + futureContractData.PKPPermissions.methods.getPermittedActions, + futureContractData.PKPPermissions.methods.getPermittedAddresses, + futureContractData.PKPPermissions.methods.getPermittedAuthMethods, + futureContractData.PKPPermissions.methods.getPermittedAuthMethodScopes, + futureContractData.PKPPermissions.methods.removePermittedAction, + futureContractData.PKPPermissions.methods.removePermittedAddress, + futureContractData.PKPPermissions.methods.isPermittedAction, + futureContractData.PKPPermissions.methods.isPermittedAddress, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const pubkeyRouterContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.PubkeyRouter.methods.deriveEthAddressFromPubkey, + futureContractData.PubkeyRouter.methods.ethAddressToPkpId, + futureContractData.PubkeyRouter.methods.getEthAddress, + futureContractData.PubkeyRouter.methods.getPubkey, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); +// Hacky fix ends + +export const createLitContracts = ( + networkCtx: DatilContext, + opts?: CreateLitContractsOptions +) => { + // 1. Fallback to env-based private key if user doesn't supply a wagmi walletClient + const fallbackTransport = http(networkCtx.rpcUrl); + const fallbackAccount = privateKeyToAccount( + networkCtx.privateKey as `0x${string}` + ); + + // 2. Decide which publicClient to use + const publicClient = + opts?.publicClient ?? + createPublicClient({ + chain: networkCtx.chainConfig.chain, + transport: fallbackTransport, + }); + + // 3. Decide which walletClient to use + const walletClient = + networkCtx?.walletClient ?? + createWalletClient({ + chain: networkCtx.chainConfig.chain, + transport: fallbackTransport, + account: fallbackAccount, + }); + + // 4. Get the contract data + const contractData = networkCtx.chainConfig.contractData; + + if (!contractData) { + throw new Error( + `Contract data not found for network: ${networkCtx.network}` + ); + } + + // ---------- All your contracts ---------- + const pkpNftContract = getContract({ + address: contractData.PKPNFT.address, + abi: [ + contractData.PKPNFT.methods.claimAndMint, + contractData.PKPNFT.methods.mintCost, + contractData.PKPNFT.methods.tokenOfOwnerByIndex, + ...contractData.PKPNFT.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const pkpHelperContract = getContract({ + address: contractData.PKPHelper.address, + abi: [ + contractData.PKPHelper.methods.claimAndMintNextAndAddAuthMethodsWithTypes, + contractData.PKPHelper.methods.mintNextAndAddAuthMethods, + ...contractData.PKPHelper.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const stakingContract = getContract({ + address: contractData.Staking.address, + abi: [ + contractData.Staking.methods.getActiveUnkickedValidatorStructsAndCounts, + ...contractData.Staking.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const pkpPermissionsContract = getContract({ + address: contractData.PKPPermissions.address, + abi: [ + contractData.PKPPermissions.methods.addPermittedAction, + contractData.PKPPermissions.methods.addPermittedAddress, + contractData.PKPPermissions.methods.getPermittedActions, + contractData.PKPPermissions.methods.getPermittedAddresses, + contractData.PKPPermissions.methods.getPermittedAuthMethods, + contractData.PKPPermissions.methods.getPermittedAuthMethodScopes, + contractData.PKPPermissions.methods.removePermittedAction, + contractData.PKPPermissions.methods.removePermittedAddress, + contractData.PKPPermissions.methods.isPermittedAction, + contractData.PKPPermissions.methods.isPermittedAddress, + ...contractData.PKPPermissions.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const pubkeyRouterContract = getContract({ + address: contractData.PubkeyRouter.address, + abi: [ + contractData.PubkeyRouter.methods.deriveEthAddressFromPubkey, + contractData.PubkeyRouter.methods.ethAddressToPkpId, + contractData.PubkeyRouter.methods.getEthAddress, + contractData.PubkeyRouter.methods.getPubkey, + ...contractData.PubkeyRouter.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + // ---------- End of all your contracts ---------- + return { + pkpNftContract: pkpNftContract as unknown as typeof pkpNftContractType, + pkpHelperContract: + pkpHelperContract as unknown as typeof pkpHelperContractType, + stakingContract: stakingContract as unknown as typeof stakingContractType, + pkpPermissionsContract: + pkpPermissionsContract as unknown as typeof pkpPermissionsContractType, + publicClient, + walletClient, + pubkeyRouterContract: + pubkeyRouterContract as unknown as typeof pubkeyRouterContractType, + }; +}; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/decodeLogs.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/decodeLogs.ts new file mode 100644 index 0000000000..e4e4ea9920 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/apis/utils/decodeLogs.ts @@ -0,0 +1,82 @@ +import { DatilContext } from '../../../../../VDatil/types'; +import { decodeEventLog, Log } from 'viem'; +import { createLitContracts } from './createLitContracts'; + +export type DecodedLog = { + eventName: string; + args: { + [key: string]: any; + }; +}; + +/** + * Decodes event logs from Lit Protocol contract transactions + * @param logs Array of transaction logs to decode + * @returns Array of decoded logs with event names and parameters + */ +export const decodeLogs = async ( + logs: Log[], + networkCtx: DatilContext +): Promise => { + // Get network context for contract ABIs + const networkContext = networkCtx.chainConfig.contractData; + + if (!networkContext) { + throw new Error(`Network "${networkCtx.network}" not found`); + } + + const { + pkpHelperContract, + pkpNftContract, + pkpPermissionsContract, + pubkeyRouterContract, + publicClient, + walletClient, + } = createLitContracts(networkCtx); + + // Map contract addresses to their ABIs + const contractABIs = new Map(); + contractABIs.set(pkpNftContract.address.toLowerCase(), pkpNftContract.abi); + contractABIs.set( + pkpHelperContract.address.toLowerCase(), + pkpHelperContract.abi + ); + contractABIs.set( + pkpPermissionsContract.address.toLowerCase(), + pkpPermissionsContract.abi + ); + contractABIs.set( + pubkeyRouterContract.address.toLowerCase(), + pubkeyRouterContract.abi + ); + + // Decode each log + const decodedLogs = logs.map((log) => { + try { + const abi = contractABIs.get(log.address.toLowerCase()); + if (!abi) { + return { + ...log, + decoded: null, + error: 'No matching ABI found for address', + }; + } + + const decoded = decodeEventLog({ + abi, + data: log.data, + topics: log.topics, + }); + + return decoded; + } catch (error) { + return { + ...log, + decoded: null, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } + }); + + return decodedLogs as DecodedLog[]; +}; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/index.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/index.ts new file mode 100644 index 0000000000..dba87604f4 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/index.ts @@ -0,0 +1 @@ +export * from './apis/index'; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/ClaimAndMintSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/ClaimAndMintSchema.ts new file mode 100644 index 0000000000..bbe6dba2a7 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/ClaimAndMintSchema.ts @@ -0,0 +1,14 @@ +import { z } from 'zod'; +import { toHexString } from '../../../../shared/utils/z-transformers'; +import { SignatureDataSchema } from './shared/SignatureDataSchema'; + +export const ClaimAndMintSchema = z.object({ + derivedKeyId: toHexString, + signatures: z.array(SignatureDataSchema), +}); + +// ✨ Two types from the same schema: +// 1. User Input Type - this is the type that the user will input, eg. the API we expose for the user to call, could be a function of a request body from a POST request. (e.g., number, string, etc.) +// 2. Transformed/Validated Type - this is the type after the user input has been transformed and validated. Usually used for smart contract calls or external API calls (such as communication with nodes). (e.g., BigInt, etc.) +export type ClaimAndMintRaw = z.input; +export type ClaimAndMintTransformed = z.infer; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/ClaimRequestSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/ClaimRequestSchema.ts new file mode 100644 index 0000000000..122c51466c --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/ClaimRequestSchema.ts @@ -0,0 +1,33 @@ +import { t } from 'elysia'; +import { z } from 'zod'; +import { toBigInt, toHexString } from '../../../../shared/utils/z-transformers'; +import { SignatureDataSchema } from './shared/SignatureDataSchema'; + +export const ClaimRequestSchema = z.object({ + derivedKeyId: toHexString, + signatures: z.array(SignatureDataSchema), + authMethodType: toBigInt, + authMethodId: toHexString, + authMethodPubkey: toHexString, +}); + +// ✨ Two types from the same schema: +// 1. User Input Type - this is the type that the user will input, eg. the API we expose for the user to call, could be a function of a request body from a POST request. (e.g., number, string, etc.) +// 2. Transformed/Validated Type - this is the type after the user input has been transformed and validated. Usually used for smart contract calls or external API calls (such as communication with nodes). (e.g., BigInt, etc.) +export type ClaimRequestRaw = z.input; +export type ClaimRequestTransformed = z.infer; + +// ✨ Elysia Schema +export const tClaimRequestSchema = t.Object({ + derivedKeyId: t.String(), + signatures: t.Array( + t.Object({ + r: t.String(), + s: t.String(), + v: t.Number(), + }) + ), + authMethodType: t.Number(), + authMethodId: t.String(), + authMethodPubkey: t.String(), +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/GetActiveUnkickedValidatorStructsAndCountsSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/GetActiveUnkickedValidatorStructsAndCountsSchema.ts new file mode 100644 index 0000000000..bbcdbcb927 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/GetActiveUnkickedValidatorStructsAndCountsSchema.ts @@ -0,0 +1,61 @@ +import { z } from 'zod'; +import { generateValidatorURLs } from '../../../../shared/utils/transformers'; +import { toNumber } from '../../../../shared/utils/z-transformers'; + +const EpochInfoSchema = z.object({ + epochLength: toNumber, + number: toNumber, + endTime: toNumber, + retries: toNumber, + timeout: toNumber, +}); + +type EpochInfo = z.infer; + +const ValidatorStructSchema = z.object({ + ip: z.number(), + ipv6: z.bigint(), + port: z.number(), + nodeAddress: z.string().regex(/^0x[a-fA-F0-9]{40}$/), + reward: z.bigint(), + senderPubKey: z.bigint(), + receiverPubKey: z.bigint(), +}); + +type ValidatorStruct = z.infer; + +export const GetActiveUnkickedValidatorStructsAndCountsSchema = z + .array(z.union([EpochInfoSchema, toNumber, z.array(ValidatorStructSchema)])) + .transform((ctx) => { + const epochInfo = ctx[0] as EpochInfo; + const minNodeCount = ctx[1]; + const activeUnkickedValidatorStructs = ctx[2] as ValidatorStruct[]; + + const validatorURLs = generateValidatorURLs(activeUnkickedValidatorStructs); + + if (!minNodeCount) { + throw new Error('❌ Minimum validator count is not set'); + } + + if (validatorURLs.length < Number(minNodeCount)) { + throw new Error( + `❌ Active validator set does not meet the consensus. Required: ${minNodeCount} but got: ${activeUnkickedValidatorStructs.length}` + ); + } + + return { + epochInfo, + minNodeCount, + validatorURLs, + }; + }); + +// ✨ Two types from the same schema: +// 1. User Input Type - this is the type that the user will input, eg. the API we expose for the user to call, could be a function of a request body from a POST request. (e.g., number, string, etc.) +// 2. Transformed/Validated Type - this is the type after the user input has been transformed and validated. Usually used for smart contract calls or external API calls (such as communication with nodes). (e.g., BigInt, etc.) +export type GetActiveUnkickedValidatorStructsAndCountsRaw = z.input< + typeof GetActiveUnkickedValidatorStructsAndCountsSchema +>; +export type GetActiveUnkickedValidatorStructsAndCountsTransformed = z.infer< + typeof GetActiveUnkickedValidatorStructsAndCountsSchema +>; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/MintRequestSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/MintRequestSchema.ts new file mode 100644 index 0000000000..ac660fa672 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/MintRequestSchema.ts @@ -0,0 +1,37 @@ +import { t } from 'elysia'; + +import { z } from 'zod'; +import { + toBigInt, + toBigIntArray, + toBigIntMatrix, + toBoolean, + toHexStringArray, +} from '../../../../shared/utils/z-transformers'; + +export const MintRequestSchema = z.object({ + keyType: toBigInt, + permittedAuthMethodTypes: toBigIntArray, + permittedAuthMethodIds: toHexStringArray, + permittedAuthMethodPubkeys: toHexStringArray, + permittedAuthMethodScopes: toBigIntMatrix, + addPkpEthAddressAsPermittedAddress: toBoolean, + sendPkpToItself: toBoolean, +}); + +// ✨ Two types from the same schema: +// 1. User Input Type - this is the type that the user will input, eg. the API we expose for the user to call, could be a function of a request body from a POST request. (e.g., number, string, etc.) +// 2. Transformed/Validated Type - this is the type after the user input has been transformed and validated. Usually used for smart contract calls or external API calls (such as communication with nodes). (e.g., BigInt, etc.) +export type MintRequestRaw = z.input; +export type MintRequestTransformed = z.infer; + +// ✨ Elysia Schema +export const tMintRequestSchema = t.Object({ + keyType: t.Number(), + permittedAuthMethodTypes: t.Array(t.Number()), + permittedAuthMethodIds: t.Array(t.String()), + permittedAuthMethodPubkeys: t.Array(t.String()), + permittedAuthMethodScopes: t.Array(t.Array(t.Number())), + addPkpEthAddressAsPermittedAddress: t.Boolean(), + sendPkpToItself: t.Boolean(), +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/AuthMethodSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/AuthMethodSchema.ts new file mode 100644 index 0000000000..507589ae01 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/AuthMethodSchema.ts @@ -0,0 +1,24 @@ +import { z } from 'zod'; + +const AUTH_METHOD_TYPE = { + EthWallet: 1, + LitAction: 2, + WebAuthn: 3, + Discord: 4, + Google: 5, + GoogleJwt: 6, + AppleJwt: 8, + StytchOtp: 9, + StytchEmailFactorOtp: 10, + StytchSmsFactorOtp: 11, + StytchWhatsAppFactorOtp: 12, + StytchTotpFactorOtp: 13, +} as const; + +export const AuthMethodSchema = z.object({ + authMethodType: z.nativeEnum(AUTH_METHOD_TYPE), + accessToken: z.string(), +}); + +// enable this if needed +// export type AuthMethod = z.infer; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/PKPDataSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/PKPDataSchema.ts new file mode 100644 index 0000000000..71c5fe9919 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/PKPDataSchema.ts @@ -0,0 +1,14 @@ +import { computeAddress } from 'ethers/lib/utils'; +import { z } from 'zod'; + +export const PKPDataSchema = z + .object({ + tokenId: z.bigint(), + pubkey: z.string(), + }) + .transform((data) => ({ + ...data, + ethAddress: computeAddress(data.pubkey), + })); + +export type PKPData = z.infer; diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/ScopeSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/ScopeSchema.ts new file mode 100644 index 0000000000..17f0c85eba --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/ScopeSchema.ts @@ -0,0 +1,30 @@ +import { z } from 'zod'; + +/** + * Defines schemas for PKP permission scopes. + * Handles both string inputs and bigint transformations for contract calls. + */ + +// Valid scope values +export const SCOPE_VALUES = [ + 'no-permissions', + 'sign-anything', + 'personal-sign', +] as const; +export type ScopeString = (typeof SCOPE_VALUES)[number]; + +// Mapping from string scopes to their bigint representation +export const SCOPE_MAPPING = { + 'no-permissions': 0n, + 'sign-anything': 1n, + 'personal-sign': 2n, +} as const; +export type ScopeBigInt = (typeof SCOPE_MAPPING)[ScopeString]; + +// Schema for string values (used in high-level APIs) +export const ScopeStringSchema = z.enum(SCOPE_VALUES); + +// Schema that transforms strings to bigints (used in contract calls) +export const ScopeSchemaRaw = ScopeStringSchema.transform( + (val) => SCOPE_MAPPING[val] +); diff --git a/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/SignatureDataSchema.ts b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/SignatureDataSchema.ts new file mode 100644 index 0000000000..05c3fd9c86 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/LitChainClient/schemas/shared/SignatureDataSchema.ts @@ -0,0 +1,8 @@ +import { z } from 'zod'; +import { toHexString } from '../../../../../shared/utils/z-transformers'; + +export const SignatureDataSchema = z.object({ + r: toHexString, + s: toHexString, + v: z.number(), +}); diff --git a/packages/networks/src/lib/networks/VDatil/common/NetworkContext.ts b/packages/networks/src/lib/networks/VDatil/common/NetworkContext.ts new file mode 100644 index 0000000000..de756d9076 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/common/NetworkContext.ts @@ -0,0 +1,17 @@ +import { Chain, WalletClient } from 'viem'; + +const HTTP = 'http://' as const; +const HTTPS = 'https://' as const; + +// Datil Network Context +export interface INetworkContext { + network: string; + rpcUrl: string; + privateKey: string; + chainConfig: { + chain: Chain; + contractData: T; + }; + httpProtocol: typeof HTTP | typeof HTTPS; + walletClient: WalletClient; +} diff --git a/packages/networks/src/lib/networks/VDatil/datil-dev/networkContext.ts b/packages/networks/src/lib/networks/VDatil/datil-dev/networkContext.ts new file mode 100644 index 0000000000..c9560d0900 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/datil-dev/networkContext.ts @@ -0,0 +1,32 @@ +import { datilDevSignatures } from '@lit-protocol/contracts'; +import { createWalletClient, http } from 'viem'; +import { privateKeyToAccount } from 'viem/accounts'; +import { + anvilConfig, + anvilFirstPrivateKey, + anvilRpcUrl, +} from '../../shared/chains/anvil.js'; +import { INetworkContext } from '../common/NetworkContext'; + +export const datilDevNetworkContext: INetworkContext< + typeof datilDevSignatures +> = { + network: 'datil-dev', + rpcUrl: anvilRpcUrl, + privateKey: anvilFirstPrivateKey, + chainConfig: { + chain: anvilConfig, + contractData: datilDevSignatures, + }, + httpProtocol: 'https://', + walletClient: createWalletClient({ + chain: anvilConfig, + transport: http(anvilRpcUrl), + account: privateKeyToAccount(anvilFirstPrivateKey), + }), +}; + +export type DatilDevNetworkContext = typeof datilDevNetworkContext; + +// network object calls the chain client +// LitClient could use the network to figure out diff --git a/packages/networks/src/lib/networks/VDatil/datil-mainnet/networkContext.ts b/packages/networks/src/lib/networks/VDatil/datil-mainnet/networkContext.ts new file mode 100644 index 0000000000..9b99462e52 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/datil-mainnet/networkContext.ts @@ -0,0 +1,29 @@ +import { datilSignatures } from '@lit-protocol/contracts'; +import { createWalletClient, http } from 'viem'; +import { privateKeyToAccount } from 'viem/accounts'; +import { + anvilConfig, + anvilFirstPrivateKey, + anvilRpcUrl, +} from '../../shared/chains/anvil'; +import { INetworkContext } from '../common/NetworkContext'; + +export const datilMainnetNetworkContext: INetworkContext< + typeof datilSignatures +> = { + network: 'datil', + rpcUrl: anvilRpcUrl, + privateKey: anvilFirstPrivateKey, + chainConfig: { + chain: anvilConfig, + contractData: datilSignatures, + }, + httpProtocol: 'https://', + walletClient: createWalletClient({ + chain: anvilConfig, + transport: http(anvilRpcUrl), + account: privateKeyToAccount(anvilFirstPrivateKey), + }), +}; + +export type DatilMainnetNetworkContext = typeof datilMainnetNetworkContext; diff --git a/packages/networks/src/lib/networks/VDatil/datil-test/networkContext.ts b/packages/networks/src/lib/networks/VDatil/datil-test/networkContext.ts new file mode 100644 index 0000000000..86749f5784 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/datil-test/networkContext.ts @@ -0,0 +1,29 @@ +import { datilTestSignatures } from '@lit-protocol/contracts'; +import { createWalletClient, http } from 'viem'; +import { privateKeyToAccount } from 'viem/accounts'; +import { + anvilConfig, + anvilFirstPrivateKey, + anvilRpcUrl, +} from '../../shared/chains/anvil'; +import { INetworkContext } from '../common/NetworkContext'; + +export const datilTestNetworkContext: INetworkContext< + typeof datilTestSignatures +> = { + network: 'datil-test', + rpcUrl: anvilRpcUrl, + privateKey: anvilFirstPrivateKey, + chainConfig: { + chain: anvilConfig, + contractData: datilTestSignatures, + }, + httpProtocol: 'https://', + walletClient: createWalletClient({ + chain: anvilConfig, + transport: http(anvilRpcUrl), + account: privateKeyToAccount(anvilFirstPrivateKey), + }), +}; + +export type DatilTestNetworkContext = typeof datilTestNetworkContext; diff --git a/packages/networks/src/lib/networks/VDatil/types.ts b/packages/networks/src/lib/networks/VDatil/types.ts new file mode 100644 index 0000000000..db35e89234 --- /dev/null +++ b/packages/networks/src/lib/networks/VDatil/types.ts @@ -0,0 +1,25 @@ +import { DatilDevNetworkContext } from './datil-dev/networkContext'; +import { DatilMainnetNetworkContext } from './datil-mainnet/networkContext'; +import { DatilTestNetworkContext } from './datil-test/networkContext'; + +/** + * Union type representing all supported Datil network contexts. + * + * @remarks + * When using this union type, TypeScript will only allow access to properties/methods + * that exist in both network contexts. If you attempt to use a method that exists + * in only one of the network contexts (Dev or Test), TypeScript will throw a + * compilation error. + * + * @example + * ```typescript + * function example(networkCtx: DatilContext) { + * networkCtx.sharedMethod(); // ✅ OK - exists in both contexts + * networkCtx.devOnlyMethod(); // ❌ Error - only exists in DevNetwork + * } + * ``` + */ +export type DatilContext = + | DatilDevNetworkContext + | DatilTestNetworkContext + | DatilMainnetNetworkContext; diff --git a/packages/networks/src/lib/networks/shared/chains/anvil.ts b/packages/networks/src/lib/networks/shared/chains/anvil.ts new file mode 100644 index 0000000000..63912a9023 --- /dev/null +++ b/packages/networks/src/lib/networks/shared/chains/anvil.ts @@ -0,0 +1,30 @@ +import { Chain } from 'viem'; + +export const anvilRpcUrl = 'http://127.0.0.1:8545'; +export const anvilFirstPrivateKey = + '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80'; +export const anvilConfig: Chain = { + id: 31337, + name: 'Local Anvil', + nativeCurrency: { + name: 'Ether', + symbol: 'ETH', + decimals: 18, + }, + rpcUrls: { + default: { + http: [anvilRpcUrl], + webSocket: [], + }, + public: { + http: [anvilRpcUrl], + webSocket: [], + }, + }, + blockExplorers: { + default: { + name: 'Anvil Explorer', + url: anvilRpcUrl, + }, + }, +}; diff --git a/packages/networks/src/lib/networks/shared/chains/yellowstone.ts.ts b/packages/networks/src/lib/networks/shared/chains/yellowstone.ts.ts new file mode 100644 index 0000000000..29c512dd3a --- /dev/null +++ b/packages/networks/src/lib/networks/shared/chains/yellowstone.ts.ts @@ -0,0 +1,71 @@ +import { Chain, http } from 'viem'; +import { createConfig } from 'wagmi'; + +export const chronicleYellowstone: Chain = { + id: 175188, + name: 'Chronicle Yellowstone - Lit Protocol Testnet', + nativeCurrency: { + name: 'Test LPX', + symbol: 'tstLPX', + decimals: 18, + }, + rpcUrls: { + default: { + http: ['https://yellowstone-rpc.litprotocol.com/'], + webSocket: [], + }, + public: { + http: ['https://yellowstone-rpc.litprotocol.com/'], + webSocket: [], + }, + }, + blockExplorers: { + default: { + name: 'Yellowstone Explorer', + url: 'https://yellowstone-explorer.litprotocol.com/', + }, + }, +}; + +/** + * Here's how your use it: + * + * + * + * + * import React, { useEffect } from "react"; + * import { usePublicClient, useWalletClient } from "wagmi"; + * import { createLitContracts } from "../createLitContracts"; + * + * export function ExampleComponent() { + * const publicClient = usePublicClient(); + * const { data: walletClient } = useWalletClient(); + * + * useEffect(() => { + * if (publicClient && walletClient) { + * // Pass wagmi's clients into your Lit function + * const { pkpNftContract, pkpHelperContract } = createLitContracts( + * "datil-dev", + * { + * publicClient, + * walletClient, + * } + * ); + * + * // Now you can do contract reads/writes with the user's wallet + * (async () => { + * const cost = await pkpNftContract.read.mintCost(); + * console.log("mintCost =", cost); + * })(); + * } + * }, [publicClient, walletClient]); + * + * return
My wagmi + Lit example
; + * } + */ +export const WagmiConfig = createConfig({ + chains: [chronicleYellowstone], + transports: { + [chronicleYellowstone.id]: http(), + }, +}); diff --git a/packages/networks/src/lib/networks/shared/logger.ts b/packages/networks/src/lib/networks/shared/logger.ts new file mode 100644 index 0000000000..4e0d8715da --- /dev/null +++ b/packages/networks/src/lib/networks/shared/logger.ts @@ -0,0 +1,12 @@ +import { pino } from 'pino'; +import pinoCaller from 'pino-caller'; + +const baseLogger = pino({ + level: process.env['LOG_LEVEL'] ?? 'info', + transport: { + target: 'pino-pretty', + options: { colorize: true, translateTime: true }, + }, +}); + +export const logger = pinoCaller(baseLogger as any); diff --git a/packages/networks/src/lib/networks/shared/utils/transformers.ts b/packages/networks/src/lib/networks/shared/utils/transformers.ts new file mode 100644 index 0000000000..aa34329ece --- /dev/null +++ b/packages/networks/src/lib/networks/shared/utils/transformers.ts @@ -0,0 +1,86 @@ +import { Hex } from 'viem'; + +/** + * Ensures a hex string has '0x' prefix + * @param value - The hex string to check + * @returns The hex string with '0x' prefix + */ +export function hexPrefixed(value: string): Hex { + return value.startsWith('0x') ? (value as Hex) : (`0x${value}` as Hex); +} + +/** + * Safely converts a value to BigInt, returns 0n if conversion fails + */ +export function safeBigInt(value: string | number): bigint { + try { + if (typeof value === 'string' && value.trim() === '') return 0n; + return BigInt(value); + } catch { + return 0n; + } +} + +/** + * @example + * const obj = ['a', 'b', 'c'] + * ObjectMapFromArray(obj) // { a: 'a', b: 'b', c: 'c' } + */ +export const ObjectMapFromArray = (arr: T) => { + return arr.reduce( + (acc, scope) => ({ ...acc, [scope]: scope }), + {} as { [K in T[number]]: K } + ); +}; + +/** + * Generates an array of validator URLs based on the given validator structs and network configurations. + * + * @property {ValidatorStruct[]} activeValidatorStructs - Array of validator structures containing IP and port information. + * @returns {string[]} Array of constructed validator URLs. + * + * @example + * // Example input + * const activeValidatorStructs = [ + * { ip: 3232235777, port: 443 }, // IP: 192.168.1.1 + * { ip: 3232235778, port: 80 }, // IP: 192.168.1.2 + * ]; + * + * // Example output + * const urls = generateValidatorURLs(activeValidatorStructs); + * console.log(urls); + * Output: [ + * "192.168.1.1:443", + * "192.168.1.2:80" + * ] + */ +export function generateValidatorURLs( + ipAndPorts: { + ip: number; + port: number; + }[] +): string[] { + return ipAndPorts.map((item) => { + const ip = intToIP(item.ip); + const port = item.port; + return `${ip}:${port}`; + }); +} + +/** + * Converts an integer IP address to a string representation of the IP address. + * + * @param ip - The integer IP address to convert. + * @returns The string representation of the IP address. + */ +export const intToIP = (ip: number) => { + // Convert integer to binary string and pad with leading zeros to make it 32-bit + const binaryString = ip.toString(2).padStart(32, '0'); + // Split into octets and convert each one to decimal + const ipArray = []; + for (let i = 0; i < 32; i += 8) { + ipArray.push(parseInt(binaryString.substring(i, i + 8), 2)); + } + // Join the octets with dots to form the IP address + return ipArray.join('.'); +}; diff --git a/packages/networks/src/lib/networks/shared/utils/transformers/ipfsCidV0ToHex.ts b/packages/networks/src/lib/networks/shared/utils/transformers/ipfsCidV0ToHex.ts new file mode 100644 index 0000000000..a0d50ba645 --- /dev/null +++ b/packages/networks/src/lib/networks/shared/utils/transformers/ipfsCidV0ToHex.ts @@ -0,0 +1,24 @@ +import bs58 from 'bs58'; +import { toHex } from 'viem'; + +/** + * Converts a multihash (IPFS CIDv0) string to a hex string + * @param multihash - The multihash string to convert + * @returns The hex string + * + * @example + * input: "QmSQDKRWEXZ9CGoucSTR11Mv6fhGqaytZ1MqrfHdkuS1Vg" + * output: "0x12203c585c73d37158fa12f5b83f0af99d3d1a8072c9a5a6e3a289dc785b9da88687" + */ +export function ipfsCidV0ToHex(multihash: string) { + const decoded = bs58.decode(multihash); + return toHex(decoded); +} + +// can be executed directly from the command line: +// bun run packages/networks/src/lib/networks/shared/utils/transformers/ipfsCidV0ToHex.ts +// if (import.meta.main) { +// const multihash = 'QmSQDKRWEXZ9CGoucSTR11Mv6fhGqaytZ1MqrfHdkuS1Vg'; +// const bytes = ipfsCidV0ToHex(multihash); +// console.log(bytes); +// } diff --git a/packages/networks/src/lib/networks/shared/utils/z-transformers.ts b/packages/networks/src/lib/networks/shared/utils/z-transformers.ts new file mode 100644 index 0000000000..b82278437c --- /dev/null +++ b/packages/networks/src/lib/networks/shared/utils/z-transformers.ts @@ -0,0 +1,69 @@ +import { z } from 'zod'; +import { hexPrefixed, safeBigInt } from './transformers'; + +// Transform a number or string to a BigInt +// eg. "2" or 2 -> 2n +export const toBigInt = z + .union([z.string(), z.number()]) + .transform((n) => safeBigInt(n)); + +// Transform a number/string or array of numbers/strings to an array of BigInts +// eg. "1" -> [1n] +// eg. [1, "2", 3] -> [1n, 2n, 3n] +export const toBigIntArray = z + .union([z.string(), z.number(), z.array(z.union([z.string(), z.number()]))]) + .transform((val) => { + if (Array.isArray(val)) { + return val.map(safeBigInt); + } + return [safeBigInt(val)]; + }); + +// Transform a string to a hex string type +// eg. "123" -> "0x123" +export const toHexString = z.string().transform((s) => hexPrefixed(s)); + +// Transform a string or array of strings to an array of hex strings +// eg. undefined -> ["0x"] +// eg. "123" -> ["0x123"] +// eg. ["123", "456"] -> ["0x123", "0x456"] +export const toHexStringArray = z + .union([z.string(), z.array(z.string()), z.undefined()]) + .transform((val) => { + if (!val) return [hexPrefixed('')]; + if (Array.isArray(val)) { + return val.map(hexPrefixed); + } + return [hexPrefixed(val)]; + }); + +// Transform arrays of numbers/strings to arrays of arrays of BigInts +// eg. undefined -> [[]] +// eg. [[1, "2"], ["3", 4]] -> [[1n, 2n], [3n, 4n]] +export const toBigIntMatrix = z + .union([ + z.array(z.array(z.union([z.string(), z.number(), z.bigint()]))), + z.undefined(), + ]) + .transform((val) => { + if (!val) return [[]]; + return val.map((inner) => + inner.map((v) => (typeof v === 'bigint' ? v : safeBigInt(v))) + ); + }); + +// Transform undefined or boolean to boolean +// eg. undefined -> false +// eg. true -> true +export const toBoolean = z + .union([z.boolean(), z.undefined()]) + .transform((val) => Boolean(val ?? false)); + +// Transform a number or string to a number +// eg. "2" -> 2 +// eg. 2n -> 2 +export const toNumber = z + .union([z.bigint(), z.number(), z.string()]) + .transform((val) => { + return Number(val); + }); diff --git a/packages/networks/src/lib/networks/shared/utils/z-validate.ts b/packages/networks/src/lib/networks/shared/utils/z-validate.ts new file mode 100644 index 0000000000..428be4e6ce --- /dev/null +++ b/packages/networks/src/lib/networks/shared/utils/z-validate.ts @@ -0,0 +1,8 @@ +import { z } from 'zod'; + +export const isEthAddress = z + .string() + .regex(/^0x[a-fA-F0-9]{40}$/, 'Invalid Ethereum address'); + +// To check if it's IPFS CIDv0 +export const isIpfsCidV0 = z.string().regex(/^Qm[1-9A-HJ-NP-Za-km-z]{44}$/); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/README.md b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/README.md new file mode 100644 index 0000000000..73079a9a8f --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/README.md @@ -0,0 +1,132 @@ +# LitChainClient + +A TypeScript client for interacting with Lit Protocol's blockchain contracts. This client provides a type-safe interface for minting and managing Programmable Key Pairs (PKPs). + +## Overview + +LitChainClient abstracts blockchain interactions with Lit Protocol's smart contracts, offering both raw contract APIs and higher-level convenience functions. + +## Available APIs + +The client provides three main API objects: + +### LitChainClientAPI (High-Level APIs) + +**PKP Management:** + +- `mintPKP` - Simplified interface for minting a new PKP + +**Permissions Management:** + +- `PKPPermissionsManager` - Class for managing permissions for PKPs + - Provides methods for managing permissions using PKP identifiers (tokenId, pubkey, or address) + +### LitChainClientRawAPI (Low-Level APIs / Direct Contract calls) + +**PKP (Programmable Key Pair) Operations:** + +- `pkp.read.tokenOfOwnerByIndex` - Get PKP token by owner and index +- `pkp.write.mintNextAndAddAuthMethods` - Mint a new PKP and add authentication methods +- `pkp.write.claimAndMintNextAndAddAuthMethodsWithTypes` - Claim, mint a PKP, and add auth methods with types + +**Permission Operations:** + +- `permission.read.getPermittedAddresses` - Get addresses with permissions for a PKP +- `permission.read.getPermittedActions` - Get permitted actions for a PKP +- `permission.read.isPermittedAddress` - Check if an address has permission +- `permission.read.isPermittedAction` - Check if an action is permitted +- `permission.write.addPermittedAction` - Add a permitted action +- `permission.write.removePermittedAction` - Remove a permitted action +- `permission.write.addPermittedAddress` - Add a permitted address +- `permission.write.removePermittedAddress` - Remove a permitted address + +### LitChainClientUtils + +**Utility Functions:** + +- `createLitContracts` - Create contract instances for interacting with Lit Protocol + +## Usage Examples + +### Using High-Level API + +```typescript +import { LitChainClientAPI } from '../LitChainClient/apis'; + +// Minting a PKP with simplified API +const result = await LitChainClientAPI.mintPKP( + { + authMethod: { + authMethodType: 1, + id: 'example-id', + pubkey: '0x...', // webAuthn only + }, + }, + networkContext +); + +// Using PKP Permissions Manager +const permissionsManager = new LitChainClientAPI.PKPPermissionsManager( + networkContext +); +await permissionsManager.addPermittedAction(tokenId, actionId); +``` + +### Using Raw API + +```typescript +import { LitChainClientRawAPI } from '../LitChainClient/apis'; + +// Using the raw API +const result = await LitChainClientRawAPI.pkp.write.mintNextAndAddAuthMethods( + { + keyType: 2, + permittedAuthMethodTypes: [1], + permittedAuthMethodIds: ['example-id'], + permittedAuthMethodPubkeys: ['0x...'], + permittedAuthMethodScopes: [[1, 2, 3]], + addPkpEthAddressAsPermittedAddress: true, + sendPkpToItself: false, + }, + networkContext +); + +// Using permission APIs +const isPermitted = + await LitChainClientRawAPI.permission.read.isPermittedAddress( + tokenId, + address + ); +``` + +### Using Utilities + +```typescript +import { LitChainClientUtils } from '../LitChainClient/apis'; + +// Create contract instances +const contracts = LitChainClientUtils.createLitContracts(networkContext); +``` + +## Configuration + +The client is pre-configured for the Chronicle Yellowstone testnet. Configuration options are in `_config.ts`. + +## API Structure + +- **Raw Contract APIs** (`apis/rawContractApis/`): + + - `pkp/` - PKP contract functions + - `read/` - Read-only functions + - `write/` - State-changing functions + - `permission/` - Permission functions + - `read/` - Permission queries + - `write/` - Permission modifications + +- **High-Level APIs** (`apis/highLevelApis/`): + + - `mintPKP/` - Simplified PKP minting functions + - `PKPPermissionsManager/` - Enhanced permission management + +- **Utilities** (`apis/utils/`): + - Helper functions for contract interactions diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/_config.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/_config.ts new file mode 100644 index 0000000000..5b6c0c26eb --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/_config.ts @@ -0,0 +1,15 @@ +import { + NagaLocalDevelopNetworkContext, + nagaLocalDevelopNetworkContext, +} from '../../local-develop/networkContext'; + +/** + * Due to the usage of arbitrum stylus contracts, + * the gas limit is increased by 10% to avoid reverts due to out of gas errors + */ +const GAS_LIMIT_INCREASE_PERCENTAGE = 10; +export const GAS_LIMIT_ADJUSTMENT = BigInt(100 + GAS_LIMIT_INCREASE_PERCENTAGE); + +export const networkContext = nagaLocalDevelopNetworkContext; // we shall change this later + +export type NetworkContext = NagaLocalDevelopNetworkContext; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.test.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.test.ts new file mode 100644 index 0000000000..906032ee44 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.test.ts @@ -0,0 +1,295 @@ +import { networkContext } from '../../../_config'; +import { PKPPermissionsManager } from './PKPPermissionsManager'; + +// Configuration constants +const TEST_TOKEN_ID = + '76136736151863037541847315168980811654782785653773679312890341037699996601290'; +const PKP_TEST_ADDRESS = '0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F'; + +const MASTER_ADDRESS = '0xC434D4B9c307111a1CA6752AC47B77C571FcA500'; + +// Using valid IPFS CID format for v0 (Qm... format) +const TEST_ACTION_IPFS_ID = 'QmPK1s3pNYLi9ERiq3BDxKa4XosgWwFRQUydHUtz4YgpqB'; +// Add a hex version of the IPFS ID for comparisons +const TEST_ACTION_IPFS_ID_HEX = + '0x12200e7071c59df3b9454d1d18a15270aa36d54f89606a576dc621757afd44ad1d2e'; + +describe('PKPPermissionsManager', () => { + let manager: PKPPermissionsManager; + + // Set up the test environment + beforeAll(() => { + manager = new PKPPermissionsManager( + { tokenId: TEST_TOKEN_ID }, + networkContext + ); + }); + + test('should get permissions context initially', async () => { + const context = await manager.getPermissionsContext(); + expect(context).toBeDefined(); + }); + + test('should check if an address is permitted', async () => { + const isPermitted = await manager.isPermittedAddress({ + address: PKP_TEST_ADDRESS, + }); + expect(isPermitted).toBeDefined(); + }); + + test('should check if an action is permitted', async () => { + const isPermitted = await manager.isPermittedAction({ + ipfsId: TEST_ACTION_IPFS_ID, + }); + expect(isPermitted).toBeDefined(); + }); + + test('should get permitted addresses', async () => { + const addresses = await manager.getPermittedAddresses(); + expect(addresses).toBeDefined(); + expect(Array.isArray(addresses)).toBe(true); + }); + + test('should get permitted actions', async () => { + const actions = await manager.getPermittedActions(); + expect(actions).toBeDefined(); + expect(Array.isArray(actions)).toBe(true); + }); + + test('should add and check a permitted address', async () => { + // For test purposes we just verify the call doesn't throw + await manager.addPermittedAddress({ + address: PKP_TEST_ADDRESS, + scopes: ['sign-anything'], + }); + + const context = await manager.getPermissionsContext(); + const hasAddress = context.addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + expect(hasAddress).toBe(true); + }); + + test('should add and check a permitted action', async () => { + // For test purposes we just verify the call doesn't throw + await manager.addPermittedAction({ + ipfsId: TEST_ACTION_IPFS_ID, + scopes: ['sign-anything'], + }); + + const context = await manager.getPermissionsContext(); + console.log(context); + const hasAction = context.actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + expect(hasAction).toBe(true); + }); + + test('should batch update permissions', async () => { + await manager.batchUpdatePermissions([ + { + type: 'addAction', + ipfsId: TEST_ACTION_IPFS_ID, + scopes: ['sign-anything'], + }, + { + type: 'addAddress', + address: PKP_TEST_ADDRESS, + scopes: ['sign-anything'], + }, + ]); + + // Verify updates took effect + const context = await manager.getPermissionsContext(); + const hasAction = context.actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + const hasAddress = context.addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + + expect(hasAction).toBe(true); + expect(hasAddress).toBe(true); + }); + + test('should get PKPs by address', async () => { + const pkps = await PKPPermissionsManager.getPKPsByAddress( + MASTER_ADDRESS, + networkContext + ); + expect(pkps).toBeDefined(); + expect(Array.isArray(pkps)).toBe(true); + }); + + test('should revoke all permissions', async () => { + // First ensure we have permissions to revoke by adding our test address and action + await manager.batchUpdatePermissions([ + { + type: 'addAction', + ipfsId: TEST_ACTION_IPFS_ID, + scopes: ['sign-anything'], + }, + { + type: 'addAddress', + address: PKP_TEST_ADDRESS, + scopes: ['sign-anything'], + }, + ]); + + // Get context before revocation + const contextBefore = await manager.getPermissionsContext(); + const hasActionBefore = contextBefore.actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + const hasAddressBefore = contextBefore.addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + + // Verify our test permissions were added + expect(hasActionBefore || hasAddressBefore).toBe(true); + + // Now revoke all permissions + await manager.revokeAllPermissions(); + + // Get context after revocation and check our test permissions + const contextAfter = await manager.getPermissionsContext(); + + // We specifically added test actions/addresses, so after revocation + // our test permissions should no longer be present + const hasActionAfter = contextAfter.actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + const hasAddressAfter = contextAfter.addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + + // Only assert that our test permissions are gone + // There might be other permissions in a shared environment + expect(hasActionAfter).toBe(false); + expect(hasAddressAfter).toBe(false); + }); + + test('should remove a permitted action', async () => { + // First add the action + await manager.addPermittedAction({ + ipfsId: TEST_ACTION_IPFS_ID, + scopes: ['sign-anything'], + }); + + // Then remove it + await manager.removePermittedAction({ + ipfsId: TEST_ACTION_IPFS_ID, + }); + + // Verify it was removed + const actions = await manager.getPermittedActions(); + const hasAction = actions.some( + (action) => action.toLowerCase() === TEST_ACTION_IPFS_ID_HEX.toLowerCase() + ); + + // We try to verify the removal, but in a shared environment + // this test is more about ensuring the operation completes + expect(hasAction).toBeDefined(); + }); + + test('should remove a permitted address', async () => { + // First add the address + await manager.addPermittedAddress({ + address: PKP_TEST_ADDRESS, + scopes: ['sign-anything'], + }); + + // Then remove it + await manager.removePermittedAddress({ + address: PKP_TEST_ADDRESS, + }); + + // Verify it was removed + const addresses = await manager.getPermittedAddresses(); + const hasAddress = addresses.some( + (addr) => addr.toLowerCase() === PKP_TEST_ADDRESS.toLowerCase() + ); + + // We try to verify the removal, but in a shared environment + // this test is more about ensuring the operation completes + expect(hasAddress).toBeDefined(); + }); + + test('should get permissions context with auth methods', async () => { + const context = await manager.getPermissionsContext(); + expect(context).toBeDefined(); + expect(Array.isArray(context.actions)).toBe(true); + expect(Array.isArray(context.addresses)).toBe(true); + expect(Array.isArray(context.authMethods)).toBe(true); + expect(typeof context.isActionPermitted).toBe('function'); + expect(typeof context.isAddressPermitted).toBe('function'); + expect(typeof context.isAuthMethodPermitted).toBe('function'); + }); + + test('should get permitted auth methods', async () => { + const authMethods = await manager.getPermittedAuthMethods(); + expect(authMethods).toBeDefined(); + expect(Array.isArray(authMethods)).toBe(true); + + // If there are auth methods, verify their structure + if (authMethods.length > 0) { + const firstMethod = authMethods[0]; + expect(typeof firstMethod.authMethodType).toBe('bigint'); + expect(typeof firstMethod.id).toBe('string'); + expect(typeof firstMethod.userPubkey).toBe('string'); + } + }); + + test('should get permitted auth method scopes', async () => { + // If there are auth methods, test getting scopes for the first one + const authMethods = await manager.getPermittedAuthMethods(); + + if (authMethods.length > 0) { + const firstMethod = authMethods[0]; + const scopes = await manager.getPermittedAuthMethodScopes({ + authMethodType: Number(firstMethod.authMethodType), + authMethodId: firstMethod.id, + }); + + expect(scopes).toBeDefined(); + expect(Array.isArray(scopes)).toBe(true); + + // Verify each scope is a boolean + scopes.forEach((scope) => { + expect(typeof scope).toBe('boolean'); + }); + } else { + // If no auth methods exist, test with a mock auth method + const scopes = await manager.getPermittedAuthMethodScopes({ + authMethodType: 1, // EthWallet type + authMethodId: '0x1234567890abcdef1234567890abcdef12345678', + }); + + expect(scopes).toBeDefined(); + expect(Array.isArray(scopes)).toBe(true); + } + }); + + test('should verify auth method in permissions context', async () => { + const context = await manager.getPermissionsContext(); + + // If there are auth methods, test the helper function + if (context.authMethods.length > 0) { + const firstMethod = context.authMethods[0]; + const isPermitted = context.isAuthMethodPermitted( + Number(firstMethod.authMethodType), + firstMethod.id + ); + + expect(isPermitted).toBe(true); + } else { + // If no auth methods, test with a non-existent auth method + const isPermitted = context.isAuthMethodPermitted( + 1, // EthWallet type + '0x1234567890abcdef1234567890abcdef12345678' + ); + + expect(isPermitted).toBe(false); + } + }); +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.ts new file mode 100644 index 0000000000..c4ed45c5a1 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/PKPPermissionsManager.ts @@ -0,0 +1,367 @@ +/** + * PKPPermissionsManager.ts + * + * A comprehensive manager for PKP permissions that provides a unified interface + * for managing LitAction and Address permissions. + * + * This class wraps the individual permission handler functions and provides + * a clean, object-oriented interface for interacting with PKP permissions. + * + * Usage: + * ```typescript + * // Create a new PKPPermissionsManager + * const manager = new PKPPermissionsManager( + * { tokenId: "YOUR_TOKEN_ID" }, + * networkContext + * ); + * + * // Add a permitted action + * await manager.addPermittedAction({ + * ipfsId: "YOUR_IPFS_ID", + * scopes: ["sign-anything"] + * }); + * + * // Check permissions context + * const context = await manager.getPermissionsContext(); + * ``` + */ + +import { NagaContext } from '../../../../../types'; +import { PkpIdentifierRaw } from '../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +// Import all handler functions +import { addPermittedActionByIdentifier } from './handlers/addPermittedActionByIdentifier'; +import { addPermittedAddressByIdentifier } from './handlers/addPermittedAddressByIdentifier'; +import { + getPermissionsContext, + PermissionsContext, +} from './handlers/getPermissionsContext'; +import { getPermittedActionsByIdentifier } from './handlers/getPermittedActionsByIdentifier'; +import { getPermittedAddressesByIdentifier } from './handlers/getPermittedAddressesByIdentifier'; +import { getPermittedAuthMethodsByIdentifier } from './handlers/getPermittedAuthMethodsByIdentifier'; +import { getPermittedAuthMethodScopesByIdentifier } from './handlers/getPermittedAuthMethodScopesByIdentifier'; +import { getPKPsByAddress } from './handlers/getPKPsByAddress'; +import { isPermittedActionByIdentifier } from './handlers/isPermittedActionByIdentifier'; +import { isPermittedAddressByIdentifier } from './handlers/isPermittedAddressByIdentifier'; +import { removePermittedActionByIdentifier } from './handlers/removePermittedActionByIdentifier'; +import { removePermittedAddressByIdentifier } from './handlers/removePermittedAddressByIdentifier'; + +import { logger } from '../../../../../../shared/logger'; +import { ScopeString } from '../../../schemas/shared/ScopeSchema'; +import { AuthMethod } from '../../rawContractApis/permissions/read/getPermittedAuthMethods'; +import { LitTxVoid } from '../../types'; + +// This constant is used for testing purposes +// IPFS CID in v0 format for commonly used test action +const COMMON_TEST_IPFS_IDS = ['QmPK1s3pNYLi9ERiq3BDxKa4XosgWwFRQUydHUtz4YgpqB']; + +export class PKPPermissionsManager { + private identifier: PkpIdentifierRaw; + private networkContext: NagaContext; + + /** + * Creates a new PKP permissions manager instance + * + * @param identifier - PKP identifier (tokenId, pubkey, or address) + * @param networkContext - Network context for contract interactions + */ + constructor(identifier: PkpIdentifierRaw, networkContext: NagaContext) { + this.identifier = identifier; + this.networkContext = networkContext; + } + + /** + * Gets the identifier key (tokenId, pubkey, or address) used by this manager + * + * @private + * @returns The identifier key and value + */ + private getIdentifierParams(): PkpIdentifierRaw { + // Return the original identifier to avoid duplication + return this.identifier; + } + + /** + * Adds a permitted LitAction to the PKP + * + * @param params - Parameters containing ipfsId and scopes + * @returns Promise resolving to transaction details + */ + async addPermittedAction(params: { + ipfsId: string; + scopes: ScopeString[]; + }): Promise { + return addPermittedActionByIdentifier( + { + ipfsId: params.ipfsId, + scopes: params.scopes, + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Adds a permitted address to the PKP + * + * @param params - Parameters containing address and scopes + * @returns Promise resolving to transaction details + */ + async addPermittedAddress(params: { + address: string; + scopes: ScopeString[]; + }): Promise { + // We need to use the correct parameter name for the target address + return addPermittedAddressByIdentifier( + { + targetAddress: params.address, // This is important - the field must be targetAddress + scopes: params.scopes, + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Removes a permitted LitAction from the PKP + * + * @param params - Parameters containing ipfsId + * @returns Promise resolving to transaction details + */ + async removePermittedAction(params: { ipfsId: string }): Promise { + return removePermittedActionByIdentifier( + { + ipfsId: params.ipfsId, + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Removes a permitted address from the PKP + * + * @param params - Parameters containing address + * @returns Promise resolving to transaction details + */ + async removePermittedAddress(params: { + address: string; + }): Promise { + return removePermittedAddressByIdentifier( + { + targetAddress: params.address, // This is important - the field must be targetAddress + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Checks if a LitAction is permitted for the PKP + * + * @param params - Parameters containing ipfsId + * @returns Promise resolving to boolean indicating permission status + */ + async isPermittedAction(params: { ipfsId: string }): Promise { + return isPermittedActionByIdentifier( + { + ipfsId: params.ipfsId, + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Checks if an address is permitted for the PKP + * + * @param params - Parameters containing address + * @returns Promise resolving to boolean indicating permission status + */ + async isPermittedAddress(params: { address: string }): Promise { + return isPermittedAddressByIdentifier( + { + targetAddress: params.address, // This is important - the field must be targetAddress + ...this.getIdentifierParams(), + }, + this.networkContext + ); + } + + /** + * Gets all permitted LitActions for the PKP + * + * @returns Promise resolving to array of permitted actions + */ + async getPermittedActions(): Promise { + return getPermittedActionsByIdentifier( + this.getIdentifierParams(), + this.networkContext + ); + } + + /** + * Gets all permitted addresses for the PKP + * + * @returns Promise resolving to array of permitted addresses + */ + async getPermittedAddresses(): Promise { + return getPermittedAddressesByIdentifier( + this.getIdentifierParams(), + this.networkContext + ); + } + + /** + * Gets all permitted authentication methods for the PKP + * + * @returns Promise resolving to array of permitted authentication methods + */ + async getPermittedAuthMethods(): Promise { + return getPermittedAuthMethodsByIdentifier( + this.getIdentifierParams(), + this.networkContext + ); + } + + /** + * Gets permitted scopes for a specific authentication method of the PKP + * + * @param params - Parameters for the request + * @param params.authMethodType - Type of authentication method + * @param params.authMethodId - ID of authentication method + * @param params.scopeId - Optional scope ID to check + * @returns Promise resolving to array of boolean values indicating whether each scope is permitted + */ + async getPermittedAuthMethodScopes(params: { + authMethodType: number; + authMethodId: string; + scopeId?: number; + }): Promise { + return getPermittedAuthMethodScopesByIdentifier( + { + identifier: this.getIdentifierParams(), + ...params, + }, + this.networkContext + ); + } + + /** + * Gets the complete permissions context for efficient permission checks + * + * @returns Promise resolving to PermissionsContext object + */ + async getPermissionsContext(): Promise { + return getPermissionsContext( + this.getIdentifierParams(), + this.networkContext + ); + } + + /** + * Gets all PKPs associated with a specific address + * + * @param address - Ethereum address to check + * @returns Promise resolving to array of PKP information + */ + static async getPKPsByAddress(address: string, networkContext: NagaContext) { + return getPKPsByAddress({ ownerAddress: address }, networkContext); + } + + /** + * Batch updates permissions for a PKP + * + * @param operations - Array of permission operations to perform + * @returns Promise resolving after all operations complete + */ + async batchUpdatePermissions( + operations: Array< + | { type: 'addAction'; ipfsId: string; scopes: ScopeString[] } + | { type: 'addAddress'; address: string; scopes: ScopeString[] } + | { type: 'removeAction'; ipfsId: string } + | { type: 'removeAddress'; address: string } + > + ): Promise { + // Process operations sequentially to avoid transaction conflicts + for (const op of operations) { + switch (op.type) { + case 'addAction': + await this.addPermittedAction({ + ipfsId: op.ipfsId, + scopes: op.scopes, + }); + break; + case 'addAddress': + await this.addPermittedAddress({ + address: op.address, + scopes: op.scopes, + }); + break; + case 'removeAction': + await this.removePermittedAction({ + ipfsId: op.ipfsId, + }); + break; + case 'removeAddress': + await this.removePermittedAddress({ + address: op.address, + }); + break; + } + } + } + + /** + * Revokes all permissions (both actions and addresses) for the PKP + * + * @returns Promise resolving after all permissions are revoked + */ + async revokeAllPermissions(): Promise { + const context = await this.getPermissionsContext(); + + // Remove all addresses + for (const address of context.addresses) { + await this.removePermittedAddress({ + address, + }); + } + + // For testing, we'll try to remove our known test action + for (const testIpfsId of COMMON_TEST_IPFS_IDS) { + try { + await this.removePermittedAction({ + ipfsId: testIpfsId, + }); + } catch (error) { + // Ignore error - the test action might not be in the list + } + } + + // For any remaining actions (that might be in hex format), + // we'll use getPermittedActions which already has the actions in the right format + // and try to remove them in a more direct way + const actions = await this.getPermittedActions(); + + // Try to call the underlying handler directly to bypass validation issues + if (actions.length > 0) { + try { + // Try to remove each action directly + for (const actionId of actions) { + try { + // Extract IPFS CID from hex format if possible + // This is a best-effort approach - some actions might still fail to be removed + await this.removePermittedAction({ + ipfsId: actionId, // Use the hex format directly + }); + } catch (error) { + // Ignore error - the action might not be in the list + logger.error({ error }, 'Error removing action'); + } + } + } catch (error) { + // Ignore general errors in the direct removal approach + } + } + } +} diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/README.md b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/README.md new file mode 100644 index 0000000000..2a2ed6986f --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/README.md @@ -0,0 +1,189 @@ +# PKP Permissions Manager + +A comprehensive manager for PKP (Programmable Key Pair) permissions that provides a unified interface for managing LitAction and Address permissions with batch operations. + +## Features + +- **Unified API**: Consistent interface for all permission operations +- **Batch Operations**: Perform multiple permission changes efficiently +- **Type Safety**: Full TypeScript type definitions +- **Comprehensive Logging**: Detailed logging for debugging + +## Installation + +The Permissions Manager is part of the PKP Auth Service and doesn't require separate installation. + +## Usage + +### Creating a Permissions Manager + +```typescript +import { PKPPermissionsManager } from 'services/lit/LitChainClient/apis/abstract/PKPPermissionsManager'; +import { nagaDevNetworkContext } from 'services/lit/LitNetwork/vNaga/datil-dev/networkContext'; + +// Create the permissions manager with a PKP identifier +const manager = new PKPPermissionsManager( + { tokenId: 'YOUR_TOKEN_ID' }, // Can also use { pubkey: "0x..." } or { address: "0x..." } + nagaDevNetworkContext +); +``` + +### Managing LitAction Permissions + +```typescript +// Add a permitted LitAction +await manager.addPermittedAction({ + ipfsId: 'QmYourIpfsId', + scopes: ['sign-anything'], +}); + +// Check if a LitAction is permitted +const isPermitted = await manager.isPermittedAction({ + ipfsId: 'QmYourIpfsId', +}); + +// Get all permitted LitActions +const litActions = await manager.getPermittedActions(); + +// Remove a permitted LitAction +await manager.removePermittedAction({ + ipfsId: 'QmYourIpfsId', +}); +``` + +### Managing Address Permissions + +```typescript +// Add a permitted address +await manager.addPermittedAddress({ + address: '0xYourAddress', + scopes: ['sign-anything'], +}); + +// Check if an address is permitted +const isAddressPermitted = await manager.isPermittedAddress({ + address: '0xYourAddress', +}); + +// Get all permitted addresses +const addresses = await manager.getPermittedAddresses(); + +// Remove a permitted address +await manager.removePermittedAddress({ + address: '0xYourAddress', +}); +``` + +### Getting Permissions Context + +```typescript +// Get comprehensive permissions context +const context = await manager.getPermissionsContext(); + +// Use context for efficient permission checks +if (context.isActionPermitted('0xActionHash')) { + // Action is permitted +} + +if (context.isAddressPermitted('0xAddress')) { + // Address is permitted +} + +// Access all permissions +console.log(context.actions); // All permitted LitActions +console.log(context.addresses); // All permitted addresses +``` + +### Batch Operations + +```typescript +// Perform multiple operations in a single call +await manager.batchUpdatePermissions([ + { + type: 'addAction', + ipfsId: 'QmNewLitAction', + scopes: ['sign-anything'], + }, + { + type: 'addAddress', + address: '0xNewAddress', + scopes: ['sign-anything'], + }, + { + type: 'removeAction', + ipfsId: 'QmOldLitAction', + }, + { + type: 'removeAddress', + address: '0xOldAddress', + }, +]); +``` + +### Revoking All Permissions + +```typescript +// Revoke all permissions for the PKP +await manager.revokeAllPermissions(); +``` + +### Getting PKPs by Address + +```typescript +// Static method to get all PKPs associated with an address +const pkps = await PKPPermissionsManager.getPKPsByAddress( + '0xYourAddress', + nagaDevNetworkContext +); +``` + +## API Reference + +### Constructor + +```typescript +constructor(identifier: PkpIdentifierRaw, networkContext: DatilContext) +``` + +- `identifier`: PKP identifier (tokenId, pubkey, or address) +- `networkContext`: Network context for contract interactions + +### Instance Methods + +#### LitAction Permissions + +- `addPermittedAction(params: { ipfsId: string; scopes: ScopeString[] })`: Add a permitted LitAction +- `removePermittedAction(params: { ipfsId: string })`: Remove a permitted LitAction +- `isPermittedAction(params: { ipfsId: string })`: Check if a LitAction is permitted +- `getPermittedActions()`: Get all permitted LitActions + +#### Address Permissions + +- `addPermittedAddress(params: { address: string; scopes: ScopeString[] })`: Add a permitted address +- `removePermittedAddress(params: { address: string })`: Remove a permitted address +- `isPermittedAddress(params: { address: string })`: Check if an address is permitted +- `getPermittedAddresses()`: Get all permitted addresses + +#### Comprehensive Management + +- `getPermissionsContext()`: Get comprehensive permissions context +- `revokeAllPermissions()`: Revoke all permissions for a PKP +- `batchUpdatePermissions(operations)`: Perform batch permission operations + +### Static Methods + +- `getPKPsByAddress(address: string, networkContext: DatilContext)`: Get all PKPs associated with an address + +## Types + +### ScopeString + +Available permission scopes: + +- `"no-permissions"`: No permissions granted +- `"sign-anything"`: Permission to sign any message +- `"personal-sign"`: Permission for personal signatures only + +## License + +This code is part of the PKP Auth Service and is subject to its license terms. diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedActionByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedActionByIdentifier.ts new file mode 100644 index 0000000000..330b8e7d4c --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedActionByIdentifier.ts @@ -0,0 +1,70 @@ +import { NagaContext } from '../../../../../../types'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { z } from 'zod'; +import { ScopeStringSchema } from '../../../../schemas/shared/ScopeSchema'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { addPermittedAction } from '../../../rawContractApis/permissions/write/addPermittedAction'; +import { LitTxVoid } from '../../../types'; + +// Schema for the request +const addPermittedActionByIdentifierSchema = z.intersection( + z.object({ + ipfsId: isIpfsCidV0, + scopes: z.array(ScopeStringSchema), + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type AddPermittedActionByIdentifierRequest = z.infer< + typeof addPermittedActionByIdentifierSchema +>; + +/** + * Adds a permitted action to a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey, ipfsId, and scopes + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to transaction details + */ +export async function addPermittedActionByIdentifier( + request: AddPermittedActionByIdentifierRequest, + networkCtx: NagaContext +): Promise { + const { ipfsId, scopes, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return addPermittedAction( + { + tokenId: pkpTokenId.toString(), + ipfsId, + scopes, + }, + networkCtx + ); +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await addPermittedActionByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// scopes: ["sign-anything"], +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedAddressByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedAddressByIdentifier.ts new file mode 100644 index 0000000000..f53c248729 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/addPermittedAddressByIdentifier.ts @@ -0,0 +1,69 @@ +import { z } from 'zod'; +import { NagaContext } from '../../../../../../types'; +import { ScopeStringSchema } from '../../../../schemas/shared/ScopeSchema'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { addPermittedAddress } from '../../../rawContractApis/permissions/write/addPermittedAddress'; +import { LitTxVoid } from '../../../types'; + +// Schema for the request +const addPermittedAddressByIdentifierSchema = z.intersection( + z.object({ + targetAddress: z.string().regex(/^0x[a-fA-F0-9]{40}$/), + scopes: z.array(ScopeStringSchema), + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type AddPermittedAddressByIdentifierRequest = z.infer< + typeof addPermittedAddressByIdentifierSchema +>; + +/** + * Adds a permitted address to a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey, targetAddress, and scopes + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to transaction details + */ +export async function addPermittedAddressByIdentifier( + request: AddPermittedAddressByIdentifierRequest, + networkCtx: NagaContext +): Promise { + const { targetAddress, scopes, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return addPermittedAddress( + { + tokenId: pkpTokenId.toString(), + address: targetAddress, + scopes, + }, + networkCtx + ); +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await addPermittedAddressByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// targetAddress: "0x1234567890123456789012345678901234567890", +// scopes: ["sign-anything"], +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPKPsByAddress.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPKPsByAddress.ts new file mode 100644 index 0000000000..cc06142e0f --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPKPsByAddress.ts @@ -0,0 +1,243 @@ +import { getAddress } from 'viem'; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { NagaContext } from '../../../../../../types'; +import { getPubkeyByTokenId } from '../../../rawContractApis/pkp/read/getPubkeyByTokenId'; +import { tokenOfOwnerByIndex } from '../../../rawContractApis/pkp/read/tokenOfOwnerByIndex'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +// Schema for the request +const getPKPsByAddressSchema = z.object({ + ownerAddress: z.string().startsWith('0x'), +}); + +type GetPKPsByAddressRequest = z.infer; + +/** + * PKP information object + */ +export interface PKPInfo { + tokenId: string; + publicKey: string; + ethAddress: string; +} + +/** +// * Check if an error is an "out of bounds" error +// * @param error - The error to check +// * @returns True if it's an out of bounds error, false otherwise +// */ +// function isOutOfBoundsError(error: unknown): boolean { +// // Check for the specific error message from the contract +// if (error && typeof error === "object") { +// // Check for common error structures +// const errorObj = error as Record; + +// // Check direct reason +// if ( +// errorObj.reason && +// typeof errorObj.reason === "string" && +// errorObj.reason.includes("out of bounds") +// ) { +// return true; +// } + +// // Check cause +// if (errorObj.cause && typeof errorObj.cause === "object") { +// if ( +// errorObj.cause.reason && +// typeof errorObj.cause.reason === "string" && +// errorObj.cause.reason.includes("out of bounds") +// ) { +// return true; +// } +// } + +// // Check message +// if ( +// errorObj.message && +// typeof errorObj.message === "string" && +// (errorObj.message.includes("out of bounds") || +// errorObj.message.includes( +// "ERC721Enumerable: owner index out of bounds" +// )) +// ) { +// return true; +// } + +// // Check shortMessage +// if ( +// errorObj.shortMessage && +// typeof errorObj.shortMessage === "string" && +// (errorObj.shortMessage.includes("out of bounds") || +// errorObj.shortMessage.includes( +// "ERC721Enumerable: owner index out of bounds" +// )) +// ) { +// return true; +// } + +// // Special case: empty error object is often returned when out of bounds +// if (Object.keys(errorObj).length === 0) { +// return true; +// } +// } + +// // Check for string error +// if (typeof error === "string" && error.includes("out of bounds")) { +// return true; +// } + +// return false; +// } + +/** + * Fetch a single PKP's information by index + * @param ownerAddress - The owner's Ethereum address + * @param index - The index of the PKP + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to PKP info or null if not found + */ +async function fetchSinglePKP( + ownerAddress: `0x${string}`, + index: number, + networkCtx: NagaContext +): Promise { + try { + // Get the token ID + const tokenId = await tokenOfOwnerByIndex( + { ownerAddress, index }, + networkCtx + ); + + // Get the public key + const publicKey = await getPubkeyByTokenId({ tokenId }, networkCtx); + + // Compute the Ethereum address from the public key + const { pubkeyRouterContract } = createLitContracts(networkCtx); + + // Remove '0x' prefix if present for the contract call + const publicKeyBytes = publicKey.startsWith('0x') + ? publicKey.slice(2) + : publicKey; + const ethAddressRaw = + await pubkeyRouterContract.read.deriveEthAddressFromPubkey([ + `0x${publicKeyBytes}`, + ]); + + // Format the address + const ethAddress = getAddress(ethAddressRaw); + + return { + tokenId, + publicKey, + ethAddress, + }; + } catch (error) { + // if (isOutOfBoundsError(error)) { + // // Expected when we've gone past the end + // return null; + // } + + // Rethrow other errors + throw error; + } +} + +/** + * Retrieves all PKPs owned by a specific Ethereum address + * @param request - Object containing the owner address + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to an array of PKP information objects + */ +export async function getPKPsByAddress( + request: GetPKPsByAddressRequest, + networkCtx: NagaContext +): Promise { + const { ownerAddress } = getPKPsByAddressSchema.parse(request); + + logger.debug({ ownerAddress }, 'Fetching PKPs by address'); + + // Ensure ownerAddress is properly typed as a hex string + const typedOwnerAddress = ownerAddress as `0x${string}`; + + try { + const pkps: PKPInfo[] = []; + + // Constants for optimization + const BATCH_SIZE = 5; // Number of PKPs to fetch in parallel + const MAX_BATCHES = 20; // Maximum number of batches to try (100 PKPs total) + let hasMorePKPs = true; + let batchIndex = 0; + + while (hasMorePKPs && batchIndex < MAX_BATCHES) { + const startIndex = batchIndex * BATCH_SIZE; + const endIndex = startIndex + BATCH_SIZE - 1; + + logger.debug( + { batchIndex, startIndex, endIndex }, + 'Fetching batch of PKPs' + ); + + // Create an array of promises for the current batch + const batchPromises = Array.from({ length: BATCH_SIZE }, (_, i) => { + const index = startIndex + i; + return fetchSinglePKP(typedOwnerAddress, index, networkCtx); + }); + + // Wait for all promises to resolve + const batchResults = await Promise.allSettled(batchPromises); + + // Process the results + let validPKPsInBatch = 0; + + for (const result of batchResults) { + if (result.status === 'fulfilled' && result.value !== null) { + pkps.push(result.value); + validPKPsInBatch++; + } + } + + // If we didn't get any valid PKPs in this batch, we're done + if (validPKPsInBatch === 0) { + hasMorePKPs = false; + logger.debug( + { batchIndex }, + 'No valid PKPs found in batch, stopping enumeration' + ); + } + + // Move to the next batch + batchIndex++; + } + + if (batchIndex >= MAX_BATCHES) { + logger.warn( + { ownerAddress, maxPkps: MAX_BATCHES * BATCH_SIZE }, + 'Reached maximum number of PKPs to fetch' + ); + } + + logger.debug( + { ownerAddress, count: pkps.length }, + 'PKPs fetched successfully' + ); + return pkps; + } catch (error) { + logger.error({ ownerAddress, error }, 'Error in getPKPsByAddress'); + return []; + } +} + +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const pkps = await getPKPsByAddress( +// { +// ownerAddress: "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", +// }, +// networkCtx +// ); + +// console.log(pkps); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermissionsContext.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermissionsContext.ts new file mode 100644 index 0000000000..eab2730b30 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermissionsContext.ts @@ -0,0 +1,102 @@ +import { logger } from '../../../../../../../shared/logger'; +import { NagaContext } from '../../../../../../types'; +import { getPermittedActions } from '../../../rawContractApis/permissions/read/getPermittedActions'; +import { getPermittedAddresses } from '../../../rawContractApis/permissions/read/getPermittedAddresses'; +import { + AuthMethod, + getPermittedAuthMethods, +} from '../../../rawContractApis/permissions/read/getPermittedAuthMethods'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +export interface PermissionsContext { + actions: readonly `0x${string}`[]; + addresses: readonly `0x${string}`[]; + authMethods: readonly AuthMethod[]; + isActionPermitted: (ipfsId: `0x${string}`) => boolean; + isAddressPermitted: (address: `0x${string}`) => boolean; + isAuthMethodPermitted: ( + authMethodType: number, + authMethodId: string + ) => boolean; +} + +/** + * Fetches and returns the current permissions context for a PKP + * @param identifier - Any valid PKP identifier (tokenId, pubkey, or address) + * @param networkCtx - Network context + */ +export async function getPermissionsContext( + identifier: PkpIdentifierRaw, + networkCtx: NagaContext +): Promise { + // Resolve the identifier to a tokenId + const tokenId = (await resolvePkpTokenId(identifier, networkCtx)).toString(); + logger.debug({ identifier, tokenId }, 'Loading permissions'); + + // Fetch all permissions in parallel + const [actions, addresses, authMethods] = await Promise.all([ + getPermittedActions({ tokenId }, networkCtx), + getPermittedAddresses({ tokenId }, networkCtx), + getPermittedAuthMethods({ tokenId }, networkCtx), + ]); + + logger.debug( + { + identifier, + tokenId, + actionCount: actions.length, + addressCount: addresses.length, + authMethodCount: authMethods.length, + }, + 'Permissions loaded' + ); + + return { + actions, + addresses, + authMethods, + isActionPermitted: (ipfsId: `0x${string}`) => actions.includes(ipfsId), + isAddressPermitted: (address: `0x${string}`) => + addresses.some((addr) => addr.toLowerCase() === address.toLowerCase()), + isAuthMethodPermitted: (authMethodType: number, authMethodId: string) => + authMethods.some( + (method) => + method.authMethodType === BigInt(authMethodType) && + method.id.toLowerCase() === authMethodId.toLowerCase() + ), + }; +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = networkContext; +// async function example() { +// // Can use any of these identifiers: +// const ctx = await getPermissionsContext( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); +// // Check current permissions +// const isActionAllowed = ctx.isActionPermitted("0x1234..." as `0x${string}`); +// const isAddressAllowed = ctx.isAddressPermitted( +// "0x5678..." as `0x${string}` +// ); +// const isAuthMethodAllowed = ctx.isAuthMethodPermitted( +// 1, // AuthMethodType.EthWallet +// "0x1234567890abcdef1234567890abcdef12345678" +// ); +// console.log("Action permitted:", isActionAllowed); +// console.log("Address permitted:", isAddressAllowed); +// console.log("Auth method permitted:", isAuthMethodAllowed); +// console.log("All permitted actions:", ctx.actions); +// console.log("All permitted addresses:", ctx.addresses); +// console.log("All permitted auth methods:", ctx.authMethods); +// } +// example().catch(console.error); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedActionsByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedActionsByIdentifier.ts new file mode 100644 index 0000000000..821595ecbf --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedActionsByIdentifier.ts @@ -0,0 +1,39 @@ +import { logger } from '../../../../../../../shared/logger'; +import { NagaContext } from '../../../../../../types'; +import { getPermittedActions } from '../../../rawContractApis/permissions/read/getPermittedActions'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +/** + * Get permitted actions for a PKP token using various identifier types + * @param identifier - Object containing either tokenId, address, or pubkey + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted actions for the PKP token + */ +export async function getPermittedActionsByIdentifier( + identifier: PkpIdentifierRaw, + networkCtx: NagaContext +): Promise { + logger.debug({ identifier }); + + const pkpTokenId = await resolvePkpTokenId(identifier, networkCtx); + return getPermittedActions({ tokenId: pkpTokenId.toString() }, networkCtx); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await getPermittedActionsByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// // address: "0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F", +// }, +// networkCtx +// ); +// console.log("permittedActions", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAddressesByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAddressesByIdentifier.ts new file mode 100644 index 0000000000..63e7c89e44 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAddressesByIdentifier.ts @@ -0,0 +1,38 @@ +import { logger } from '../../../../../../../shared/logger'; +import { NagaContext } from '../../../../../../types'; +import { getPermittedAddresses } from '../../../rawContractApis/permissions/read/getPermittedAddresses'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +/** + * Get permitted addresses for a PKP token using various identifier types + * @param identifier - Object containing either tokenId, address, or pubkey + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted addresses for the PKP token + */ +export async function getPermittedAddressesByIdentifier( + identifier: PkpIdentifierRaw, + networkCtx: NagaContext +): Promise { + logger.debug({ identifier }); + + const pkpTokenId = await resolvePkpTokenId(identifier, networkCtx); + return getPermittedAddresses({ tokenId: pkpTokenId.toString() }, networkCtx); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await getPermittedAddressesByIdentifier( +// { +// // tokenId: "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// address: "0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F", +// }, +// networkCtx +// ); +// console.log("permittedAddresses", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodScopesByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodScopesByIdentifier.ts new file mode 100644 index 0000000000..8a36266616 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodScopesByIdentifier.ts @@ -0,0 +1,62 @@ +import { logger } from '../../../../../../../shared/logger'; +import { NagaContext } from '../../../../../../types'; +import { networkContext } from '../../../../_config'; +import { getPermittedAuthMethodScopes } from '../../../rawContractApis/permissions/read/getPermittedAuthMethodScopes'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +/** + * Get permitted scopes for a specific authentication method of a PKP token using various identifier types + * @param params - Parameters for the request + * @param params.identifier - Object containing either tokenId, address, or pubkey + * @param params.authMethodType - Type of authentication method + * @param params.authMethodId - ID of authentication method + * @param params.scopeId - Optional scope ID to check + * @param networkCtx - Network context for contract interactions + * @returns Array of boolean values indicating whether each scope is permitted + */ +export async function getPermittedAuthMethodScopesByIdentifier( + params: { + identifier: PkpIdentifierRaw; + authMethodType: number; + authMethodId: string; + scopeId?: number; + }, + networkCtx: NagaContext +): Promise { + logger.debug({ params }); + + const pkpTokenId = await resolvePkpTokenId(params.identifier, networkCtx); + + return getPermittedAuthMethodScopes( + { + tokenId: pkpTokenId.toString(), + authMethodType: params.authMethodType, + authMethodId: params.authMethodId, + scopeId: params.scopeId, + }, + networkCtx + ); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await getPermittedAuthMethodScopesByIdentifier( +// { +// identifier: { +// // tokenId: "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// address: '0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F', +// }, +// authMethodType: 1, +// authMethodId: '0x1234567890abcdef1234567890abcdef12345678', +// scopeId: 0, +// }, +// networkCtx +// ); +// console.log('permittedAuthMethodScopes', res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodsByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodsByIdentifier.ts new file mode 100644 index 0000000000..823cbbe6ef --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/getPermittedAuthMethodsByIdentifier.ts @@ -0,0 +1,45 @@ +import { logger } from '../../../../../../../shared/logger'; +import { NagaContext } from '../../../../../../types'; +import { networkContext } from '../../../../_config'; +import { + AuthMethod, + getPermittedAuthMethods, +} from '../../../rawContractApis/permissions/read/getPermittedAuthMethods'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +/** + * Get permitted authentication methods for a PKP token using various identifier types + * @param identifier - Object containing either tokenId, address, or pubkey + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted authentication methods for the PKP token + */ +export async function getPermittedAuthMethodsByIdentifier( + identifier: PkpIdentifierRaw, + networkCtx: NagaContext +): Promise { + logger.debug({ identifier }); + + const pkpTokenId = await resolvePkpTokenId(identifier, networkCtx); + return getPermittedAuthMethods( + { tokenId: pkpTokenId.toString() }, + networkCtx + ); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await getPermittedAuthMethodsByIdentifier( +// { +// // tokenId: "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// address: '0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F', +// }, +// networkCtx +// ); +// console.log('permittedAuthMethods', res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedActionByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedActionByIdentifier.ts new file mode 100644 index 0000000000..6e1875864c --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedActionByIdentifier.ts @@ -0,0 +1,69 @@ +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { NagaContext } from '../../../../../../types'; +import { isPermittedAction } from '../../../rawContractApis/permissions/read/isPermittedAction'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +// Schema for validating the request parameters +const isPermittedActionByIdentifierSchema = z.intersection( + z.object({ + ipfsId: isIpfsCidV0, + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type IsPermittedActionByIdentifierRequest = z.infer< + typeof isPermittedActionByIdentifierSchema +>; + +/** + * Check if an action is permitted for a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey and ipfsId + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to boolean indicating if the action is permitted + */ +export async function isPermittedActionByIdentifier( + request: IsPermittedActionByIdentifierRequest, + networkCtx: NagaContext +): Promise { + logger.debug({ request }); + + const { ipfsId, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return isPermittedAction( + { + tokenId: pkpTokenId.toString(), + ipfsId, + }, + networkCtx + ); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await isPermittedActionByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// // address: "0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F", +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// }, +// networkCtx +// ); +// console.log("Is action permitted:", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedAddressByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedAddressByIdentifier.ts new file mode 100644 index 0000000000..355d2285a2 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/isPermittedAddressByIdentifier.ts @@ -0,0 +1,68 @@ +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { NagaContext } from '../../../../../../types'; +import { isPermittedAddress } from '../../../rawContractApis/permissions/read/isPermittedAddress'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; + +// Schema for validating the request parameters +const isPermittedAddressByIdentifierSchema = z.intersection( + z.object({ + targetAddress: z.string().regex(/^0x[a-fA-F0-9]{40}$/), + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type IsPermittedAddressByIdentifierRequest = z.infer< + typeof isPermittedAddressByIdentifierSchema +>; + +/** + * Check if an address is permitted for a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey and targetAddress + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to boolean indicating if the address is permitted + */ +export async function isPermittedAddressByIdentifier( + request: IsPermittedAddressByIdentifierRequest, + networkCtx: NagaContext +): Promise { + logger.debug({ request }); + + const { targetAddress, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return isPermittedAddress( + { + tokenId: pkpTokenId.toString(), + address: targetAddress, + }, + networkCtx + ); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await isPermittedAddressByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// // pubkey: "0x000", +// // address: "0xef3eE1bD838aF5B36482FAe8a6Fc394C68d5Fa9F", +// targetAddress: "0x1234567890123456789012345678901234567890", +// }, +// networkCtx +// ); +// console.log("Is address permitted:", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedActionByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedActionByIdentifier.ts new file mode 100644 index 0000000000..481c49deef --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedActionByIdentifier.ts @@ -0,0 +1,66 @@ +import { z } from 'zod'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { NagaContext } from '../../../../../../types'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { removePermittedAction } from '../../../rawContractApis/permissions/write/removePermittedAction'; +import { LitTxVoid } from '../../../types'; + +// Schema for the request +const removePermittedActionByIdentifierSchema = z.intersection( + z.object({ + ipfsId: isIpfsCidV0, + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type RemovePermittedActionByIdentifierRequest = z.infer< + typeof removePermittedActionByIdentifierSchema +>; + +/** + * Removes a permitted action from a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey and ipfsId + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to transaction details + */ +export async function removePermittedActionByIdentifier( + request: RemovePermittedActionByIdentifierRequest, + networkCtx: NagaContext +): Promise { + const { ipfsId, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return removePermittedAction( + { + tokenId: pkpTokenId.toString(), + ipfsId, + }, + networkCtx + ); +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await removePermittedActionByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts new file mode 100644 index 0000000000..784d6685a5 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts @@ -0,0 +1,65 @@ +import { z } from 'zod'; +import { NagaContext } from '../../../../../../types'; +import { + PkpIdentifierRaw, + resolvePkpTokenId, +} from '../../../rawContractApis/permissions/utils/resolvePkpTokenId'; +import { removePermittedAddress } from '../../../rawContractApis/permissions/write/removePermittedAddress'; +import { LitTxVoid } from '../../../types'; + +// Schema for the request +const removePermittedAddressByIdentifierSchema = z.intersection( + z.object({ + targetAddress: z.string().regex(/^0x[a-fA-F0-9]{40}$/), + }), + z.union([ + z.object({ tokenId: z.string().or(z.number()).or(z.bigint()) }), + z.object({ pubkey: z.string() }), + z.object({ address: z.string() }), + ]) +); + +type RemovePermittedAddressByIdentifierRequest = z.infer< + typeof removePermittedAddressByIdentifierSchema +>; + +/** + * Removes a permitted address from a PKP token using various identifier types + * @param request - Object containing either tokenId/address/pubkey and targetAddress + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to transaction details + */ +export async function removePermittedAddressByIdentifier( + request: RemovePermittedAddressByIdentifierRequest, + networkCtx: NagaContext +): Promise { + const { targetAddress, ...identifier } = request; + const pkpTokenId = await resolvePkpTokenId( + identifier as PkpIdentifierRaw, + networkCtx + ); + + return removePermittedAddress( + { + tokenId: pkpTokenId.toString(), + address: targetAddress, + }, + networkCtx + ); +} + +// Example usage +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await removePermittedAddressByIdentifier( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// targetAddress: "0x1234567890123456789012345678901234567890", +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/index.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/index.ts new file mode 100644 index 0000000000..02975ed0e5 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/PKPPermissionsManager/index.ts @@ -0,0 +1,13 @@ +export { PKPPermissionsManager } from './PKPPermissionsManager'; + +// export { addPermittedActionByIdentifier } from "./handlers/addPermittedActionByIdentifier"; +// export { addPermittedAddressByIdentifier } from "./handlers/addPermittedAddressByIdentifier"; +// export { removePermittedActionByIdentifier } from "./handlers/removePermittedActionByIdentifier"; +// export { removePermittedAddressByIdentifier } from "./handlers/removePermittedAddressByIdentifier"; +// export { isPermittedActionByIdentifier } from "./handlers/isPermittedActionByIdentifier"; +// export { isPermittedAddressByIdentifier } from "./handlers/isPermittedAddressByIdentifier"; +// export { getPermittedActionsByIdentifier } from "./handlers/getPermittedActionsByIdentifier"; +// export { getPermittedAddressesByIdentifier } from "./handlers/getPermittedAddressesByIdentifier"; +// export { getPermittedAuthMethodsByIdentifier } from "./handlers/getPermittedAuthMethodsByIdentifier"; +// export { getPermittedAuthMethodScopesByIdentifier } from "./handlers/getPermittedAuthMethodScopesByIdentifier"; +// export { getPermissionsContext } from "./handlers/getPermissionsContext"; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/README.md b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/README.md new file mode 100644 index 0000000000..d3bce3d583 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/README.md @@ -0,0 +1 @@ +Abstracted APIs handle data transformation, combine multiple operations, and provide more user-friendly interfaces diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.spec.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.spec.ts new file mode 100644 index 0000000000..fb0753a92c --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.spec.ts @@ -0,0 +1,54 @@ +import { networkContext, NetworkContext } from '../../../_config'; +import { getConnectionInfo } from './getConnectionInfo'; + +describe('ConnectionInfo', () => { + let networkCtx: NetworkContext; + + beforeAll(async () => { + networkCtx = networkContext; + }); + + test('getConnectionInfo returns properly formatted connection data', async () => { + const connectionInfo = await getConnectionInfo({ + networkCtx, + }); + + // Verify the structure and data types + expect(connectionInfo).toHaveProperty('epochInfo'); + expect(connectionInfo).toHaveProperty('minNodeCount'); + expect(connectionInfo).toHaveProperty('bootstrapUrls'); + + // Verify the epochInfo structure + expect(connectionInfo.epochInfo).toHaveProperty('epochLength'); + expect(connectionInfo.epochInfo).toHaveProperty('number'); + expect(connectionInfo.epochInfo).toHaveProperty('endTime'); + expect(connectionInfo.epochInfo).toHaveProperty('retries'); + expect(connectionInfo.epochInfo).toHaveProperty('timeout'); + + // Verify data types and ranges + expect(connectionInfo.minNodeCount).toBeGreaterThanOrEqual(1); + expect(connectionInfo.bootstrapUrls.length).toBeGreaterThanOrEqual( + connectionInfo.minNodeCount + ); + + // Verify that all URLs start with http:// or https:// + connectionInfo.bootstrapUrls.forEach((url) => { + expect(url.startsWith('http://') || url.startsWith('https://')).toBe( + true + ); + }); + }); + + test('getConnectionInfo applies custom protocol when provided', async () => { + const customProtocol = 'https://'; + const connectionInfo = await getConnectionInfo({ + networkCtx, + nodeProtocol: customProtocol, + }); + + // Verify that all URLs use the custom protocol + connectionInfo.bootstrapUrls.forEach((url) => { + expect(url.startsWith(customProtocol)).toBe(true); + }); + }); +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.ts new file mode 100644 index 0000000000..535aa11ac2 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.ts @@ -0,0 +1,99 @@ +import { getActiveUnkickedValidatorStructsAndCounts } from '../../../../../common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts'; +import { GetActiveUnkickedValidatorStructsAndCountsTransformed } from '../../../../../common/LitChainClient/schemas/GetActiveUnkickedValidatorStructsAndCountsSchema'; +import { NagaContext } from '../../../../../types'; +import { networkContext } from '../../../_config'; + +/** + * Interface representing the structure of connection information + */ +interface ConnectionInfo { + epochInfo: { + epochLength: number; + number: number; + endTime: number; + retries: number; + timeout: number; + }; + minNodeCount: number; + bootstrapUrls: string[]; +} + +/** + * Interface for the parameters of getConnectionInfo function + */ +interface GetConnectionInfoParams { + networkCtx: NagaContext; + nodeProtocol?: string | null; +} + +/** + * Retrieves the connection information for a network. + * + * This high-level API builds on the raw contract API to provide formatted connection + * information including epoch details, minimum node count, and bootstrap URLs with + * proper protocol prefixes. + * + * @param params - Parameters for retrieving connection information + * @param params.networkCtx - The network context for the contract + * @param [params.nodeProtocol] - Optional protocol for the network node (HTTP or HTTPS) + * + * @returns An object containing the epoch information, minimum node count and an array of bootstrap URLs + * + * @throws Error if the minimum node count is not set or if the active validator set does not meet the threshold + */ +export async function getConnectionInfo({ + networkCtx, + nodeProtocol, +}: GetConnectionInfoParams): Promise { + // Get the validated data from the raw contract API + const validatedData = await getActiveUnkickedValidatorStructsAndCounts( + networkCtx + ); + + const { epochInfo, minNodeCount, validatorURLs } = + validatedData as GetActiveUnkickedValidatorStructsAndCountsTransformed; + + // Verify minimum node count + if (!minNodeCount) { + throw new Error('❌ Minimum validator count is not set'); + } + + // Verify validator set meets the minimum threshold + if (validatorURLs.length < Number(minNodeCount)) { + throw new Error( + `❌ Active validator set does not meet the threshold. Required: ${minNodeCount} but got: ${validatorURLs.length}` + ); + } + + // Transform the URLs to bootstrap URLs based on the provided protocol + // Note: validatorURLs from the schema are already processed with the network's httpProtocol + // but we can override that with the nodeProtocol parameter if provided + const bootstrapUrls = nodeProtocol + ? validatorURLs.map((url: string) => { + // Extract the hostname and port from the URL (remove any existing protocol) + const urlWithoutProtocol = url.replace(/^https?:\/\//, ''); + return `${nodeProtocol}${urlWithoutProtocol}`; + }) + : validatorURLs; + + return { + epochInfo, + minNodeCount: Number(minNodeCount), + bootstrapUrls, + }; +} + +/** + * Self-executable script for testing the getConnectionInfo function + * + * Usage: bun run src/services/lit/LitNetwork/vNaga/common/LitChainClient/apis/highLevelApis/connection/getConnectionInfo.ts + */ +// if (import.meta.main) { +// // Use the development network context for testing +// const results = await getConnectionInfo({ +// networkCtx: networkContext, +// }); + +// console.log('Connection Info Results:'); +// console.log(JSON.stringify(results, null, 2)); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/index.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/index.ts new file mode 100644 index 0000000000..b90e8fd0c1 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/index.ts @@ -0,0 +1,11 @@ +/** + * High-Level APIs + * + * This module exports high-level APIs that provide more user-friendly interfaces, + * handle data transformation, and combine multiple operations. + */ + +// Export existing high-level APIs +export * from './mintPKP/mintPKP'; +export * from './PKPPermissionsManager'; +export * from './priceFeed'; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/MintPKPSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/MintPKPSchema.ts new file mode 100644 index 0000000000..ec78ff24ed --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/MintPKPSchema.ts @@ -0,0 +1,31 @@ +import { isHex, toBytes, toHex } from 'viem'; +import { z } from 'zod'; +import { AuthMethodSchema } from '../../../schemas/shared/AuthMethodSchema'; +import { ScopeSchemaRaw } from '../../../schemas/shared/ScopeSchema'; + +export const MintPKPSchema = z + .object({ + authMethod: AuthMethodSchema, + scopes: z.array(ScopeSchemaRaw), + pubkey: z.string().optional(), + customAuthMethodId: z.string().optional(), + }) + .transform((data) => { + // If no customAuthMethodId provided, return data as-is + if (!data.customAuthMethodId) { + return data; + } + + // Convert customAuthMethodId to hex if not already in hex format + const hexAuthMethodId = isHex(data.customAuthMethodId) + ? data.customAuthMethodId + : toHex(toBytes(data.customAuthMethodId)); + + // Return data with transformed customAuthMethodId + return { + ...data, + customAuthMethodId: hexAuthMethodId, + }; + }); + +export type MintPKPRequest = z.input; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.spec.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.spec.ts new file mode 100644 index 0000000000..b74710d62d --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.spec.ts @@ -0,0 +1,75 @@ +import { networkContext, NetworkContext } from '../../../_config'; +import { mintPKP } from './mintPKP'; + +describe('mintPKP', () => { + let networkCtx: NetworkContext; + + beforeAll(() => { + networkCtx = networkContext; + }); + + test('should mint PKP with customAuthMethodId and return correct data format', async () => { + const res = await mintPKP( + { + authMethod: { + authMethodType: 1, + accessToken: '0x', + }, + scopes: ['sign-anything'], + customAuthMethodId: 'app-id-xxx:user-id-yyy', + }, + networkCtx + ); + + // Check response structure + expect(res).toHaveProperty('hash'); + expect(res).toHaveProperty('receipt'); + expect(res).toHaveProperty('data'); + expect(res.data).toHaveProperty('tokenId'); + expect(res.data).toHaveProperty('pubkey'); + expect(res.data).toHaveProperty('ethAddress'); + + // Verify data types + expect(typeof res.data.tokenId).toBe('bigint'); + expect(typeof res.data.pubkey).toBe('string'); + expect(typeof res.data.ethAddress).toBe('string'); + expect(res.data.pubkey).toMatch(/^0x/); + expect(res.data.ethAddress).toMatch(/^0x/); + }); + + test('show auto-convert native authMethod to authMethodId when customAuthMethodId is omitted', async () => { + const eoaAuthSig = { + sig: '', + derivedVia: 'web3.eth.personal.sign', + signedMessage: + 'I am creating an account to use Lit Protocol at 2022-04-12T09:23:31.290Z', + address: '0x7e7763BE1379Bb48AFEE4F5c232Fb67D7c03947F', + }; + + const res = await mintPKP( + { + authMethod: { + authMethodType: 1, + accessToken: JSON.stringify(eoaAuthSig), + }, + scopes: ['sign-anything'], + }, + networkCtx + ); + + // Find relevant events in decoded logs + const permittedAuthMethodScopeAddedEvent = res.decodedLogs.find( + (log) => log.eventName === 'PermittedAuthMethodScopeAdded' + ); + const permittedAuthMethodAddedEvent = res.decodedLogs.find( + (log) => log.eventName === 'PermittedAuthMethodAdded' + ); + + expect(permittedAuthMethodScopeAddedEvent?.args['id']).toBe( + '0x4cb822e6f51d9723f22b9374c4ef7d41ae2b1a5463738516aeb117ff387ba51a' + ); + expect(permittedAuthMethodAddedEvent?.args['id']).toBe( + '0x4cb822e6f51d9723f22b9374c4ef7d41ae2b1a5463738516aeb117ff387ba51a' + ); + }); +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.ts new file mode 100644 index 0000000000..ec823c63ea --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/mintPKP/mintPKP.ts @@ -0,0 +1,69 @@ +import { Hex } from 'viem'; +import { logger } from '../../../../../../shared/logger'; +import { getAuthIdByAuthMethod } from '@lit-protocol/auth'; +import { NagaContext } from '../../../../../types'; +import { PKPData } from '../../../schemas/shared/PKPDataSchema'; +import { mintNextAndAddAuthMethods } from '../../rawContractApis/pkp/write/mintNextAndAddAuthMethods'; +import { LitTxRes } from '../../types'; +import { MintPKPRequest, MintPKPSchema } from './MintPKPSchema'; + +/** + * authMethod + * * authMethodType - you should be getting this directly from the authenticator + * + * scopes + * * no-permissions - This scope allows no permissions + * * sign-anything - This scope allows signing any data + * * personal-sign - This scope only allows signing messages using the EIP-191 scheme + * which prefixes "Ethereum Signed Message" to the data to be signed. + * This prefix prevents creating signatures that can be used for transactions. + * + * pubkey + * * Only apply to WebAuthn. Otherwise, default to '0x' + * + * customAuthMethodId + * * This field is usually used by the dApp owner to identify the user - eg. app-id-xxx:user-id-yyy + * + * ```ts + * const customAuthMethod = { + * authMethodType: 89989, + * authMethodId: 'app-id-xxx:user-id-yyy', + * accessToken: 'xxx', + * }; + * ``` + */ +export const mintPKP = async ( + request: MintPKPRequest, + networkCtx: NagaContext +): Promise> => { + const validatedRequest = MintPKPSchema.parse(request); + + logger.debug({ validatedRequest }); + + let _authMethodId: Hex; + + if (validatedRequest.customAuthMethodId) { + _authMethodId = validatedRequest.customAuthMethodId as Hex; + } else { + // Generate the authMethodId automatically from the auth method + const authMethodId = await getAuthIdByAuthMethod( + validatedRequest.authMethod + ); + _authMethodId = authMethodId as Hex; + } + + const tx = await mintNextAndAddAuthMethods( + { + keyType: 2, + permittedAuthMethodTypes: [validatedRequest.authMethod.authMethodType], + permittedAuthMethodIds: [_authMethodId], + permittedAuthMethodPubkeys: [validatedRequest.pubkey || '0x'], + permittedAuthMethodScopes: [validatedRequest.scopes], + addPkpEthAddressAsPermittedAddress: true, + sendPkpToItself: true, + }, + networkCtx + ); + + return tx; +}; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/README.md b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/README.md new file mode 100644 index 0000000000..b095b24ffb --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/README.md @@ -0,0 +1,104 @@ +# Price Feed API + +High-level API for obtaining pricing information from Lit Network validators. + +## Features + +- **Caching**: Automatically caches price information to reduce contract calls +- **Auto-refresh**: Refreshes stale data after configurable time period +- **Price Sorting**: Returns validators sorted by price (cheapest first) +- **Simplified Interface**: Easy-to-use API compared to raw contract calls + +## Usage + +```typescript +import { getPriceFeedInfo, getNodePrices } from './priceFeedApi'; +import { networkContext } from '../../../_config'; + +// Get complete price feed information +const priceInfo = await getPriceFeedInfo({ + realmId: 1, + networkCtx: networkContext, +}); + +// Get just the node prices sorted by cheapest first +const prices = await getNodePrices({ + realmId: 1, + networkCtx: networkContext, +}); +``` + +## API Reference + +### getPriceFeedInfo + +```typescript +async function getPriceFeedInfo( + params: GetPriceFeedInfoParams +): Promise; +``` + +Gets complete price feed information with caching to reduce blockchain calls. + +**Parameters:** + +- `params.realmId`: (Optional) The realm ID (defaults to 1) +- `params.networkCtx`: Network context for contract interactions +- `params.productIds`: (Optional) Array of product IDs to get prices for + +**Returns:** + +- `PriceFeedInfo` object containing: + - `epochId`: Current epoch ID + - `minNodeCount`: Minimum required node count + - `networkPrices`: Array of node prices sorted by cheapest first + +### getNodePrices + +```typescript +async function getNodePrices( + params: GetPriceFeedInfoParams +): Promise; +``` + +Gets just the node prices sorted by cheapest first. + +**Parameters:** + +- Same as `getPriceFeedInfo` + +**Returns:** + +- Array of network prices sorted by cheapest first + +## Types + +### PriceFeedInfo + +```typescript +interface PriceFeedInfo { + epochId: any; + minNodeCount: any; + networkPrices: { + url: string; + prices: bigint[]; + }[]; +} +``` + +### GetPriceFeedInfoParams + +```typescript +interface GetPriceFeedInfoParams { + realmId?: number; + networkCtx: NagaContext; + productIds?: bigint[]; +} +``` + +## Configuration + +The API uses the following configuration constants: + +- `STALE_PRICES_SECONDS`: Time in milliseconds before prices are considered stale (default: 3000ms) +- `PRODUCT_IDS_ARRAY`: Default product IDs to query if none specified diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/index.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/index.ts new file mode 100644 index 0000000000..32d9738201 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/index.ts @@ -0,0 +1,8 @@ +/** + * Price Feed API Exports + * + * This module exports the high-level price feed API functions for retrieving + * pricing information from the Lit Network. + */ + +export * from './priceFeedApi'; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/priceFeedApi.spec.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/priceFeedApi.spec.ts new file mode 100644 index 0000000000..183b6ee2e1 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/priceFeedApi.spec.ts @@ -0,0 +1,61 @@ +import { networkContext, NetworkContext } from '../../../_config'; +import { getNodePrices, getPriceFeedInfo } from './priceFeedApi'; + +describe('priceFeedApi', () => { + let networkCtx: NetworkContext; + + beforeAll(() => { + networkCtx = networkContext; + }); + + test('getPriceFeedInfo should return data in the correct format', async () => { + const priceInfo = await getPriceFeedInfo({ + networkCtx, + }); + + // Check response structure + expect(priceInfo).toHaveProperty('epochId'); + expect(priceInfo).toHaveProperty('minNodeCount'); + expect(priceInfo).toHaveProperty('networkPrices'); + + // Check that networkPrices is an array + expect(Array.isArray(priceInfo.networkPrices)).toBe(true); + + // Check structure of first network price entry if available + if (priceInfo.networkPrices.length > 0) { + const firstPrice = priceInfo.networkPrices[0]; + expect(firstPrice).toHaveProperty('url'); + expect(firstPrice).toHaveProperty('prices'); + expect(typeof firstPrice.url).toBe('string'); + expect(Array.isArray(firstPrice.prices)).toBe(true); + + // Check that prices are bigints + if (firstPrice.prices.length > 0) { + expect(typeof firstPrice.prices[0]).toBe('bigint'); + } + } + }); + + test('getNodePrices should return data in the correct format', async () => { + const prices = await getNodePrices({ + networkCtx, + }); + + // Check that prices is an array + expect(Array.isArray(prices)).toBe(true); + + // Check structure of first price entry if available + if (prices.length > 0) { + const firstPrice = prices[0]; + expect(firstPrice).toHaveProperty('url'); + expect(firstPrice).toHaveProperty('prices'); + expect(typeof firstPrice.url).toBe('string'); + expect(Array.isArray(firstPrice.prices)).toBe(true); + + // Check that prices are bigints + if (firstPrice.prices.length > 0) { + expect(typeof firstPrice.prices[0]).toBe('bigint'); + } + } + }); +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/priceFeedApi.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/priceFeedApi.ts new file mode 100644 index 0000000000..cf2e9dfbd0 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/highLevelApis/priceFeed/priceFeedApi.ts @@ -0,0 +1,199 @@ +/** + * This module provides a high-level interface for obtaining price feed information + * from the Lit Network. It includes features such as: + * - Caching price information to reduce contract calls + * - rAutomatic efreshing of stale data + * - Sorting of validators by price + * + * Usage: + * ```typescript + * import { getPriceFeedInfo, getNodePrices } from './priceFeedApi'; + * + * // Get complete price feed information + * const priceInfo = await getPriceFeedInfo({ + * realmId: 1, + * networkCtx: myNetworkContext + * }); + * + * // Get just the node prices sorted by cheapest first + * const prices = await getNodePrices({ + * realmId: 1, + * networkCtx: myNetworkContext + * }); + * ``` + */ + +import { NagaContext } from '../../../../../types'; +import { + getNodesForRequest, + PRODUCT_IDS, +} from '../../../apis/rawContractApis/pricing/getNodesForRequest'; + +// Configuration constants +const STALE_PRICES_SECONDS = 3 * 1000; // Update prices if > X seconds old +const PRODUCT_IDS_ARRAY = Object.values(PRODUCT_IDS); + +// Type for price feed information +export interface PriceFeedInfo { + epochId: any; + minNodeCount: any; + networkPrices: { + url: string; + prices: bigint[]; + }[]; +} + +// Type for the parameters +export interface GetPriceFeedInfoParams { + realmId?: number; + networkCtx: NagaContext; + productIds?: bigint[]; +} + +// Caching variables +let priceFeedInfo: PriceFeedInfo | null = null; +let fetchingPriceFeedInfo: null | Promise = null; +let lastUpdatedTimestamp = 0; + +/** + * Fetches price feed information directly from the blockchain + * + * @param params - Parameters for fetching price feed information + * @returns The price feed information including epoch ID, minimum node count, and sorted network prices + */ +async function fetchPriceFeedInfo( + params: GetPriceFeedInfoParams +): Promise { + const { realmId = 1, networkCtx, productIds = PRODUCT_IDS_ARRAY } = params; + + // Get nodes and prices from raw contract API + const nodesResponse = await getNodesForRequest({ productIds }, networkCtx); + + // Extract and format the network prices + const prices = nodesResponse.nodesAndPrices + .map((node) => { + return { + url: node.validatorUrl, + prices: node.prices.map((price) => BigInt(price)), + }; + }) + .sort(({ prices: pricesA }, { prices: pricesB }) => { + // Sort by first price since the cheapest for any product will often be cheapest for all + const diff = Number(pricesA[0] - pricesB[0]); + return diff; + }); + + return { + epochId: nodesResponse.epochId, + minNodeCount: nodesResponse.minNodeCount, + networkPrices: prices, + }; +} + +/** + * Fetches price feed information with local promise tracking + * to prevent duplicate concurrent requests + * + * @param params - Parameters for fetching price feed information + * @returns The price feed information + */ +async function fetchPriceFeedInfoWithLocalPromise( + params: GetPriceFeedInfoParams +): Promise { + try { + fetchingPriceFeedInfo = fetchPriceFeedInfo(params); + + priceFeedInfo = await fetchingPriceFeedInfo; + lastUpdatedTimestamp = Date.now(); + + return priceFeedInfo; + } finally { + fetchingPriceFeedInfo = null; + } +} + +/** + * Gets price feed information with caching to reduce blockchain calls + * + * @param params - Parameters for fetching price feed information + * @returns The price feed information including epoch ID, minimum node count, and sorted network prices + * @example + * { + epochId: 15n, + minNodeCount: 2n, + networkPrices: [ + { + url: "http://127.0.0.1:7470", + prices: [ 10000000000000000n, 10000000000000000n, 10000000000000000n ], + }, { + url: "http://127.0.0.1:7471", + prices: [ 10000000000000000n, 10000000000000000n, 10000000000000000n ], + }, { + url: "http://127.0.0.1:7472", + prices: [ 10000000000000000n, 10000000000000000n, 10000000000000000n ], + } + ], +} + */ +export async function getPriceFeedInfo( + params: GetPriceFeedInfoParams +): Promise { + // If there's a local promise, an update is in progress; wait for that + if (fetchingPriceFeedInfo) { + return fetchingPriceFeedInfo; + } + + // If we have updated prices in the last few seconds, return our current prices + if ( + priceFeedInfo && + Date.now() - lastUpdatedTimestamp < STALE_PRICES_SECONDS + ) { + return priceFeedInfo; + } + + // Fetch new prices, update local cache values, and return them + return fetchPriceFeedInfoWithLocalPromise(params); +} + +/** + * Gets just the node prices sorted by cheapest first + * + * @param params - Parameters for fetching price feed information + * @returns Array of network prices sorted by cheapest first + * @example + * [ + { + url: "http://127.0.0.1:7470", + prices: [ 10000000000000000n, 10000000000000000n, 10000000000000000n ], + }, { + url: "http://127.0.0.1:7471", + prices: [ 10000000000000000n, 10000000000000000n, 10000000000000000n ], + }, { + url: "http://127.0.0.1:7472", + prices: [ 10000000000000000n, 10000000000000000n, 10000000000000000n ], + } +] + */ +export async function getNodePrices( + params: GetPriceFeedInfoParams +): Promise { + const priceInfo = await getPriceFeedInfo(params); + return priceInfo.networkPrices; +} + +// if (import.meta.main) { +// // Get complete price feed information +// const priceInfo = await getPriceFeedInfo({ +// realmId: 1, +// networkCtx: networkContext, +// }); + +// // Get just the node prices sorted by cheapest first +// const prices = await getNodePrices({ +// realmId: 1, +// networkCtx: networkContext, +// }); + +// console.log('priceInfo', priceInfo); +// console.log('prices', prices); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/index.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/index.ts new file mode 100644 index 0000000000..eb7f59ea00 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/index.ts @@ -0,0 +1,92 @@ +// ==================== Imports ==================== +import { getPermittedActions } from './rawContractApis/permissions/read/getPermittedActions'; +import { getPermittedAddresses } from './rawContractApis/permissions/read/getPermittedAddresses'; +import { isPermittedAction } from './rawContractApis/permissions/read/isPermittedAction'; +import { isPermittedAddress } from './rawContractApis/permissions/read/isPermittedAddress'; +import { addPermittedAction } from './rawContractApis/permissions/write/addPermittedAction'; +import { addPermittedAddress } from './rawContractApis/permissions/write/addPermittedAddress'; +import { removePermittedAction } from './rawContractApis/permissions/write/removePermittedAction'; +import { removePermittedAddress } from './rawContractApis/permissions/write/removePermittedAddress'; +import { tokenOfOwnerByIndex } from './rawContractApis/pkp/read/tokenOfOwnerByIndex'; +import { claimAndMintNextAndAddAuthMethodsWithTypes } from './rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes'; +import { mintNextAndAddAuthMethods } from './rawContractApis/pkp/write/mintNextAndAddAuthMethods'; +import { + getNodesForRequest, + PRODUCT_IDS, +} from './rawContractApis/pricing/getNodesForRequest'; + +// High-level APIs +import { mintPKP } from './highLevelApis/mintPKP/mintPKP'; +import { PKPPermissionsManager } from './highLevelApis/PKPPermissionsManager'; +import { + getNodePrices, + getPriceFeedInfo, +} from './highLevelApis/priceFeed/priceFeedApi'; + +// ==================== Exports ==================== +// ========== Treeshakable ========== +// Individual exports allow better tree-shaking +// export { claimAndMintNextAndAddAuthMethodsWithTypes } from "./rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes"; +// export { mintNextAndAddAuthMethods } from "./rawContractApis/pkp/write/mintNextAndAddAuthMethods"; +// export { tokenOfOwnerByIndex } from "./rawContractApis/pkp/read/tokenOfOwnerByIndex"; +// export { getPermittedAddresses } from "./rawContractApis/permissions/read/getPermittedAddresses"; +// export { getPermittedActions } from "./rawContractApis/permissions/read/getPermittedActions"; +// export { isPermittedAddress } from "./rawContractApis/permissions/read/isPermittedAddress"; +// export { isPermittedAction } from "./rawContractApis/permissions/read/isPermittedAction"; +// export { addPermittedAction } from "./rawContractApis/permissions/write/addPermittedAction"; +// export { removePermittedAction } from "./rawContractApis/permissions/write/removePermittedAction"; +// export { addPermittedAddress } from "./rawContractApis/permissions/write/addPermittedAddress"; +// export { removePermittedAddress } from "./rawContractApis/permissions/write/removePermittedAddress"; +// export { createLitContracts } from "./utils/createLitContracts"; + +// High-level APIs +// export { mintPKP } from "./highLevelApis/mintPKP/mintPKP"; +// export { PKPPermissionsManager } from "./highLevelApis/PKPPermissionsManager"; + +// ========== Convenience API ========== +export const rawApi = { + pkp: { + read: { + tokenOfOwnerByIndex, + }, + write: { + claimAndMintNextAndAddAuthMethodsWithTypes, + mintNextAndAddAuthMethods, + }, + }, + permission: { + read: { + getPermittedAddresses, + isPermittedAddress, + getPermittedActions, + isPermittedAction, + }, + write: { + addPermittedAction, + removePermittedAction, + addPermittedAddress, + removePermittedAddress, + }, + }, + pricing: { + getNodesForRequest, + constants: { + PRODUCT_IDS, + }, + }, +}; + +export const api = { + // PKP Management + mintPKP, + + // Permissions Management + PKPPermissionsManager, + + pricing: { + getPriceFeedInfo, + getNodePrices, + }, +}; +// Export utils from separate file +export * as utils from './utils'; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/README.md b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/README.md new file mode 100644 index 0000000000..9dcda6e047 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/README.md @@ -0,0 +1 @@ +Raw APIs provide direct, unmodified access to smart contract functions diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedActions.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedActions.ts new file mode 100644 index 0000000000..032adb7e9e --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedActions.ts @@ -0,0 +1,47 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { NagaContext } from '../../../../../../types'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const getPermittedActionsSchema = z.object({ + tokenId: toBigInt, +}); + +type GetPermittedActionsRequest = z.input; + +/** + * Get permitted actions for a PKP token + * @param request - Object containing tokenId + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted actions for the PKP token + */ +export async function getPermittedActions( + request: GetPermittedActionsRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = getPermittedActionsSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + const res = await pkpPermissionsContract.read.getPermittedActions([ + validatedRequest.tokenId, + ]); + + return res; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await getPermittedActions( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); +// console.log("permittedActions", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAddresses.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAddresses.ts new file mode 100644 index 0000000000..536ad4e4cb --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAddresses.ts @@ -0,0 +1,47 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { NagaContext } from '../../../../../../types'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const getPermittedAddressesSchema = z.object({ + tokenId: toBigInt, +}); + +type GetPermittedAddressesRequest = z.input; + +/** + * Get permitted addresses for a PKP token + * @param request - Object containing tokenId + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted addresses for the PKP token + */ +export async function getPermittedAddresses( + request: GetPermittedAddressesRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = getPermittedAddressesSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + const res = await pkpPermissionsContract.read.getPermittedAddresses([ + validatedRequest.tokenId, + ]); + + return res; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await getPermittedAddresses( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); +// console.log("permittedAddresses", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethodScopes.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethodScopes.ts new file mode 100644 index 0000000000..0843bc355e --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethodScopes.ts @@ -0,0 +1,60 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { NagaContext } from '../../../../../../types'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const getPermittedAuthMethodScopesSchema = z.object({ + tokenId: toBigInt, + authMethodType: z.number(), + authMethodId: z.string(), + scopeId: z.number().optional(), +}); + +type GetPermittedAuthMethodScopesRequest = z.input< + typeof getPermittedAuthMethodScopesSchema +>; + +/** + * Get permitted scopes for a specific authentication method of a PKP token + * @param request - Object containing tokenId, authMethodType, authMethodId, and optional scopeId + * @param networkCtx - Network context for contract interactions + * @returns Array of boolean values indicating whether each scope is permitted + */ +export async function getPermittedAuthMethodScopes( + request: GetPermittedAuthMethodScopesRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = getPermittedAuthMethodScopesSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + const res = await pkpPermissionsContract.read.getPermittedAuthMethodScopes([ + validatedRequest.tokenId, + BigInt(validatedRequest.authMethodType), + validatedRequest.authMethodId as `0x${string}`, + validatedRequest.scopeId !== undefined + ? BigInt(validatedRequest.scopeId) + : BigInt(0), + ]); + + return res; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await getPermittedAuthMethodScopes( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// authMethodType: 1, +// authMethodId: "0x1234567890abcdef1234567890abcdef12345678", +// // scopeId: 0, +// }, +// networkCtx +// ); +// console.log("permittedAuthMethodScopes", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethods.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethods.ts new file mode 100644 index 0000000000..fc03990f22 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/getPermittedAuthMethods.ts @@ -0,0 +1,56 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { NagaContext } from '../../../../../../types'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const getPermittedAuthMethodsSchema = z.object({ + tokenId: toBigInt, +}); + +type GetPermittedAuthMethodsRequest = z.input< + typeof getPermittedAuthMethodsSchema +>; + +// Define the auth method return type +export interface AuthMethod { + authMethodType: bigint; + id: `0x${string}`; + userPubkey: `0x${string}`; +} + +/** + * Get permitted authentication methods for a PKP token + * @param request - Object containing tokenId + * @param networkCtx - Network context for contract interactions + * @returns Array of permitted authentication methods for the PKP token + */ +export async function getPermittedAuthMethods( + request: GetPermittedAuthMethodsRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = getPermittedAuthMethodsSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + const res = await pkpPermissionsContract.read.getPermittedAuthMethods([ + validatedRequest.tokenId, + ]); + + return res; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await getPermittedAuthMethods( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); +// console.log("permittedAuthMethods", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAction.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAction.ts new file mode 100644 index 0000000000..71f423822d --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAction.ts @@ -0,0 +1,59 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { ipfsCidV0ToHex } from '../../../../../../../shared/utils/transformers/ipfsCidV0ToHex'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { NagaContext } from '../../../../../../types'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const isPermittedActionSchema = z + .object({ + ipfsId: isIpfsCidV0, + tokenId: toBigInt, + }) + .transform((data) => { + return { + ...data, + ipfsId: ipfsCidV0ToHex(data.ipfsId), + }; + }); + +type IsPermittedActionRequest = z.input; + +/** + * Checks if an action is permitted for a PKP token + * @param request - Object containing tokenId and ipfsId + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to boolean indicating if the action is permitted + */ +export async function isPermittedAction( + request: IsPermittedActionRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = isPermittedActionSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + + return pkpPermissionsContract.read.isPermittedAction([ + validatedRequest.tokenId, + validatedRequest.ipfsId, + ]); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await isPermittedAction( +// { +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); + +// console.log("Is action permitted:", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAddress.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAddress.ts new file mode 100644 index 0000000000..ece712eeea --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/read/isPermittedAddress.ts @@ -0,0 +1,53 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { NagaContext } from '../../../../../../types'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +const isPermittedAddressSchema = z.object({ + tokenId: toBigInt, + address: z + .string() + .regex(/^0x[a-fA-F0-9]{40}$/) + .transform((val): `0x${string}` => val as `0x${string}`), +}); + +type IsPermittedAddressRequest = z.input; + +/** + * Checks if an address is permitted for a PKP token + * @param request - Object containing tokenId and address + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to boolean indicating if the address is permitted + */ +export async function isPermittedAddress( + request: IsPermittedAddressRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = isPermittedAddressSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract } = createLitContracts(networkCtx); + + return pkpPermissionsContract.read.isPermittedAddress([ + validatedRequest.tokenId, + validatedRequest.address, + ]); +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await isPermittedAddress( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// address: "0x1234567890123456789012345678901234567890", +// }, +// networkCtx +// ); + +// console.log("Is address permitted:", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/utils/pubkeyToTokenId.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/utils/pubkeyToTokenId.ts new file mode 100644 index 0000000000..d4ad7ce410 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/utils/pubkeyToTokenId.ts @@ -0,0 +1,31 @@ +import { hexToBigInt, keccak256, toBytes } from 'viem'; + +/** + * Convert a public key to a token ID + * @param pubkey - The public key to convert + * @returns The token ID + * + * NOTE: code converted from: + * https://github.com/LIT-Protocol/lit-assets/blob/167d6908acc09c0aebdb6909f703b83921da4400/rust/lit-node/lit-node/src/utils/web.rs#L788-L802 + */ +export function pubkeyToTokenId(pubkey: string): bigint { + let pubkeyBytes: Uint8Array; + try { + pubkeyBytes = toBytes(pubkey); + } catch (e) { + throw new Error( + `Conversion error: ${e instanceof Error ? e.message : String(e)}` + ); + } + + if (pubkeyBytes.length !== 65) { + throw new Error( + `Invalid pubkey length. Expected 65 bytes, got ${pubkeyBytes.length}` + ); + } + + // this is what the original code did, but it returns a hex string instead of a bigint + // const tokenId = toHex(keccak256(pubkeyBytes)); + const tokenId = hexToBigInt(keccak256(pubkeyBytes)); + return tokenId; +} diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/utils/resolvePkpTokenId.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/utils/resolvePkpTokenId.ts new file mode 100644 index 0000000000..c253505bd8 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/utils/resolvePkpTokenId.ts @@ -0,0 +1,117 @@ +/** + * Utility for resolving PKP token IDs from various input types (pubkey, address, or direct tokenId) + * This module provides a consistent way to obtain PKP token IDs regardless of the input format. + */ + +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { isEthAddress } from '../../../../../../../shared/utils/z-validate'; +import { NagaContext } from '../../../../../../types'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { pubkeyToTokenId } from './pubkeyToTokenId'; + +// Input validation schema +export const PkpIdentifierSchema = z.discriminatedUnion('field', [ + z + .object({ + field: z.literal('tokenId'), + tokenId: toBigInt, + }) + .strict(), + z + .object({ + field: z.literal('address'), + address: isEthAddress, + }) + .strict(), + z + .object({ + field: z.literal('pubkey'), + pubkey: z.string(), + }) + .strict(), +]); + +// Helper type to ensure only one property exists +type ExactlyOne = { + [K in keyof T]: Record & Partial, never>>; +}[keyof T]; + +// Raw input type that ensures only one identifier is provided +export type PkpIdentifierRaw = ExactlyOne<{ + tokenId: string | number | bigint; + address: string; + pubkey: string; +}>; + +/** + * Resolves a PKP token ID from various input types + * @param identifier - Object containing exactly one of: tokenId, address, or pubkey + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to the PKP token ID as bigint + * @throws Error if unable to resolve token ID or if input is invalid + */ +export async function resolvePkpTokenId( + identifier: PkpIdentifierRaw, + networkCtx?: NagaContext +): Promise { + // Check for multiple fields + const providedFields = Object.keys(identifier); + if (providedFields.length !== 1) { + throw new Error( + `Invalid identifier: exactly one of tokenId, address, or pubkey must be provided. Found: ${providedFields.join( + ', ' + )}` + ); + } + + // Determine the field type and validate input + const validatedInput = PkpIdentifierSchema.parse({ + field: + 'tokenId' in identifier + ? 'tokenId' + : 'address' in identifier + ? 'address' + : 'pubkey' in identifier + ? 'pubkey' + : (() => { + throw new Error( + 'Invalid identifier: must provide tokenId, address, or pubkey' + ); + })(), + ...identifier, + }); + + logger.debug({ validatedInput }); + + // Handle direct token ID + if (validatedInput.field === 'tokenId') { + return validatedInput.tokenId; + } + + // Handle pubkey + if (validatedInput.field === 'pubkey') { + return pubkeyToTokenId(validatedInput.pubkey); + } + + // Handle address (requires network context) + if (validatedInput.field === 'address') { + if (!networkCtx) { + throw new Error('Network context required for address resolution'); + } + + const { pubkeyRouterContract } = createLitContracts(networkCtx); + const pkpTokenId = await pubkeyRouterContract.read.ethAddressToPkpId([ + validatedInput.address as `0x${string}`, + ]); + + if (!pkpTokenId) { + throw new Error('PKP token ID not found for address'); + } + + return pkpTokenId; + } + + throw new Error('Unable to resolve PKP token ID'); +} diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAction.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAction.ts new file mode 100644 index 0000000000..72c85f7ca7 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAction.ts @@ -0,0 +1,66 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { ipfsCidV0ToHex } from '../../../../../../../shared/utils/transformers/ipfsCidV0ToHex'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { NagaContext } from '../../../../../../types'; +import { ScopeSchemaRaw } from '../../../../schemas/shared/ScopeSchema'; +import { LitTxVoid } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +const addPermittedActionSchema = z + .object({ + ipfsId: isIpfsCidV0, + tokenId: toBigInt, + scopes: z.array(ScopeSchemaRaw), + }) + .transform((data) => { + return { + ...data, + ipfsId: ipfsCidV0ToHex(data.ipfsId), + }; + }); + +type AddPermittedActionRequest = z.input; + +export async function addPermittedAction( + request: AddPermittedActionRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = addPermittedActionSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + const hash = await callWithAdjustedOverrides( + pkpPermissionsContract, + 'addPermittedAction', + [validatedRequest.tokenId, validatedRequest.ipfsId, validatedRequest.scopes] + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + return { hash, receipt, decodedLogs }; +} + +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await addPermittedAction( +// { +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// scopes: ["sign-anything"], +// }, +// networkCtx +// ); + +// console.log(res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAddress.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAddress.ts new file mode 100644 index 0000000000..765cc57068 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAddress.ts @@ -0,0 +1,73 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { NagaContext } from '../../../../../../types'; +import { ScopeSchemaRaw } from '../../../../schemas/shared/ScopeSchema'; +import { LitTxVoid } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +const addPermittedAddressSchema = z.object({ + tokenId: toBigInt, + address: z + .string() + .regex(/^0x[a-fA-F0-9]{40}$/) + .transform((val): `0x${string}` => val as `0x${string}`), + scopes: z.array(ScopeSchemaRaw), +}); + +type AddPermittedAddressRequest = z.input; + +/** + * Adds a permitted address to a PKP token + * @param request - Object containing tokenId, address and scopes + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to transaction details + */ +export async function addPermittedAddress( + request: AddPermittedAddressRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = addPermittedAddressSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + pkpPermissionsContract.write.addPermittedAddress; + + const hash = await callWithAdjustedOverrides( + pkpPermissionsContract, + 'addPermittedAddress', + [ + validatedRequest.tokenId, + validatedRequest.address, + validatedRequest.scopes, + ] + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + return { hash, receipt, decodedLogs }; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await addPermittedAddress( +// { +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// address: "0x1234567890123456789012345678901234567890", +// scopes: ["sign-anything"], +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAction.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAction.ts new file mode 100644 index 0000000000..8bdb85ee3e --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAction.ts @@ -0,0 +1,70 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { ipfsCidV0ToHex } from '../../../../../../../shared/utils/transformers/ipfsCidV0ToHex'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { isIpfsCidV0 } from '../../../../../../../shared/utils/z-validate'; +import { NagaContext } from '../../../../../../types'; +import { LitTxVoid } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +const removePermittedActionSchema = z + .object({ + ipfsId: isIpfsCidV0, + tokenId: toBigInt, + }) + .transform((data) => { + return { + ...data, + ipfsId: ipfsCidV0ToHex(data.ipfsId), + }; + }); + +type RemovePermittedActionRequest = z.input; + +/** + * Removes a permitted action from a PKP token + * @param request - Object containing tokenId and ipfsId + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to transaction details + */ +export async function removePermittedAction( + request: RemovePermittedActionRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = removePermittedActionSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + const hash = await callWithAdjustedOverrides( + pkpPermissionsContract, + 'removePermittedAction', + [validatedRequest.tokenId, validatedRequest.ipfsId] + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + return { hash, receipt, decodedLogs }; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const res = await removePermittedAction( +// { +// ipfsId: "QmS4ghgMgPXR6fYW5tP4Y8Q22hF57kFnUJ9y4DgUJz1234", +// tokenId: +// "76136736151863037541847315168980811654782785653773679312890341037699996601290", +// }, +// networkCtx +// ); + +// console.log("res", res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts new file mode 100644 index 0000000000..c9e041f600 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts @@ -0,0 +1,67 @@ +// import { networkContext } from "../../../_config"; +import { z } from 'zod'; +import { logger } from '../../../../../../../shared/logger'; +import { toBigInt } from '../../../../../../../shared/utils/z-transformers'; +import { NagaContext } from '../../../../../../types'; +import { LitTxVoid } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +const removePermittedAddressSchema = z.object({ + tokenId: toBigInt, + address: z + .string() + .regex(/^0x[a-fA-F0-9]{40}$/) + .transform((val): `0x${string}` => val as `0x${string}`), +}); + +type RemovePermittedAddressRequest = z.input< + typeof removePermittedAddressSchema +>; + +/** + * Removes a permitted address from a PKP token + * @param request - Object containing tokenId and address + * @param networkCtx - Network context for the transaction + * @returns Promise resolving to transaction details + */ +export async function removePermittedAddress( + request: RemovePermittedAddressRequest, + networkCtx: NagaContext +): Promise { + const validatedRequest = removePermittedAddressSchema.parse(request); + logger.debug({ validatedRequest }); + + const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + const hash = await callWithAdjustedOverrides( + pkpPermissionsContract, + 'removePermittedAddress', + [validatedRequest.tokenId, validatedRequest.address] + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + return { hash, receipt, decodedLogs }; +} + +// Example usage when running as main +// if (import.meta.main) { +// const networkCtx = await import('../../../../_config'); +// const { networkContext } = networkCtx; + +// const res = await removePermittedAddress( +// { +// tokenId: +// '76136736151863037541847315168980811654782785653773679312890341037699996601290', +// address: '0x1234567890123456789012345678901234567890', +// }, +// networkContext +// ); + +// console.log('res', res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/read/getPubkeyByTokenId.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/read/getPubkeyByTokenId.ts new file mode 100644 index 0000000000..a2c7eb7f64 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/read/getPubkeyByTokenId.ts @@ -0,0 +1,42 @@ +import { NagaContext } from '../../../../../../types'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { logger } from '../../../../../../../shared/logger'; + +// Schema for the request +const getPubkeyByTokenIdSchema = z.object({ + tokenId: z.string(), +}); + +type GetPubkeyByTokenIdRequest = z.infer; + +/** + * Retrieves the public key associated with a PKP token ID + * @param request - Object containing the token ID + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to the public key as a string + */ +export async function getPubkeyByTokenId( + request: GetPubkeyByTokenIdRequest, + networkCtx: NagaContext +): Promise { + const { tokenId } = getPubkeyByTokenIdSchema.parse(request); + + logger.debug({ tokenId }, 'Fetching public key by token ID'); + + // Create contract instances + const { pubkeyRouterContract } = createLitContracts(networkCtx); + + // Convert tokenId to bigint for contract call + const tokenIdBigInt = BigInt(tokenId); + + // Call the contract to get the public key + const result = await pubkeyRouterContract.read.getPubkey([tokenIdBigInt]); + + // Ensure the result is a string + const publicKey = result.toString(); + + logger.debug({ tokenId, publicKey }, 'Public key fetched'); + + return publicKey; +} diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/read/tokenOfOwnerByIndex.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/read/tokenOfOwnerByIndex.ts new file mode 100644 index 0000000000..dc545c7c2e --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/read/tokenOfOwnerByIndex.ts @@ -0,0 +1,67 @@ +import { logger } from '../../../../../../../shared/logger'; +import { NagaContext } from '../../../../../../types'; +import { z } from 'zod'; +import { createLitContracts } from '../../../utils/createLitContracts'; + +// Schema for the request +const tokenOfOwnerByIndexSchema = z.object({ + ownerAddress: z.string().startsWith('0x'), + index: z.number().int().nonnegative(), +}); + +type TokenOfOwnerByIndexRequest = z.infer; + +/** + * Retrieves a PKP token ID owned by a specific address at a given index + * @param request - Object containing owner address and index + * @param networkCtx - Network context for contract interactions + * @returns Promise resolving to the token ID as a string + */ +export async function tokenOfOwnerByIndex( + request: TokenOfOwnerByIndexRequest, + networkCtx: NagaContext +): Promise { + const { ownerAddress, index } = tokenOfOwnerByIndexSchema.parse(request); + + logger.debug({ ownerAddress, index }, 'Fetching token of owner by index'); + + // Create contract instances + const { pkpNftContract } = createLitContracts(networkCtx); + // Convert index to bigint for contract call + const indexBigInt = BigInt(index); + + // Ensure ownerAddress is properly typed as a hex string + const typedOwnerAddress = ownerAddress as `0x${string}`; + // Call the contract to get the token ID + try { + const result = await pkpNftContract.read.tokenOfOwnerByIndex([ + typedOwnerAddress, + indexBigInt, + ]); + // Convert the result to a string + const tokenId = result.toString(); + + logger.debug( + { ownerAddress, index, tokenId }, + 'Token of owner by index fetched' + ); + + return tokenId; + } catch (e) { + throw new Error('Error fetching token of owner by index'); + } +} + +// if (import.meta.main) { +// const networkCtx = networkContext; + +// const tokenId = await tokenOfOwnerByIndex( +// { +// ownerAddress: "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", +// index: 0, +// }, +// networkCtx +// ); + +// console.log(tokenId); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.spec.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.spec.ts new file mode 100644 index 0000000000..d8372ce8ca --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.spec.ts @@ -0,0 +1,107 @@ +import { CallExecutionError, ContractFunctionRevertedError } from 'viem'; +import { networkContext, NetworkContext } from '../../../../_config'; +import { ClaimAndMintSchema } from '../../../../schemas/ClaimAndMintSchema'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { claimAndMint } from './claimAndMint'; + +describe('LitChainClient', () => { + let networkCtx: NetworkContext; + + beforeAll(async () => { + networkCtx = networkContext; + }); + + test('claimAndMint', async () => { + try { + const tx = await claimAndMint( + { + derivedKeyId: + '4d90d864b5f6adb1dd8ef5fbfc3d7ca74f6dd973f8c52ce12f8ce61aa6a1dfa4', + signatures: [ + { + r: '0xcc544fa05678fddff726ec2070bf0c4d2862e35f26ab74baede84dfdf117c841', + s: '0x2286aef0cd151175c63116cd622df3ea7bb8113982525ac07c0bd50d33ee7136', + v: 27, + }, + { + r: '0x7b2bbef14e4e277abe1ebb16e6803a4192c7157f2a7e190c6651b27d2b8eb98b', + s: '0x149d547cc36f1b996afa799c854fbe8776290864d22677e57f4fbbfac952f728', + v: 28, + }, + { + r: '0x59459b3830a4f5b365270a7cf559a8a4a8c90f348a68544e64fac3ed22190ad3', + s: '0x4d2bf3d3a9520fa205a60b6031aea84c5fe788fb5198a4a453fb9e20acb05488', + v: 28, + }, + ], + }, + networkCtx + ); + + console.log(tx); + + expect(tx.receipt.logs.length).toBeGreaterThan(0); + expect(tx.hash).toBeDefined(); + expect(tx.decodedLogs.length).toBeGreaterThan(0); + } catch (error) { + console.log(error); + + console.warn(`❗️If you want to pass this test then you need to generate a new unique keyId eg. + const res = await devEnv.litNodeClient.executeJs({ + authContext: getEoaAuthContext(devEnv, alice), + code: \`(async () => { + Lit.Actions.claimKey({keyId: "my-very-unique-key-id"}); + })();\`, + }); + `); + + const reason = ( + (error as CallExecutionError).cause as ContractFunctionRevertedError + ).reason; + expect(reason).toBe('PubkeyRouter: pubkey already has routing data'); + } + }); + + test('simulate claimAndMint', async () => { + const validatedRequest = ClaimAndMintSchema.parse({ + derivedKeyId: + 'fa9c79fc322d407c2b1f9e1589edd444c95bbadf4baf1f3a2863d33ee1ff7ab4', + signatures: [ + { + r: '0x87446889e5e551d88e968788d4f9651adcff0d2f4188ea9a27fe5d2436ddea9b', + s: '0x132ff3bdb078365c83bb5d24ee2c05408155b24234b39b962c8321a82d0c1f7f', + v: 27, + }, + { + r: '0xb15a8ed3a10f919301307ef463a72d40079c163107f43393cbf65701c73902de', + s: '0x20a4f1469c935363ac9cea5a7c5b65ffbd8f37c5d48be5c2e15966c9bbddde06', + v: 27, + }, + { + r: '0x97dee43dfbf3be22bc530e5322b33bf6a571d15c234e3d2251207d6c888bf140', + s: '0x7cfab33b2d4a9140089d2f0a4178b5fad0725fef4b6335741684f99715539bd1', + v: 27, + }, + ], + }); + const { derivedKeyId, signatures } = validatedRequest; + const { pkpNftContract, publicClient, stakingContract, walletClient } = + createLitContracts(networkCtx); + + const mintCost = await pkpNftContract.read.mintCost(); + + const REALM_ID = 1n; + const result = await publicClient.simulateContract({ + address: pkpNftContract.address, + abi: pkpNftContract.abi, + functionName: 'claimAndMint', + args: [REALM_ID, 2n, derivedKeyId, signatures, stakingContract.address], + value: mintCost, + account: walletClient.account!, + }); + + expect(result.result).toBe( + 39540774701362869188416741706549054806716702330527798538695592469657559009284n + ); + }); +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.ts new file mode 100644 index 0000000000..34d9243f7d --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMint.ts @@ -0,0 +1,55 @@ +import { NagaContext } from '../../../../../../types'; +import { + ClaimAndMintRaw, + ClaimAndMintSchema, +} from '../../../../schemas/ClaimAndMintSchema'; +import { + PKPData, + PKPDataSchema, +} from '../../../../schemas/shared/PKPDataSchema'; +import { LitTxRes } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; +export async function claimAndMint( + request: ClaimAndMintRaw, + networkCtx: NagaContext +): Promise> { + const validatedRequest = ClaimAndMintSchema.parse(request); + + const { derivedKeyId, signatures } = validatedRequest; + + const { pkpNftContract, publicClient, stakingContract, walletClient } = + createLitContracts(networkCtx); + + // Get mint cost + const mintCost = await pkpNftContract.read.mintCost(); + const ECDSA_SECP256K1 = 2n; + + const hash = await callWithAdjustedOverrides( + pkpNftContract, + 'claimAndMint', + [ + networkCtx.realmId, + ECDSA_SECP256K1, + derivedKeyId, + signatures, + stakingContract.address, + ], + { + value: mintCost, + account: null, + chain: null, + } + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + const args = decodedLogs.find((log) => log.eventName === 'PKPMinted')?.args; + + const data = PKPDataSchema.parse(args); + + return { hash, receipt, decodedLogs, data }; +} diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.spec.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.spec.ts new file mode 100644 index 0000000000..b9a678548a --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.spec.ts @@ -0,0 +1,63 @@ +import { networkContext, NetworkContext } from '../../../../_config'; +import { CallExecutionError, ContractFunctionRevertedError } from 'viem'; +import { claimAndMintNextAndAddAuthMethodsWithTypes } from './claimAndMintNextAndAddAuthMethodsWithTypes'; + +describe('LitChainClient', () => { + let networkCtx: NetworkContext; + + beforeAll(async () => { + networkCtx = networkContext; + }); + + test('claimAndMintNextAndAddAuthMethodsWithTypes', async () => { + try { + const tx = await claimAndMintNextAndAddAuthMethodsWithTypes( + { + derivedKeyId: + '62439a75ed81afa9366245c9107c413315a141b27129bd6340a9a7f9e63898a9', + signatures: [ + { + r: '0x08b8b9092f0e0a312b00be491382658ac18b3d6cb42c08a17b73eeeb92d7ac54', + s: '0x06da29df3f35b9db99cbfd20ebee83226777ebe52163f6cfe31baa25c829eb8a', + v: 27, + }, + { + r: '0x630e08a6feca8bc5d4078d87d8e846a7945bf0a8251d33f282a705ffedfce159', + s: '0x762fb3380187746975241f2441cf7579053517826ebf6baa798c820db565956f', + v: 28, + }, + { + r: '0x3757d04ea285fe52ec9efde9ae71d9f7113822ed7f34e112f5fbf4350c5161cc', + s: '0x027884f5fc8fb0079a4ce9d2c1021874ce36c3d1eca5a8832f85a5abcf9f50af', + v: 28, + }, + ], + authMethodType: 1, + authMethodId: '0x', + authMethodPubkey: '0x', + }, + networkCtx + ); + + console.log(tx); + + expect(tx.receipt.logs.length).toBeGreaterThan(0); + expect(tx.hash).toBeDefined(); + expect(tx.decodedLogs.length).toBeGreaterThan(0); + } catch (error) { + console.warn(`❗️If you want to pass this test then you need to generate a new unique keyId eg. +const res = await devEnv.litNodeClient.executeJs({ + authContext: getEoaAuthContext(devEnv, alice), + code: \`(async () => { + Lit.Actions.claimKey({keyId: "my-very-unique-key-id"}); + })();\`, +}); + `); + + const reason = ( + (error as CallExecutionError).cause as ContractFunctionRevertedError + ).reason; + expect(reason).toBe('PubkeyRouter: pubkey already has routing data'); + } + }); +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.ts new file mode 100644 index 0000000000..e5baba524f --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/claimAndMintNextAndAddAuthMethodsWithTypes.ts @@ -0,0 +1,90 @@ +import { NagaContext } from '../../../../../../types'; +import { + ClaimRequestRaw, + ClaimRequestSchema, +} from '../../../../schemas/ClaimRequestSchema'; +import { + PKPData, + PKPDataSchema, +} from '../../../../schemas/shared/PKPDataSchema'; +import { LitTxRes } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +/** + * Claims and mints a PKP using derived key ID and signatures, then adds authentication methods. + * + * @param {ClaimRequestRaw} request - The request object containing PKP claiming parameters + * @param {string} request.derivedKeyId - The derived key ID for claiming + * @param {Signature[]} request.signatures - Array of signatures required for claiming + * @param {number} request.authMethodType - The type of authentication method to add + * @param {string} request.authMethodId - The ID of the authentication method + * @param {string} request.authMethodPubkey - The public key of the authentication method + * + * @returns {Promise} Object containing transaction hash, receipt, and decoded logs + */ +export async function claimAndMintNextAndAddAuthMethodsWithTypes( + request: ClaimRequestRaw, + networkCtx: NagaContext +): Promise> { + const validatedRequest = ClaimRequestSchema.parse(request); + const { pkpHelperContract, pkpNftContract, publicClient } = + createLitContracts(networkCtx); + + // Get mint cost + const mintCost = await pkpNftContract.read.mintCost(); + const ECDSA_SECP256K1 = 2n; + + const AUTH_METHOD_SCOPE = { + SIGN_ANYTHING: 1n, + PERSONAL_SIGN: 2n, + } as const; + + const claimMaterial = { + keyType: ECDSA_SECP256K1, + derivedKeyId: validatedRequest.derivedKeyId, + signatures: validatedRequest.signatures, + }; + + const authMethodData = { + keyType: ECDSA_SECP256K1, + permittedIpfsCIDs: [], + permittedIpfsCIDScopes: [], + permittedAddresses: [], + permittedAddressScopes: [], + permittedAuthMethodTypes: [validatedRequest.authMethodType], + permittedAuthMethodIds: [validatedRequest.authMethodId], + permittedAuthMethodPubkeys: [validatedRequest.authMethodPubkey], + permittedAuthMethodScopes: [[AUTH_METHOD_SCOPE.SIGN_ANYTHING]], + addPkpEthAddressAsPermittedAddress: true, + sendPkpToItself: true, + }; + + const hash = await callWithAdjustedOverrides( + pkpHelperContract, + 'claimAndMintNextAndAddAuthMethodsWithTypes', + [claimMaterial, authMethodData], + { + value: mintCost, + account: null, + chain: null, + } + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + // { + // eventName: "PKPMinted", + // args: { + // tokenId: 46617443650351102737177954764827728186501111543181803171452029133339804161639n, + // pubkey: "0x045fb12df3d5c8482ab64f7cef10b7c44f9a55256e14ffe8bebe0c526279daa8379fd576b5ea5d26bc0b0973a1260138dfce3951b83378414acf8fe02fea299ccf", + // }, + // }, + const args = decodedLogs.find((log) => log.eventName === 'PKPMinted')?.args; + + const data = PKPDataSchema.parse(args); + + return { hash, receipt, decodedLogs, data }; +} diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.spec.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.spec.ts new file mode 100644 index 0000000000..ca59e22617 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.spec.ts @@ -0,0 +1,36 @@ +import { NetworkContext, networkContext } from '../../../../_config'; +import { mintNextAndAddAuthMethods } from './mintNextAndAddAuthMethods'; + +describe('LitChainClient', () => { + let networkCtx: NetworkContext; + + beforeAll(async () => { + networkCtx = networkContext; + }); + + test('mintNextAndAddAuthMethods', async () => { + const tx = await mintNextAndAddAuthMethods( + { + keyType: 2, + permittedAuthMethodTypes: [2], + permittedAuthMethodIds: [ + '170d13600caea2933912f39a0334eca3d22e472be203f937c4bad0213d92ed71', + ], + permittedAuthMethodPubkeys: ['0x'], + permittedAuthMethodScopes: [[1]], + addPkpEthAddressAsPermittedAddress: true, + sendPkpToItself: true, + }, + networkCtx + ); + + console.log(tx); + + expect(tx.receipt.logs.length).toBeGreaterThan(0); + expect(tx.hash).toBeDefined(); + expect(tx.decodedLogs.length).toBeGreaterThan(0); + expect(tx.data.tokenId).toBeDefined(); + expect(tx.data.pubkey).toMatch(/^0x/); + expect(tx.data.ethAddress).toMatch(/^0x/); + }); +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.ts new file mode 100644 index 0000000000..0d4afe1f4e --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pkp/write/mintNextAndAddAuthMethods.ts @@ -0,0 +1,75 @@ +import { NagaContext } from '../../../../../../types'; +import { + MintRequestRaw, + MintRequestSchema, +} from '../../../../schemas/MintRequestSchema'; +import { + PKPData, + PKPDataSchema, +} from '../../../../schemas/shared/PKPDataSchema'; +import { LitTxRes } from '../../../types'; +import { callWithAdjustedOverrides } from '../../../utils/callWithAdjustedOverrides'; +import { createLitContracts } from '../../../utils/createLitContracts'; +import { decodeLogs } from '../../../utils/decodeLogs'; + +/** + * Mints a new Programmable Key Pair (PKP) with specified authentication methods. + * + * @param {MintRequestRaw} request - The request object containing PKP minting parameters + * @param {number} request.keyType - The type of key to mint + * @param {number[]} request.permittedAuthMethodTypes - Array of permitted authentication method types + * @param {string[]} request.permittedAuthMethodIds - Array of permitted authentication method IDs + * @param {string[]} request.permittedAuthMethodPubkeys - Array of permitted authentication method public keys + * @param {string[][]} request.permittedAuthMethodScopes - Array of scopes for each authentication method + * @param {boolean} request.addPkpEthAddressAsPermittedAddress - Whether to add the PKP's Ethereum address as a permitted address + * @param {boolean} request.sendPkpToItself - Whether to send the PKP to itself + * + * @returns {Promise} Object containing transaction hash, receipt, and decoded logs + */ +export async function mintNextAndAddAuthMethods( + request: MintRequestRaw, + networkCtx: NagaContext +): Promise> { + const validatedRequest = MintRequestSchema.parse(request); + + const { pkpHelperContract, pkpNftContract, publicClient, walletClient } = + createLitContracts(networkCtx); + + const mintCost = await pkpNftContract.read.mintCost(); + + const hash = await callWithAdjustedOverrides( + pkpHelperContract, + 'mintNextAndAddAuthMethods', + [ + validatedRequest.keyType, + validatedRequest.permittedAuthMethodTypes, + validatedRequest.permittedAuthMethodIds, + validatedRequest.permittedAuthMethodPubkeys, + validatedRequest.permittedAuthMethodScopes, + validatedRequest.addPkpEthAddressAsPermittedAddress, + validatedRequest.sendPkpToItself, + ], + { + value: mintCost, + account: null, + chain: null, + } + ); + + const receipt = await publicClient.waitForTransactionReceipt({ hash }); + + const decodedLogs = await decodeLogs(receipt.logs, networkCtx); + + // { + // eventName: "PKPMinted", + // args: { + // tokenId: 46617443650351102737177954764827728186501111543181803171452029133339804161639n, + // pubkey: "0x045fb12df3d5c8482ab64f7cef10b7c44f9a55256e14ffe8bebe0c526279daa8379fd576b5ea5d26bc0b0973a1260138dfce3951b83378414acf8fe02fea299ccf", + // }, + // }, + const args = decodedLogs.find((log) => log.eventName === 'PKPMinted')?.args; + + const data = PKPDataSchema.parse(args); + + return { hash, receipt, decodedLogs, data }; +} diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pricing/getNodesForRequest.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pricing/getNodesForRequest.ts new file mode 100644 index 0000000000..65d76a6962 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/pricing/getNodesForRequest.ts @@ -0,0 +1,77 @@ +import { z } from 'zod'; +import { generateValidatorURLs } from '../../../../../../shared/utils/transformers'; +import { NagaContext } from '../../../../../types'; +import { createLitContracts } from '../../utils/createLitContracts'; + +/** + * Product IDs used for price feed and node selection + * + * - DECRYPTION (0): Used for decryption operations + * - SIGN (1): Used for signing operations + * - LA (2): Used for Lit Actions execution + */ +export const PRODUCT_IDS = { + DECRYPTION: 0n, // For decryption operations + SIGN: 1n, // For signing operations + LIT_ACTION: 2n, // For Lit Actions execution +} as const; + +// Schema for the request +const getNodesForRequestSchema = z.object({ + productIds: z.array(z.bigint()).default(Object.values(PRODUCT_IDS)), +}); + +type GetNodesForRequestRequest = z.infer; + +/** + * Get nodes available for a request with their pricing information + * + * This function retrieves information about nodes that can service a request, + * including their pricing data for various product IDs. + * + * @param request - Object containing product IDs to get pricing for + * @param networkCtx - The Naga network context + * @returns Information about nodes, their prices, epoch ID, and minimum node count + */ +export async function getNodesForRequest( + request: GetNodesForRequestRequest, + networkCtx: NagaContext +) { + const { productIds } = getNodesForRequestSchema.parse(request); + + const { priceFeed } = createLitContracts(networkCtx); + + const nodesForRequest = await priceFeed.read.getNodesForRequest([ + networkCtx.realmId, + productIds, + ]); + + const epochId = nodesForRequest[0]; + const minNodeCount = nodesForRequest[1]; + const nodesAndPrices = nodesForRequest[2]; + + const nodesAndPricesWithUrls = nodesAndPrices.map((info) => { + const { validator } = info; + const validatorUrl = generateValidatorURLs([validator]); + const fullUrl = networkCtx.httpProtocol + validatorUrl; + return { + ...info, + validatorUrl: fullUrl, + }; + }); + + return { + epochId, + minNodeCount, + nodesAndPrices: nodesAndPricesWithUrls, + }; +} + +// if (import.meta.main) { +// const networkCtx = networkContext; +// const res = await getNodesForRequest( +// { productIds: Object.values(PRODUCT_IDS) }, +// networkCtx +// ); +// console.log(res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.spec.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.spec.ts new file mode 100644 index 0000000000..fc5eb6b204 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.spec.ts @@ -0,0 +1,34 @@ +// Jest is automatically imported in the global scope +// No need to import describe, test, expect, beforeAll +import { networkContext, NetworkContext } from '../../../_config'; +import { getActiveUnkickedValidatorStructsAndCounts } from './getActiveUnkickedValidatorStructsAndCounts'; + +describe('LitChainClient', () => { + let networkCtx: NetworkContext; + + beforeAll(async () => { + networkCtx = networkContext; + }); + + // Expected output: + // { + // epochInfo: { + // epochLength: 300, + // number: 31316, + // endTime: 1740008064, + // retries: 0, + // timeout: 60, + // }, + // minNodeCount: 2, + // validatorURLs: [ "https://15.235.83.220:7470", "https://15.235.83.220:7472", "https://15.235.83.220:7471" ], + // } + test('getActiveUnkickedValidatorStructsAndCounts', async () => { + const res = await getActiveUnkickedValidatorStructsAndCounts(networkCtx); + console.log(res); + expect(res.minNodeCount).toBeGreaterThanOrEqual(2); + expect(res.epochInfo.epochLength).toBeGreaterThan(0); + expect(res.validatorURLs.length).toBeGreaterThanOrEqual( + Number(res.minNodeCount) + ); + }); +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.ts new file mode 100644 index 0000000000..05035b8d4f --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/rawContractApis/staking/getActiveUnkickedValidatorStructsAndCounts.ts @@ -0,0 +1,46 @@ +import { NagaContext } from '../../../../../types'; +import { GetActiveUnkickedValidatorStructsAndCountsSchema } from '../../../schemas/GetActiveUnkickedValidatorStructsAndCountsSchema'; +import { createLitContracts } from '../../utils/createLitContracts'; + +// const REALM_ID = 1n; + +export async function getActiveUnkickedValidatorStructsAndCounts( + networkCtx: NagaContext +) { + const { stakingContract } = createLitContracts(networkCtx); + + const res = + await stakingContract.read.getActiveUnkickedValidatorStructsAndCounts([ + networkCtx.realmId, + ]); + + const validatedRes = + GetActiveUnkickedValidatorStructsAndCountsSchema.parse(res); + + const transformedRes = { + ...validatedRes, + validatorURLs: validatedRes.validatorURLs.map( + (url) => networkCtx.httpProtocol + url + ), + }; + + return transformedRes; +} + +// Expected output: +// { +// epochInfo: { +// epochLength: 300, +// number: 34144, +// endTime: 1741198445, +// retries: 0, +// timeout: 60, +// }, +// minNodeCount: 2, +// validatorURLs: [ "https://15.235.83.220:7470", "https://15.235.83.220:7472", "https://15.235.83.220:7471" ], +// } +// if (import.meta.main) { +// const { networkContext } = await import('../../../_config'); +// const res = await getActiveUnkickedValidatorStructsAndCounts(networkContext); +// console.log(res); +// } diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/types.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/types.ts new file mode 100644 index 0000000000..93e6450ab7 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/types.ts @@ -0,0 +1,15 @@ +import { Hex, TransactionReceipt } from 'viem'; +import { DecodedLog } from './utils/decodeLogs'; + +export type LitTxRes = { + hash: Hex; + receipt: TransactionReceipt; + decodedLogs: DecodedLog[]; + data: T; +}; + +export type LitTxVoid = { + hash: Hex; + receipt: TransactionReceipt; + decodedLogs: DecodedLog[]; +}; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/callWithAdjustedOverrides.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/callWithAdjustedOverrides.ts new file mode 100644 index 0000000000..a22967a7d0 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/callWithAdjustedOverrides.ts @@ -0,0 +1,57 @@ +import { Hash } from 'viem'; +import { GAS_LIMIT_ADJUSTMENT } from '../../_config'; + +/** + * Strongly-typed wrapper around viem's `writeContract` that adjusts gas overrides for Arbitrum Stylus contracts + * NOTE: It must use an instance of a contract (from `getContract` viem function) so that we can infer the correct types + * @param contract The contract instance to call + * @param methodName The name of the contract method to call + * @param args The arguments to pass to the contract method + * @param overrides Optional transaction overrides (e.g. value, gasLimit) + * @returns A Promise that resolves to the transaction hash + */ +export async function callWithAdjustedOverrides< + TContract extends { + write: Record Promise>; + estimateGas: Record Promise>; + }, + TMethodName extends keyof TContract['write'], + TFunction extends TContract['write'][TMethodName], + TArgs extends Parameters[0] +>( + contract: TContract, + methodName: TMethodName & string, + args: TArgs, + overrides?: Parameters[1] +): Promise { + // Get the write function from the contract + const writeFunction = contract.write[methodName]; + if (!writeFunction) { + throw new Error(`Method ${methodName} not found on contract`); + } + + if (!overrides?.gas) { + // Otherwise estimate and adjust gas + const estimatedGas = await contract.estimateGas[methodName]( + args, + overrides + ); + + const adjustedGas = + (estimatedGas * BigInt(GAS_LIMIT_ADJUSTMENT)) / BigInt(100); + overrides = { + ...overrides, + gas: adjustedGas, + }; + } + + // For contract methods that expect array arguments, we need to pass the first array argument + // This handles cases where the contract method expects [arg1, arg2, ...] but we pass [[arg1, arg2, ...]] + const contractArgs = + Array.isArray(args) && args.length === 1 && Array.isArray(args[0]) + ? args[0] + : args; + + // Call the contract method with the provided arguments and overrides + return writeFunction(contractArgs, overrides); +} diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/createLitContracts.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/createLitContracts.ts new file mode 100644 index 0000000000..841af7de6d --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/createLitContracts.ts @@ -0,0 +1,228 @@ +import { + createPublicClient, + createWalletClient, + getContract, + Hex, + http, + PublicClient, + WalletClient, +} from 'viem'; +import { privateKeyToAccount } from 'viem/accounts'; +import { NagaContext } from '../../../../../vNaga/types'; +import { networkContext as defaultNetworkContext } from '../../_config'; +interface CreateLitContractsOptions { + publicClient?: PublicClient; +} + +// ============================================================================================================================================= +// ❗️ These types are required to fix the following error +// ERROR: The inferred type of this node exceeds the maximum length the compiler will serialize. An explicit type annotation is needed.ts(7056) +// If you could fix this WITHOUT breaking this code apart, or without setting the tsconfig's "declaration" to false, please do fix this. 🙏 +// ============================================================================================================================================= + +// Extract just the ContractData type, and you can use this type for variables that will eventually hold contract data +let futureContractData = defaultNetworkContext.chainConfig.contractData; + +const pkpNftContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.PKPNFT.methods.claimAndMint, + futureContractData.PKPNFT.methods.mintCost, + futureContractData.PKPNFT.methods.tokenOfOwnerByIndex, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const pkpHelperContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.PKPHelper.methods + .claimAndMintNextAndAddAuthMethodsWithTypes, + futureContractData.PKPHelper.methods.mintNextAndAddAuthMethods, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const stakingContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.Staking.methods + .getActiveUnkickedValidatorStructsAndCounts, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const priceFeedContractType = getContract({ + address: undefined as unknown as Hex, + abi: [futureContractData.PriceFeed.methods.getNodesForRequest], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const pkpPermissionsContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.PKPPermissions.methods.addPermittedAction, + futureContractData.PKPPermissions.methods.addPermittedAddress, + futureContractData.PKPPermissions.methods.getPermittedActions, + futureContractData.PKPPermissions.methods.getPermittedAddresses, + futureContractData.PKPPermissions.methods.getPermittedAuthMethods, + futureContractData.PKPPermissions.methods.getPermittedAuthMethodScopes, + futureContractData.PKPPermissions.methods.removePermittedAction, + futureContractData.PKPPermissions.methods.removePermittedAddress, + futureContractData.PKPPermissions.methods.isPermittedAction, + futureContractData.PKPPermissions.methods.isPermittedAddress, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); + +const pubkeyRouterContractType = getContract({ + address: undefined as unknown as Hex, + abi: [ + futureContractData.PubkeyRouter.methods.deriveEthAddressFromPubkey, + futureContractData.PubkeyRouter.methods.ethAddressToPkpId, + futureContractData.PubkeyRouter.methods.getEthAddress, + futureContractData.PubkeyRouter.methods.getPubkey, + ], + client: { + public: undefined as unknown as PublicClient, + wallet: undefined as unknown as WalletClient, + }, +}); +// Hacky fix ends + +export const createLitContracts = ( + networkCtx: NagaContext, + opts?: CreateLitContractsOptions +) => { + // 1. Fallback to env-based private key if user doesn't supply a wagmi walletClient + const fallbackTransport = http(networkCtx.rpcUrl); + const fallbackAccount = privateKeyToAccount( + networkCtx.privateKey as `0x${string}` + ); + + // 2. Decide which publicClient to use + const publicClient = + opts?.publicClient ?? + createPublicClient({ + chain: networkCtx.chainConfig.chain, + transport: fallbackTransport, + }); + + // 3. Decide which walletClient to use + const walletClient = + networkCtx?.walletClient ?? + createWalletClient({ + chain: networkCtx.chainConfig.chain, + transport: fallbackTransport, + account: fallbackAccount, + }); + + // 4. Get the contract data + const contractData = networkCtx.chainConfig.contractData; + + if (!contractData) { + throw new Error( + `Contract data not found for network: ${networkCtx.network}` + ); + } + + // ---------- All your contracts ---------- + const pkpNftContract = getContract({ + address: contractData.PKPNFT.address, + abi: [ + contractData.PKPNFT.methods.claimAndMint, + contractData.PKPNFT.methods.mintCost, + contractData.PKPNFT.methods.tokenOfOwnerByIndex, + ...contractData.PKPNFT.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const pkpHelperContract = getContract({ + address: contractData.PKPHelper.address, + abi: [ + contractData.PKPHelper.methods.claimAndMintNextAndAddAuthMethodsWithTypes, + contractData.PKPHelper.methods.mintNextAndAddAuthMethods, + ...contractData.PKPHelper.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const stakingContract = getContract({ + address: contractData.Staking.address, + abi: [ + contractData.Staking.methods.getActiveUnkickedValidatorStructsAndCounts, + ...contractData.Staking.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const priceFeed = getContract({ + address: contractData.PriceFeed.address, + abi: [ + contractData.PriceFeed.methods.getNodesForRequest, + ...contractData.PriceFeed.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const pkpPermissionsContract = getContract({ + address: contractData.PKPPermissions.address, + abi: [ + contractData.PKPPermissions.methods.addPermittedAction, + contractData.PKPPermissions.methods.addPermittedAddress, + contractData.PKPPermissions.methods.getPermittedActions, + contractData.PKPPermissions.methods.getPermittedAddresses, + contractData.PKPPermissions.methods.getPermittedAuthMethods, + contractData.PKPPermissions.methods.getPermittedAuthMethodScopes, + contractData.PKPPermissions.methods.removePermittedAction, + contractData.PKPPermissions.methods.removePermittedAddress, + contractData.PKPPermissions.methods.isPermittedAction, + contractData.PKPPermissions.methods.isPermittedAddress, + ...contractData.PKPPermissions.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + const pubkeyRouterContract = getContract({ + address: contractData.PubkeyRouter.address, + abi: [ + contractData.PubkeyRouter.methods.deriveEthAddressFromPubkey, + contractData.PubkeyRouter.methods.ethAddressToPkpId, + contractData.PubkeyRouter.methods.getEthAddress, + contractData.PubkeyRouter.methods.getPubkey, + ...contractData.PubkeyRouter.events, + ], + client: { public: publicClient, wallet: walletClient }, + }); + + // ---------- End of all your contracts ---------- + return { + pkpNftContract: pkpNftContract as unknown as typeof pkpNftContractType, + pkpHelperContract: + pkpHelperContract as unknown as typeof pkpHelperContractType, + stakingContract: stakingContract as unknown as typeof stakingContractType, + priceFeed: priceFeed as unknown as typeof priceFeedContractType, + pkpPermissionsContract: + pkpPermissionsContract as unknown as typeof pkpPermissionsContractType, + pubkeyRouterContract: + pubkeyRouterContract as unknown as typeof pubkeyRouterContractType, + publicClient, + walletClient, + }; +}; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/decodeLogs.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/decodeLogs.ts new file mode 100644 index 0000000000..eb6a95f790 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/decodeLogs.ts @@ -0,0 +1,82 @@ +import { decodeEventLog, Log } from 'viem'; +import { NagaContext } from '../../../../types'; +import { createLitContracts } from './createLitContracts'; + +export type DecodedLog = { + eventName: string; + args: { + [key: string]: any; + }; +}; + +/** + * Decodes event logs from Lit Protocol contract transactions + * @param logs Array of transaction logs to decode + * @returns Array of decoded logs with event names and parameters + */ +export const decodeLogs = async ( + logs: Log[], + networkCtx: NagaContext +): Promise => { + // Get network context for contract ABIs + const networkContext = networkCtx.chainConfig.contractData; + + if (!networkContext) { + throw new Error(`Network "${networkCtx.network}" not found`); + } + + const { + pkpHelperContract, + pkpNftContract, + pkpPermissionsContract, + pubkeyRouterContract, + publicClient, + walletClient, + } = createLitContracts(networkCtx); + + // Map contract addresses to their ABIs + const contractABIs = new Map(); + contractABIs.set(pkpNftContract.address.toLowerCase(), pkpNftContract.abi); + contractABIs.set( + pkpHelperContract.address.toLowerCase(), + pkpHelperContract.abi + ); + contractABIs.set( + pkpPermissionsContract.address.toLowerCase(), + pkpPermissionsContract.abi + ); + contractABIs.set( + pubkeyRouterContract.address.toLowerCase(), + pubkeyRouterContract.abi + ); + + // Decode each log + const decodedLogs = logs.map((log) => { + try { + const abi = contractABIs.get(log.address.toLowerCase()); + if (!abi) { + return { + ...log, + decoded: null, + error: 'No matching ABI found for address', + }; + } + + const decoded = decodeEventLog({ + abi, + data: log.data, + topics: log.topics, + }); + + return decoded; + } catch (error) { + return { + ...log, + decoded: null, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } + }); + + return decodedLogs as DecodedLog[]; +}; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/index.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/index.ts new file mode 100644 index 0000000000..8e910774fd --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/apis/utils/index.ts @@ -0,0 +1 @@ +export { createLitContracts } from './createLitContracts'; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/index.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/index.ts new file mode 100644 index 0000000000..dba87604f4 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/index.ts @@ -0,0 +1 @@ +export * from './apis/index'; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/ClaimAndMintSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/ClaimAndMintSchema.ts new file mode 100644 index 0000000000..bbe6dba2a7 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/ClaimAndMintSchema.ts @@ -0,0 +1,14 @@ +import { z } from 'zod'; +import { toHexString } from '../../../../shared/utils/z-transformers'; +import { SignatureDataSchema } from './shared/SignatureDataSchema'; + +export const ClaimAndMintSchema = z.object({ + derivedKeyId: toHexString, + signatures: z.array(SignatureDataSchema), +}); + +// ✨ Two types from the same schema: +// 1. User Input Type - this is the type that the user will input, eg. the API we expose for the user to call, could be a function of a request body from a POST request. (e.g., number, string, etc.) +// 2. Transformed/Validated Type - this is the type after the user input has been transformed and validated. Usually used for smart contract calls or external API calls (such as communication with nodes). (e.g., BigInt, etc.) +export type ClaimAndMintRaw = z.input; +export type ClaimAndMintTransformed = z.infer; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/ClaimRequestSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/ClaimRequestSchema.ts new file mode 100644 index 0000000000..122c51466c --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/ClaimRequestSchema.ts @@ -0,0 +1,33 @@ +import { t } from 'elysia'; +import { z } from 'zod'; +import { toBigInt, toHexString } from '../../../../shared/utils/z-transformers'; +import { SignatureDataSchema } from './shared/SignatureDataSchema'; + +export const ClaimRequestSchema = z.object({ + derivedKeyId: toHexString, + signatures: z.array(SignatureDataSchema), + authMethodType: toBigInt, + authMethodId: toHexString, + authMethodPubkey: toHexString, +}); + +// ✨ Two types from the same schema: +// 1. User Input Type - this is the type that the user will input, eg. the API we expose for the user to call, could be a function of a request body from a POST request. (e.g., number, string, etc.) +// 2. Transformed/Validated Type - this is the type after the user input has been transformed and validated. Usually used for smart contract calls or external API calls (such as communication with nodes). (e.g., BigInt, etc.) +export type ClaimRequestRaw = z.input; +export type ClaimRequestTransformed = z.infer; + +// ✨ Elysia Schema +export const tClaimRequestSchema = t.Object({ + derivedKeyId: t.String(), + signatures: t.Array( + t.Object({ + r: t.String(), + s: t.String(), + v: t.Number(), + }) + ), + authMethodType: t.Number(), + authMethodId: t.String(), + authMethodPubkey: t.String(), +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/GetActiveUnkickedValidatorStructsAndCountsSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/GetActiveUnkickedValidatorStructsAndCountsSchema.ts new file mode 100644 index 0000000000..bbcdbcb927 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/GetActiveUnkickedValidatorStructsAndCountsSchema.ts @@ -0,0 +1,61 @@ +import { z } from 'zod'; +import { generateValidatorURLs } from '../../../../shared/utils/transformers'; +import { toNumber } from '../../../../shared/utils/z-transformers'; + +const EpochInfoSchema = z.object({ + epochLength: toNumber, + number: toNumber, + endTime: toNumber, + retries: toNumber, + timeout: toNumber, +}); + +type EpochInfo = z.infer; + +const ValidatorStructSchema = z.object({ + ip: z.number(), + ipv6: z.bigint(), + port: z.number(), + nodeAddress: z.string().regex(/^0x[a-fA-F0-9]{40}$/), + reward: z.bigint(), + senderPubKey: z.bigint(), + receiverPubKey: z.bigint(), +}); + +type ValidatorStruct = z.infer; + +export const GetActiveUnkickedValidatorStructsAndCountsSchema = z + .array(z.union([EpochInfoSchema, toNumber, z.array(ValidatorStructSchema)])) + .transform((ctx) => { + const epochInfo = ctx[0] as EpochInfo; + const minNodeCount = ctx[1]; + const activeUnkickedValidatorStructs = ctx[2] as ValidatorStruct[]; + + const validatorURLs = generateValidatorURLs(activeUnkickedValidatorStructs); + + if (!minNodeCount) { + throw new Error('❌ Minimum validator count is not set'); + } + + if (validatorURLs.length < Number(minNodeCount)) { + throw new Error( + `❌ Active validator set does not meet the consensus. Required: ${minNodeCount} but got: ${activeUnkickedValidatorStructs.length}` + ); + } + + return { + epochInfo, + minNodeCount, + validatorURLs, + }; + }); + +// ✨ Two types from the same schema: +// 1. User Input Type - this is the type that the user will input, eg. the API we expose for the user to call, could be a function of a request body from a POST request. (e.g., number, string, etc.) +// 2. Transformed/Validated Type - this is the type after the user input has been transformed and validated. Usually used for smart contract calls or external API calls (such as communication with nodes). (e.g., BigInt, etc.) +export type GetActiveUnkickedValidatorStructsAndCountsRaw = z.input< + typeof GetActiveUnkickedValidatorStructsAndCountsSchema +>; +export type GetActiveUnkickedValidatorStructsAndCountsTransformed = z.infer< + typeof GetActiveUnkickedValidatorStructsAndCountsSchema +>; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/MintRequestSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/MintRequestSchema.ts new file mode 100644 index 0000000000..32ea49f4b9 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/MintRequestSchema.ts @@ -0,0 +1,36 @@ +import { t } from 'elysia'; +import { z } from 'zod'; +import { + toBigInt, + toBigIntArray, + toBigIntMatrix, + toBoolean, + toHexStringArray, +} from '../../../../shared/utils/z-transformers'; + +export const MintRequestSchema = z.object({ + keyType: toBigInt, + permittedAuthMethodTypes: toBigIntArray, + permittedAuthMethodIds: toHexStringArray, + permittedAuthMethodPubkeys: toHexStringArray, + permittedAuthMethodScopes: toBigIntMatrix, + addPkpEthAddressAsPermittedAddress: toBoolean, + sendPkpToItself: toBoolean, +}); + +// ✨ Two types from the same schema: +// 1. User Input Type - this is the type that the user will input, eg. the API we expose for the user to call, could be a function of a request body from a POST request. (e.g., number, string, etc.) +// 2. Transformed/Validated Type - this is the type after the user input has been transformed and validated. Usually used for smart contract calls or external API calls (such as communication with nodes). (e.g., BigInt, etc.) +export type MintRequestRaw = z.input; +export type MintRequestTransformed = z.infer; + +// ✨ Elysia Schema +export const tMintRequestSchema = t.Object({ + keyType: t.Number(), + permittedAuthMethodTypes: t.Array(t.Number()), + permittedAuthMethodIds: t.Array(t.String()), + permittedAuthMethodPubkeys: t.Array(t.String()), + permittedAuthMethodScopes: t.Array(t.Array(t.Number())), + addPkpEthAddressAsPermittedAddress: t.Boolean(), + sendPkpToItself: t.Boolean(), +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/AuthMethodSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/AuthMethodSchema.ts new file mode 100644 index 0000000000..507589ae01 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/AuthMethodSchema.ts @@ -0,0 +1,24 @@ +import { z } from 'zod'; + +const AUTH_METHOD_TYPE = { + EthWallet: 1, + LitAction: 2, + WebAuthn: 3, + Discord: 4, + Google: 5, + GoogleJwt: 6, + AppleJwt: 8, + StytchOtp: 9, + StytchEmailFactorOtp: 10, + StytchSmsFactorOtp: 11, + StytchWhatsAppFactorOtp: 12, + StytchTotpFactorOtp: 13, +} as const; + +export const AuthMethodSchema = z.object({ + authMethodType: z.nativeEnum(AUTH_METHOD_TYPE), + accessToken: z.string(), +}); + +// enable this if needed +// export type AuthMethod = z.infer; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/PKPDataSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/PKPDataSchema.ts new file mode 100644 index 0000000000..71c5fe9919 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/PKPDataSchema.ts @@ -0,0 +1,14 @@ +import { computeAddress } from 'ethers/lib/utils'; +import { z } from 'zod'; + +export const PKPDataSchema = z + .object({ + tokenId: z.bigint(), + pubkey: z.string(), + }) + .transform((data) => ({ + ...data, + ethAddress: computeAddress(data.pubkey), + })); + +export type PKPData = z.infer; diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/ScopeSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/ScopeSchema.ts new file mode 100644 index 0000000000..17f0c85eba --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/ScopeSchema.ts @@ -0,0 +1,30 @@ +import { z } from 'zod'; + +/** + * Defines schemas for PKP permission scopes. + * Handles both string inputs and bigint transformations for contract calls. + */ + +// Valid scope values +export const SCOPE_VALUES = [ + 'no-permissions', + 'sign-anything', + 'personal-sign', +] as const; +export type ScopeString = (typeof SCOPE_VALUES)[number]; + +// Mapping from string scopes to their bigint representation +export const SCOPE_MAPPING = { + 'no-permissions': 0n, + 'sign-anything': 1n, + 'personal-sign': 2n, +} as const; +export type ScopeBigInt = (typeof SCOPE_MAPPING)[ScopeString]; + +// Schema for string values (used in high-level APIs) +export const ScopeStringSchema = z.enum(SCOPE_VALUES); + +// Schema that transforms strings to bigints (used in contract calls) +export const ScopeSchemaRaw = ScopeStringSchema.transform( + (val) => SCOPE_MAPPING[val] +); diff --git a/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/SignatureDataSchema.ts b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/SignatureDataSchema.ts new file mode 100644 index 0000000000..05c3fd9c86 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/LitChainClient/schemas/shared/SignatureDataSchema.ts @@ -0,0 +1,8 @@ +import { z } from 'zod'; +import { toHexString } from '../../../../../shared/utils/z-transformers'; + +export const SignatureDataSchema = z.object({ + r: toHexString, + s: toHexString, + v: z.number(), +}); diff --git a/packages/networks/src/lib/networks/vNaga/common/NetworkContext.ts b/packages/networks/src/lib/networks/vNaga/common/NetworkContext.ts new file mode 100644 index 0000000000..690ce4d5d7 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/common/NetworkContext.ts @@ -0,0 +1,18 @@ +import { Chain, WalletClient } from 'viem'; + +const HTTP = 'http://' as const; +const HTTPS = 'https://' as const; + +// Naga Network Context +export interface INetworkContext { + network: string; + rpcUrl: string; + privateKey: string; + chainConfig: { + chain: Chain; + contractData: T; + }; + httpProtocol: typeof HTTP | typeof HTTPS; + walletClient: WalletClient; + realmId: bigint; +} diff --git a/packages/networks/src/lib/networks/vNaga/local-develop/getCustomContext.ts b/packages/networks/src/lib/networks/vNaga/local-develop/getCustomContext.ts new file mode 100644 index 0000000000..9d3a8256fb --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/local-develop/getCustomContext.ts @@ -0,0 +1,25 @@ +// @ts-ignore - need to fix the import path in the lit-protocol/contracts package +import { generateSignaturesFromContext } from '@lit-protocol/contracts/custom-network-signatures'; + +const JSON_FILE_PATH = process.env['NETWORK_CONFIG'] as string; + +if (!JSON_FILE_PATH) { + throw new Error( + '❌ NETWORK_CONFIG is not set. Please set it in your .env file.' + ); +} + +async function main() { + await generateSignaturesFromContext({ + jsonFilePath: JSON_FILE_PATH, + networkName: 'naga-develop', + outputDir: './naga-develop-signatures', + useScriptDirectory: true, + + // @ts-ignore + callerPath: import.meta.url, + }); +} + +// gogogo! +main().catch(console.error); diff --git a/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.cjs b/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.cjs new file mode 100644 index 0000000000..d1726ee86b --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.cjs @@ -0,0 +1,2592 @@ +/** + * Generated Contract Method Signatures for naga-develop + * This file is auto-generated. DO NOT EDIT UNLESS YOU KNOW WHAT YOU'RE DOING. + */ + +const signatures = { + PKPHelper: { + address: '0x04C89607413713Ec9775E14b954286519d836FEf', + methods: { + claimAndMintNextAndAddAuthMethodsWithTypes: { + inputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + { + components: [ + { + internalType: 'bytes32', + name: 'r', + type: 'bytes32', + }, + { + internalType: 'bytes32', + name: 's', + type: 'bytes32', + }, + { + internalType: 'uint8', + name: 'v', + type: 'uint8', + }, + ], + internalType: 'struct IPubkeyRouter.Signature[]', + name: 'signatures', + type: 'tuple[]', + }, + ], + internalType: 'struct LibPKPNFTStorage.ClaimMaterial', + name: 'claimMaterial', + type: 'tuple', + }, + { + components: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes[]', + name: 'permittedIpfsCIDs', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedIpfsCIDScopes', + type: 'uint256[][]', + }, + { + internalType: 'address[]', + name: 'permittedAddresses', + type: 'address[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAddressScopes', + type: 'uint256[][]', + }, + { + internalType: 'uint256[]', + name: 'permittedAuthMethodTypes', + type: 'uint256[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodIds', + type: 'bytes[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodPubkeys', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAuthMethodScopes', + type: 'uint256[][]', + }, + { + internalType: 'bool', + name: 'addPkpEthAddressAsPermittedAddress', + type: 'bool', + }, + { + internalType: 'bool', + name: 'sendPkpToItself', + type: 'bool', + }, + ], + internalType: 'struct PKPHelper.AuthMethodData', + name: 'authMethodData', + type: 'tuple', + }, + ], + name: 'claimAndMintNextAndAddAuthMethodsWithTypes', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + mintNextAndAddAuthMethods: { + inputs: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'uint256[]', + name: 'permittedAuthMethodTypes', + type: 'uint256[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodIds', + type: 'bytes[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodPubkeys', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAuthMethodScopes', + type: 'uint256[][]', + }, + { + internalType: 'bool', + name: 'addPkpEthAddressAsPermittedAddress', + type: 'bool', + }, + { + internalType: 'bool', + name: 'sendPkpToItself', + type: 'bool', + }, + ], + name: 'mintNextAndAddAuthMethods', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'bytes32', + name: 'previousAdminRole', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'bytes32', + name: 'newAdminRole', + type: 'bytes32', + }, + ], + name: 'RoleAdminChanged', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'address', + name: 'account', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'RoleGranted', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'address', + name: 'account', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'RoleRevoked', + type: 'event', + }, + ], + }, + PKPNFT: { + address: '0x99bbA657f2BbC93c02D617f8bA121cB8Fc104Acf', + methods: { + claimAndMint: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + { + components: [ + { + internalType: 'bytes32', + name: 'r', + type: 'bytes32', + }, + { + internalType: 'bytes32', + name: 's', + type: 'bytes32', + }, + { + internalType: 'uint8', + name: 'v', + type: 'uint8', + }, + ], + internalType: 'struct IPubkeyRouter.Signature[]', + name: 'signatures', + type: 'tuple[]', + }, + { + internalType: 'address', + name: 'stakingContractAddress', + type: 'address', + }, + ], + name: 'claimAndMint', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + mintCost: { + inputs: [], + name: 'mintCost', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + tokenOfOwnerByIndex: { + inputs: [ + { + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + internalType: 'uint256', + name: 'index', + type: 'uint256', + }, + ], + name: 'tokenOfOwnerByIndex', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'approved', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'Approval', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'operator', + type: 'address', + }, + { + indexed: false, + internalType: 'bool', + name: 'approved', + type: 'bool', + }, + ], + name: 'ApprovalForAll', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'newFreeMintSigner', + type: 'address', + }, + ], + name: 'FreeMintSignerSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint8', + name: 'version', + type: 'uint8', + }, + ], + name: 'Initialized', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newMintCost', + type: 'uint256', + }, + ], + name: 'MintCostSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + ], + name: 'PKPMinted', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'from', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'to', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'Transfer', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Withdrew', + type: 'event', + }, + ], + }, + PKPPermissions: { + address: '0xdbC43Ba45381e02825b14322cDdd15eC4B3164E6', + methods: { + addPermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + { + internalType: 'uint256[]', + name: 'scopes', + type: 'uint256[]', + }, + ], + name: 'addPermittedAction', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + addPermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + { + internalType: 'uint256[]', + name: 'scopes', + type: 'uint256[]', + }, + ], + name: 'addPermittedAddress', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + getPermittedActions: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedActions', + outputs: [ + { + internalType: 'bytes[]', + name: '', + type: 'bytes[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAddresses: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedAddresses', + outputs: [ + { + internalType: 'address[]', + name: '', + type: 'address[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAuthMethodScopes: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + internalType: 'uint256', + name: 'maxScopeId', + type: 'uint256', + }, + ], + name: 'getPermittedAuthMethodScopes', + outputs: [ + { + internalType: 'bool[]', + name: '', + type: 'bool[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAuthMethods: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedAuthMethods', + outputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + internalType: 'bytes', + name: 'userPubkey', + type: 'bytes', + }, + ], + internalType: 'struct LibPKPPermissionsStorage.AuthMethod[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + isPermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + ], + name: 'isPermittedAction', + outputs: [ + { + internalType: 'bool', + name: '', + type: 'bool', + }, + ], + stateMutability: 'view', + type: 'function', + }, + isPermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + ], + name: 'isPermittedAddress', + outputs: [ + { + internalType: 'bool', + name: '', + type: 'bool', + }, + ], + stateMutability: 'view', + type: 'function', + }, + removePermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + ], + name: 'removePermittedAction', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + removePermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + ], + name: 'removePermittedAddress', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'bytes', + name: 'userPubkey', + type: 'bytes', + }, + ], + name: 'PermittedAuthMethodAdded', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + ], + name: 'PermittedAuthMethodRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'uint256', + name: 'scopeId', + type: 'uint256', + }, + ], + name: 'PermittedAuthMethodScopeAdded', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'uint256', + name: 'scopeId', + type: 'uint256', + }, + ], + name: 'PermittedAuthMethodScopeRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: true, + internalType: 'uint256', + name: 'group', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes32', + name: 'root', + type: 'bytes32', + }, + ], + name: 'RootHashUpdated', + type: 'event', + }, + ], + }, + PubkeyRouter: { + address: '0x809d550fca64d94Bd9F66E60752A544199cfAC3D', + methods: { + getEthAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getEthAddress', + outputs: [ + { + internalType: 'address', + name: '', + type: 'address', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPubkey: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPubkey', + outputs: [ + { + internalType: 'bytes', + name: '', + type: 'bytes', + }, + ], + stateMutability: 'view', + type: 'function', + }, + deriveEthAddressFromPubkey: { + inputs: [ + { + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + ], + name: 'deriveEthAddressFromPubkey', + outputs: [ + { + internalType: 'address', + name: '', + type: 'address', + }, + ], + stateMutability: 'pure', + type: 'function', + }, + ethAddressToPkpId: { + inputs: [ + { + internalType: 'address', + name: 'ethAddress', + type: 'address', + }, + ], + name: 'ethAddressToPkpId', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'string', + name: 'message', + type: 'string', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + ], + name: 'DebugEvent', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + { + indexed: false, + internalType: 'address', + name: 'stakingContract', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + ], + name: 'PubkeyRoutingDataSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakingContract', + type: 'address', + }, + { + components: [ + { + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct IPubkeyRouter.RootKey', + name: 'rootKey', + type: 'tuple', + }, + ], + name: 'RootKeySet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'ToggleEvent', + type: 'event', + }, + ], + }, + Staking: { + address: '0x9E545E3C0baAB3E08CdfD552C960A1050f373042', + methods: { + getActiveUnkickedValidatorStructsAndCounts: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + ], + name: 'getActiveUnkickedValidatorStructsAndCounts', + outputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'epochLength', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'number', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'rewardEpochNumber', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'endTime', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'retries', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'timeout', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'startTime', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastEpochStart', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Epoch', + name: '', + type: 'tuple', + }, + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint32', + name: 'ip', + type: 'uint32', + }, + { + internalType: 'uint128', + name: 'ipv6', + type: 'uint128', + }, + { + internalType: 'uint32', + name: 'port', + type: 'uint32', + }, + { + internalType: 'address', + name: 'nodeAddress', + type: 'address', + }, + { + internalType: 'uint256', + name: 'reward', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'senderPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'receiverPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastActiveEpoch', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commission', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commissionRate', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastRewardEpoch', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Validator[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'dataType', + type: 'uint256', + }, + ], + name: 'ClearOfflinePhaseData', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint256', + name: 'tolerance', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'intervalSecs', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'kickPenaltyPercent', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'kickPenaltyDemerits', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct LibStakingStorage.ComplaintConfig', + name: 'config', + type: 'tuple', + }, + ], + name: 'ComplaintConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'tokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'keyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'rewardEpochDuration', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxTimeLock', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minTimeLock', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'bmin', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'bmax', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'k', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'p', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'enableStakeAutolock', + type: 'bool', + }, + { + indexed: false, + internalType: 'bool', + name: 'permittedStakersOn', + type: 'bool', + }, + { + indexed: false, + internalType: 'uint256', + name: 'tokenPrice', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'profitMultiplier', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'usdCostPerMonth', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxEmissionRate', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minStakeAmount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxStakeAmount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minSelfStake', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minSelfStakeTimelock', + type: 'uint256', + }, + ], + name: 'ConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'dataType', + type: 'uint256', + }, + ], + name: 'CountOfflinePhaseData', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newDevopsAdmin', + type: 'address', + }, + ], + name: 'DevopsAdminSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochEndTime', + type: 'uint256', + }, + ], + name: 'EpochEndTimeSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochLength', + type: 'uint256', + }, + ], + name: 'EpochLengthSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochTimeout', + type: 'uint256', + }, + ], + name: 'EpochTimeoutSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newKickPenaltyPercent', + type: 'uint256', + }, + ], + name: 'KickPenaltyPercentSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newTokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newKeyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxConcurrentRequests', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newPeerCheckingIntervalSecs', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignConcurrency', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'newRpcHealthcheckEnabled', + type: 'bool', + }, + ], + name: 'RealmConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newStakingTokenAddress', + type: 'address', + }, + ], + name: 'StakingTokenSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'enum LibStakingStorage.States', + name: 'newState', + type: 'uint8', + }, + ], + name: 'StateChanged', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'ValidatorRejoinedNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverContractAddress', + type: 'address', + }, + ], + name: 'ResolverContractAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'address', + name: 'stakerAddressClient', + type: 'address', + }, + ], + name: 'StakeRecordCreated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'userStakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + ], + name: 'StakeRecordRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + ], + name: 'StakeRecordUpdated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'rewards', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'fromEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'toEpoch', + type: 'uint256', + }, + ], + name: 'StakeRewardsClaimed', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Staked', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + ], + name: 'ValidatorRegistered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Withdrawn', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'epochNumber', + type: 'uint256', + }, + ], + name: 'AdvancedEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'attestedAddress', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'attestedPubKey', + type: 'uint256', + }, + ], + name: 'AttestedWalletRegistered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newTokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newKeyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxConcurrentRequests', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newPeerCheckingIntervalSecs', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignConcurrency', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'newRpcHealthcheckEnabled', + type: 'bool', + }, + ], + name: 'ConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'string', + name: 'message', + type: 'string', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + ], + name: 'DebugEvent', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'epochNumber', + type: 'uint256', + }, + ], + name: 'ReadyForNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'token', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Recovered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'RequestToJoin', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'RequestToLeave', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newDuration', + type: 'uint256', + }, + ], + name: 'RewardsDurationUpdated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amountBurned', + type: 'uint256', + }, + ], + name: 'ValidatorKickedFromNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'reporter', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'validatorToKickStakerAddress', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'data', + type: 'bytes', + }, + ], + name: 'VotedToKickValidatorInNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'index', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint256', + name: 'major', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'minor', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'patch', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct LibStakingStorage.Version', + name: 'version', + type: 'tuple', + }, + ], + name: 'VersionRequirementsUpdated', + type: 'event', + }, + ], + }, + PriceFeed: { + address: '0xf953b3A269d80e3eB0F2947630Da976B896A8C5b', + methods: { + getNodesForRequest: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + internalType: 'uint256[]', + name: 'productIds', + type: 'uint256[]', + }, + ], + name: 'getNodesForRequest', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + components: [ + { + components: [ + { + internalType: 'uint32', + name: 'ip', + type: 'uint32', + }, + { + internalType: 'uint128', + name: 'ipv6', + type: 'uint128', + }, + { + internalType: 'uint32', + name: 'port', + type: 'uint32', + }, + { + internalType: 'address', + name: 'nodeAddress', + type: 'address', + }, + { + internalType: 'uint256', + name: 'reward', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'senderPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'receiverPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastActiveEpoch', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commission', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commissionRate', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastRewardEpoch', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Validator', + name: 'validator', + type: 'tuple', + }, + { + internalType: 'uint256[]', + name: 'prices', + type: 'uint256[]', + }, + ], + internalType: 'struct LibPriceFeedStorage.NodeInfoAndPrices[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newPrice', + type: 'uint256', + }, + ], + name: 'BaseNetworkPriceSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newPrice', + type: 'uint256', + }, + ], + name: 'MaxNetworkPriceSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'stakingAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'usagePercent', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newPrices', + type: 'uint256[]', + }, + ], + name: 'UsageSet', + type: 'event', + }, + ], + }, +}; + +module.exports = { + signatures, +}; diff --git a/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.js b/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.js new file mode 100644 index 0000000000..913157e6cc --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.js @@ -0,0 +1,2588 @@ +/** + * Generated Contract Method Signatures for naga-develop + * This file is auto-generated. DO NOT EDIT UNLESS YOU KNOW WHAT YOU'RE DOING. + */ + +export const signatures = { + PKPHelper: { + address: '0x04C89607413713Ec9775E14b954286519d836FEf', + methods: { + claimAndMintNextAndAddAuthMethodsWithTypes: { + inputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + { + components: [ + { + internalType: 'bytes32', + name: 'r', + type: 'bytes32', + }, + { + internalType: 'bytes32', + name: 's', + type: 'bytes32', + }, + { + internalType: 'uint8', + name: 'v', + type: 'uint8', + }, + ], + internalType: 'struct IPubkeyRouter.Signature[]', + name: 'signatures', + type: 'tuple[]', + }, + ], + internalType: 'struct LibPKPNFTStorage.ClaimMaterial', + name: 'claimMaterial', + type: 'tuple', + }, + { + components: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes[]', + name: 'permittedIpfsCIDs', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedIpfsCIDScopes', + type: 'uint256[][]', + }, + { + internalType: 'address[]', + name: 'permittedAddresses', + type: 'address[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAddressScopes', + type: 'uint256[][]', + }, + { + internalType: 'uint256[]', + name: 'permittedAuthMethodTypes', + type: 'uint256[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodIds', + type: 'bytes[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodPubkeys', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAuthMethodScopes', + type: 'uint256[][]', + }, + { + internalType: 'bool', + name: 'addPkpEthAddressAsPermittedAddress', + type: 'bool', + }, + { + internalType: 'bool', + name: 'sendPkpToItself', + type: 'bool', + }, + ], + internalType: 'struct PKPHelper.AuthMethodData', + name: 'authMethodData', + type: 'tuple', + }, + ], + name: 'claimAndMintNextAndAddAuthMethodsWithTypes', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + mintNextAndAddAuthMethods: { + inputs: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'uint256[]', + name: 'permittedAuthMethodTypes', + type: 'uint256[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodIds', + type: 'bytes[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodPubkeys', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAuthMethodScopes', + type: 'uint256[][]', + }, + { + internalType: 'bool', + name: 'addPkpEthAddressAsPermittedAddress', + type: 'bool', + }, + { + internalType: 'bool', + name: 'sendPkpToItself', + type: 'bool', + }, + ], + name: 'mintNextAndAddAuthMethods', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'bytes32', + name: 'previousAdminRole', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'bytes32', + name: 'newAdminRole', + type: 'bytes32', + }, + ], + name: 'RoleAdminChanged', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'address', + name: 'account', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'RoleGranted', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'address', + name: 'account', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'RoleRevoked', + type: 'event', + }, + ], + }, + PKPNFT: { + address: '0x99bbA657f2BbC93c02D617f8bA121cB8Fc104Acf', + methods: { + claimAndMint: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + { + components: [ + { + internalType: 'bytes32', + name: 'r', + type: 'bytes32', + }, + { + internalType: 'bytes32', + name: 's', + type: 'bytes32', + }, + { + internalType: 'uint8', + name: 'v', + type: 'uint8', + }, + ], + internalType: 'struct IPubkeyRouter.Signature[]', + name: 'signatures', + type: 'tuple[]', + }, + { + internalType: 'address', + name: 'stakingContractAddress', + type: 'address', + }, + ], + name: 'claimAndMint', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + mintCost: { + inputs: [], + name: 'mintCost', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + tokenOfOwnerByIndex: { + inputs: [ + { + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + internalType: 'uint256', + name: 'index', + type: 'uint256', + }, + ], + name: 'tokenOfOwnerByIndex', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'approved', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'Approval', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'operator', + type: 'address', + }, + { + indexed: false, + internalType: 'bool', + name: 'approved', + type: 'bool', + }, + ], + name: 'ApprovalForAll', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'newFreeMintSigner', + type: 'address', + }, + ], + name: 'FreeMintSignerSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint8', + name: 'version', + type: 'uint8', + }, + ], + name: 'Initialized', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newMintCost', + type: 'uint256', + }, + ], + name: 'MintCostSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + ], + name: 'PKPMinted', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'from', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'to', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'Transfer', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Withdrew', + type: 'event', + }, + ], + }, + PKPPermissions: { + address: '0xdbC43Ba45381e02825b14322cDdd15eC4B3164E6', + methods: { + addPermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + { + internalType: 'uint256[]', + name: 'scopes', + type: 'uint256[]', + }, + ], + name: 'addPermittedAction', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + addPermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + { + internalType: 'uint256[]', + name: 'scopes', + type: 'uint256[]', + }, + ], + name: 'addPermittedAddress', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + getPermittedActions: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedActions', + outputs: [ + { + internalType: 'bytes[]', + name: '', + type: 'bytes[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAddresses: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedAddresses', + outputs: [ + { + internalType: 'address[]', + name: '', + type: 'address[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAuthMethodScopes: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + internalType: 'uint256', + name: 'maxScopeId', + type: 'uint256', + }, + ], + name: 'getPermittedAuthMethodScopes', + outputs: [ + { + internalType: 'bool[]', + name: '', + type: 'bool[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAuthMethods: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedAuthMethods', + outputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + internalType: 'bytes', + name: 'userPubkey', + type: 'bytes', + }, + ], + internalType: 'struct LibPKPPermissionsStorage.AuthMethod[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + isPermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + ], + name: 'isPermittedAction', + outputs: [ + { + internalType: 'bool', + name: '', + type: 'bool', + }, + ], + stateMutability: 'view', + type: 'function', + }, + isPermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + ], + name: 'isPermittedAddress', + outputs: [ + { + internalType: 'bool', + name: '', + type: 'bool', + }, + ], + stateMutability: 'view', + type: 'function', + }, + removePermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + ], + name: 'removePermittedAction', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + removePermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + ], + name: 'removePermittedAddress', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'bytes', + name: 'userPubkey', + type: 'bytes', + }, + ], + name: 'PermittedAuthMethodAdded', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + ], + name: 'PermittedAuthMethodRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'uint256', + name: 'scopeId', + type: 'uint256', + }, + ], + name: 'PermittedAuthMethodScopeAdded', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'uint256', + name: 'scopeId', + type: 'uint256', + }, + ], + name: 'PermittedAuthMethodScopeRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: true, + internalType: 'uint256', + name: 'group', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes32', + name: 'root', + type: 'bytes32', + }, + ], + name: 'RootHashUpdated', + type: 'event', + }, + ], + }, + PubkeyRouter: { + address: '0x809d550fca64d94Bd9F66E60752A544199cfAC3D', + methods: { + getEthAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getEthAddress', + outputs: [ + { + internalType: 'address', + name: '', + type: 'address', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPubkey: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPubkey', + outputs: [ + { + internalType: 'bytes', + name: '', + type: 'bytes', + }, + ], + stateMutability: 'view', + type: 'function', + }, + deriveEthAddressFromPubkey: { + inputs: [ + { + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + ], + name: 'deriveEthAddressFromPubkey', + outputs: [ + { + internalType: 'address', + name: '', + type: 'address', + }, + ], + stateMutability: 'pure', + type: 'function', + }, + ethAddressToPkpId: { + inputs: [ + { + internalType: 'address', + name: 'ethAddress', + type: 'address', + }, + ], + name: 'ethAddressToPkpId', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'string', + name: 'message', + type: 'string', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + ], + name: 'DebugEvent', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + { + indexed: false, + internalType: 'address', + name: 'stakingContract', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + ], + name: 'PubkeyRoutingDataSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakingContract', + type: 'address', + }, + { + components: [ + { + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct IPubkeyRouter.RootKey', + name: 'rootKey', + type: 'tuple', + }, + ], + name: 'RootKeySet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'ToggleEvent', + type: 'event', + }, + ], + }, + Staking: { + address: '0x9E545E3C0baAB3E08CdfD552C960A1050f373042', + methods: { + getActiveUnkickedValidatorStructsAndCounts: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + ], + name: 'getActiveUnkickedValidatorStructsAndCounts', + outputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'epochLength', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'number', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'rewardEpochNumber', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'endTime', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'retries', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'timeout', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'startTime', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastEpochStart', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Epoch', + name: '', + type: 'tuple', + }, + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint32', + name: 'ip', + type: 'uint32', + }, + { + internalType: 'uint128', + name: 'ipv6', + type: 'uint128', + }, + { + internalType: 'uint32', + name: 'port', + type: 'uint32', + }, + { + internalType: 'address', + name: 'nodeAddress', + type: 'address', + }, + { + internalType: 'uint256', + name: 'reward', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'senderPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'receiverPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastActiveEpoch', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commission', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commissionRate', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastRewardEpoch', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Validator[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'dataType', + type: 'uint256', + }, + ], + name: 'ClearOfflinePhaseData', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint256', + name: 'tolerance', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'intervalSecs', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'kickPenaltyPercent', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'kickPenaltyDemerits', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct LibStakingStorage.ComplaintConfig', + name: 'config', + type: 'tuple', + }, + ], + name: 'ComplaintConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'tokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'keyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'rewardEpochDuration', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxTimeLock', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minTimeLock', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'bmin', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'bmax', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'k', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'p', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'enableStakeAutolock', + type: 'bool', + }, + { + indexed: false, + internalType: 'bool', + name: 'permittedStakersOn', + type: 'bool', + }, + { + indexed: false, + internalType: 'uint256', + name: 'tokenPrice', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'profitMultiplier', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'usdCostPerMonth', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxEmissionRate', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minStakeAmount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxStakeAmount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minSelfStake', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minSelfStakeTimelock', + type: 'uint256', + }, + ], + name: 'ConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'dataType', + type: 'uint256', + }, + ], + name: 'CountOfflinePhaseData', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newDevopsAdmin', + type: 'address', + }, + ], + name: 'DevopsAdminSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochEndTime', + type: 'uint256', + }, + ], + name: 'EpochEndTimeSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochLength', + type: 'uint256', + }, + ], + name: 'EpochLengthSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochTimeout', + type: 'uint256', + }, + ], + name: 'EpochTimeoutSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newKickPenaltyPercent', + type: 'uint256', + }, + ], + name: 'KickPenaltyPercentSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newTokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newKeyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxConcurrentRequests', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newPeerCheckingIntervalSecs', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignConcurrency', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'newRpcHealthcheckEnabled', + type: 'bool', + }, + ], + name: 'RealmConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newStakingTokenAddress', + type: 'address', + }, + ], + name: 'StakingTokenSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'enum LibStakingStorage.States', + name: 'newState', + type: 'uint8', + }, + ], + name: 'StateChanged', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'ValidatorRejoinedNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverContractAddress', + type: 'address', + }, + ], + name: 'ResolverContractAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'address', + name: 'stakerAddressClient', + type: 'address', + }, + ], + name: 'StakeRecordCreated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'userStakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + ], + name: 'StakeRecordRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + ], + name: 'StakeRecordUpdated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'rewards', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'fromEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'toEpoch', + type: 'uint256', + }, + ], + name: 'StakeRewardsClaimed', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Staked', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + ], + name: 'ValidatorRegistered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Withdrawn', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'epochNumber', + type: 'uint256', + }, + ], + name: 'AdvancedEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'attestedAddress', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'attestedPubKey', + type: 'uint256', + }, + ], + name: 'AttestedWalletRegistered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newTokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newKeyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxConcurrentRequests', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newPeerCheckingIntervalSecs', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignConcurrency', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'newRpcHealthcheckEnabled', + type: 'bool', + }, + ], + name: 'ConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'string', + name: 'message', + type: 'string', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + ], + name: 'DebugEvent', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'epochNumber', + type: 'uint256', + }, + ], + name: 'ReadyForNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'token', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Recovered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'RequestToJoin', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'RequestToLeave', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newDuration', + type: 'uint256', + }, + ], + name: 'RewardsDurationUpdated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amountBurned', + type: 'uint256', + }, + ], + name: 'ValidatorKickedFromNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'reporter', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'validatorToKickStakerAddress', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'data', + type: 'bytes', + }, + ], + name: 'VotedToKickValidatorInNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'index', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint256', + name: 'major', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'minor', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'patch', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct LibStakingStorage.Version', + name: 'version', + type: 'tuple', + }, + ], + name: 'VersionRequirementsUpdated', + type: 'event', + }, + ], + }, + PriceFeed: { + address: '0xf953b3A269d80e3eB0F2947630Da976B896A8C5b', + methods: { + getNodesForRequest: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + internalType: 'uint256[]', + name: 'productIds', + type: 'uint256[]', + }, + ], + name: 'getNodesForRequest', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + components: [ + { + components: [ + { + internalType: 'uint32', + name: 'ip', + type: 'uint32', + }, + { + internalType: 'uint128', + name: 'ipv6', + type: 'uint128', + }, + { + internalType: 'uint32', + name: 'port', + type: 'uint32', + }, + { + internalType: 'address', + name: 'nodeAddress', + type: 'address', + }, + { + internalType: 'uint256', + name: 'reward', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'senderPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'receiverPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastActiveEpoch', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commission', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commissionRate', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastRewardEpoch', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Validator', + name: 'validator', + type: 'tuple', + }, + { + internalType: 'uint256[]', + name: 'prices', + type: 'uint256[]', + }, + ], + internalType: 'struct LibPriceFeedStorage.NodeInfoAndPrices[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newPrice', + type: 'uint256', + }, + ], + name: 'BaseNetworkPriceSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newPrice', + type: 'uint256', + }, + ], + name: 'MaxNetworkPriceSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'stakingAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'usagePercent', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newPrices', + type: 'uint256[]', + }, + ], + name: 'UsageSet', + type: 'event', + }, + ], + }, +}; diff --git a/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.ts b/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.ts new file mode 100644 index 0000000000..41852485e6 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/local-develop/naga-develop-signatures/naga-develop.ts @@ -0,0 +1,2589 @@ +/** + * Generated Contract Method Signatures for naga-develop + * This file is auto-generated. DO NOT EDIT UNLESS YOU KNOW WHAT YOU'RE DOING. + */ + +export const signatures = { + PKPHelper: { + address: '0x04C89607413713Ec9775E14b954286519d836FEf', + methods: { + claimAndMintNextAndAddAuthMethodsWithTypes: { + inputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + { + components: [ + { + internalType: 'bytes32', + name: 'r', + type: 'bytes32', + }, + { + internalType: 'bytes32', + name: 's', + type: 'bytes32', + }, + { + internalType: 'uint8', + name: 'v', + type: 'uint8', + }, + ], + internalType: 'struct IPubkeyRouter.Signature[]', + name: 'signatures', + type: 'tuple[]', + }, + ], + internalType: 'struct LibPKPNFTStorage.ClaimMaterial', + name: 'claimMaterial', + type: 'tuple', + }, + { + components: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes[]', + name: 'permittedIpfsCIDs', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedIpfsCIDScopes', + type: 'uint256[][]', + }, + { + internalType: 'address[]', + name: 'permittedAddresses', + type: 'address[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAddressScopes', + type: 'uint256[][]', + }, + { + internalType: 'uint256[]', + name: 'permittedAuthMethodTypes', + type: 'uint256[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodIds', + type: 'bytes[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodPubkeys', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAuthMethodScopes', + type: 'uint256[][]', + }, + { + internalType: 'bool', + name: 'addPkpEthAddressAsPermittedAddress', + type: 'bool', + }, + { + internalType: 'bool', + name: 'sendPkpToItself', + type: 'bool', + }, + ], + internalType: 'struct PKPHelper.AuthMethodData', + name: 'authMethodData', + type: 'tuple', + }, + ], + name: 'claimAndMintNextAndAddAuthMethodsWithTypes', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + mintNextAndAddAuthMethods: { + inputs: [ + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'uint256[]', + name: 'permittedAuthMethodTypes', + type: 'uint256[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodIds', + type: 'bytes[]', + }, + { + internalType: 'bytes[]', + name: 'permittedAuthMethodPubkeys', + type: 'bytes[]', + }, + { + internalType: 'uint256[][]', + name: 'permittedAuthMethodScopes', + type: 'uint256[][]', + }, + { + internalType: 'bool', + name: 'addPkpEthAddressAsPermittedAddress', + type: 'bool', + }, + { + internalType: 'bool', + name: 'sendPkpToItself', + type: 'bool', + }, + ], + name: 'mintNextAndAddAuthMethods', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'bytes32', + name: 'previousAdminRole', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'bytes32', + name: 'newAdminRole', + type: 'bytes32', + }, + ], + name: 'RoleAdminChanged', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'address', + name: 'account', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'RoleGranted', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'bytes32', + name: 'role', + type: 'bytes32', + }, + { + indexed: true, + internalType: 'address', + name: 'account', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'RoleRevoked', + type: 'event', + }, + ], + }, + PKPNFT: { + address: '0x99bbA657f2BbC93c02D617f8bA121cB8Fc104Acf', + methods: { + claimAndMint: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + { + components: [ + { + internalType: 'bytes32', + name: 'r', + type: 'bytes32', + }, + { + internalType: 'bytes32', + name: 's', + type: 'bytes32', + }, + { + internalType: 'uint8', + name: 'v', + type: 'uint8', + }, + ], + internalType: 'struct IPubkeyRouter.Signature[]', + name: 'signatures', + type: 'tuple[]', + }, + { + internalType: 'address', + name: 'stakingContractAddress', + type: 'address', + }, + ], + name: 'claimAndMint', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'payable', + type: 'function', + }, + mintCost: { + inputs: [], + name: 'mintCost', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + tokenOfOwnerByIndex: { + inputs: [ + { + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + internalType: 'uint256', + name: 'index', + type: 'uint256', + }, + ], + name: 'tokenOfOwnerByIndex', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'approved', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'Approval', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'owner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'operator', + type: 'address', + }, + { + indexed: false, + internalType: 'bool', + name: 'approved', + type: 'bool', + }, + ], + name: 'ApprovalForAll', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'newFreeMintSigner', + type: 'address', + }, + ], + name: 'FreeMintSignerSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint8', + name: 'version', + type: 'uint8', + }, + ], + name: 'Initialized', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newMintCost', + type: 'uint256', + }, + ], + name: 'MintCostSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + ], + name: 'PKPMinted', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'from', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'to', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'Transfer', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Withdrew', + type: 'event', + }, + ], + }, + PKPPermissions: { + address: '0xdbC43Ba45381e02825b14322cDdd15eC4B3164E6', + methods: { + addPermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + { + internalType: 'uint256[]', + name: 'scopes', + type: 'uint256[]', + }, + ], + name: 'addPermittedAction', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + addPermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + { + internalType: 'uint256[]', + name: 'scopes', + type: 'uint256[]', + }, + ], + name: 'addPermittedAddress', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + getPermittedActions: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedActions', + outputs: [ + { + internalType: 'bytes[]', + name: '', + type: 'bytes[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAddresses: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedAddresses', + outputs: [ + { + internalType: 'address[]', + name: '', + type: 'address[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAuthMethodScopes: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + internalType: 'uint256', + name: 'maxScopeId', + type: 'uint256', + }, + ], + name: 'getPermittedAuthMethodScopes', + outputs: [ + { + internalType: 'bool[]', + name: '', + type: 'bool[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPermittedAuthMethods: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPermittedAuthMethods', + outputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + internalType: 'bytes', + name: 'userPubkey', + type: 'bytes', + }, + ], + internalType: 'struct LibPKPPermissionsStorage.AuthMethod[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + isPermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + ], + name: 'isPermittedAction', + outputs: [ + { + internalType: 'bool', + name: '', + type: 'bool', + }, + ], + stateMutability: 'view', + type: 'function', + }, + isPermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + ], + name: 'isPermittedAddress', + outputs: [ + { + internalType: 'bool', + name: '', + type: 'bool', + }, + ], + stateMutability: 'view', + type: 'function', + }, + removePermittedAction: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'bytes', + name: 'ipfsCID', + type: 'bytes', + }, + ], + name: 'removePermittedAction', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + removePermittedAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + internalType: 'address', + name: 'user', + type: 'address', + }, + ], + name: 'removePermittedAddress', + outputs: [], + stateMutability: 'nonpayable', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'bytes', + name: 'userPubkey', + type: 'bytes', + }, + ], + name: 'PermittedAuthMethodAdded', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + ], + name: 'PermittedAuthMethodRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'uint256', + name: 'scopeId', + type: 'uint256', + }, + ], + name: 'PermittedAuthMethodScopeAdded', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'authMethodType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'id', + type: 'bytes', + }, + { + indexed: false, + internalType: 'uint256', + name: 'scopeId', + type: 'uint256', + }, + ], + name: 'PermittedAuthMethodScopeRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: true, + internalType: 'uint256', + name: 'group', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes32', + name: 'root', + type: 'bytes32', + }, + ], + name: 'RootHashUpdated', + type: 'event', + }, + ], + }, + PubkeyRouter: { + address: '0x809d550fca64d94Bd9F66E60752A544199cfAC3D', + methods: { + getEthAddress: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getEthAddress', + outputs: [ + { + internalType: 'address', + name: '', + type: 'address', + }, + ], + stateMutability: 'view', + type: 'function', + }, + getPubkey: { + inputs: [ + { + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + ], + name: 'getPubkey', + outputs: [ + { + internalType: 'bytes', + name: '', + type: 'bytes', + }, + ], + stateMutability: 'view', + type: 'function', + }, + deriveEthAddressFromPubkey: { + inputs: [ + { + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + ], + name: 'deriveEthAddressFromPubkey', + outputs: [ + { + internalType: 'address', + name: '', + type: 'address', + }, + ], + stateMutability: 'pure', + type: 'function', + }, + ethAddressToPkpId: { + inputs: [ + { + internalType: 'address', + name: 'ethAddress', + type: 'address', + }, + ], + name: 'ethAddressToPkpId', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverAddress', + type: 'address', + }, + ], + name: 'ContractResolverAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'string', + name: 'message', + type: 'string', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + ], + name: 'DebugEvent', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'uint256', + name: 'tokenId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + { + indexed: false, + internalType: 'address', + name: 'stakingContract', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes32', + name: 'derivedKeyId', + type: 'bytes32', + }, + ], + name: 'PubkeyRoutingDataSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakingContract', + type: 'address', + }, + { + components: [ + { + internalType: 'bytes', + name: 'pubkey', + type: 'bytes', + }, + { + internalType: 'uint256', + name: 'keyType', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct IPubkeyRouter.RootKey', + name: 'rootKey', + type: 'tuple', + }, + ], + name: 'RootKeySet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + ], + name: 'ToggleEvent', + type: 'event', + }, + ], + }, + Staking: { + address: '0x9E545E3C0baAB3E08CdfD552C960A1050f373042', + methods: { + getActiveUnkickedValidatorStructsAndCounts: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + ], + name: 'getActiveUnkickedValidatorStructsAndCounts', + outputs: [ + { + components: [ + { + internalType: 'uint256', + name: 'epochLength', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'number', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'rewardEpochNumber', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'endTime', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'retries', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'timeout', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'startTime', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastEpochStart', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Epoch', + name: '', + type: 'tuple', + }, + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint32', + name: 'ip', + type: 'uint32', + }, + { + internalType: 'uint128', + name: 'ipv6', + type: 'uint128', + }, + { + internalType: 'uint32', + name: 'port', + type: 'uint32', + }, + { + internalType: 'address', + name: 'nodeAddress', + type: 'address', + }, + { + internalType: 'uint256', + name: 'reward', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'senderPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'receiverPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastActiveEpoch', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commission', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commissionRate', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastRewardEpoch', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Validator[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'dataType', + type: 'uint256', + }, + ], + name: 'ClearOfflinePhaseData', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint256', + name: 'tolerance', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'intervalSecs', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'kickPenaltyPercent', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'kickPenaltyDemerits', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct LibStakingStorage.ComplaintConfig', + name: 'config', + type: 'tuple', + }, + ], + name: 'ComplaintConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'tokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'keyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'rewardEpochDuration', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxTimeLock', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minTimeLock', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'bmin', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'bmax', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'k', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'p', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'enableStakeAutolock', + type: 'bool', + }, + { + indexed: false, + internalType: 'bool', + name: 'permittedStakersOn', + type: 'bool', + }, + { + indexed: false, + internalType: 'uint256', + name: 'tokenPrice', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'profitMultiplier', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'usdCostPerMonth', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxEmissionRate', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minStakeAmount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'maxStakeAmount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minSelfStake', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'minSelfStakeTimelock', + type: 'uint256', + }, + ], + name: 'ConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'dataType', + type: 'uint256', + }, + ], + name: 'CountOfflinePhaseData', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newDevopsAdmin', + type: 'address', + }, + ], + name: 'DevopsAdminSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochEndTime', + type: 'uint256', + }, + ], + name: 'EpochEndTimeSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochLength', + type: 'uint256', + }, + ], + name: 'EpochLengthSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newEpochTimeout', + type: 'uint256', + }, + ], + name: 'EpochTimeoutSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newKickPenaltyPercent', + type: 'uint256', + }, + ], + name: 'KickPenaltyPercentSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newTokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newKeyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxConcurrentRequests', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newPeerCheckingIntervalSecs', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignConcurrency', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'newRpcHealthcheckEnabled', + type: 'bool', + }, + ], + name: 'RealmConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newStakingTokenAddress', + type: 'address', + }, + ], + name: 'StakingTokenSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'enum LibStakingStorage.States', + name: 'newState', + type: 'uint8', + }, + ], + name: 'StateChanged', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'ValidatorRejoinedNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'newResolverContractAddress', + type: 'address', + }, + ], + name: 'ResolverContractAddressSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'address', + name: 'stakerAddressClient', + type: 'address', + }, + ], + name: 'StakeRecordCreated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'userStakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + ], + name: 'StakeRecordRemoved', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + ], + name: 'StakeRecordUpdated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'recordId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'rewards', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'fromEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'toEpoch', + type: 'uint256', + }, + ], + name: 'StakeRewardsClaimed', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Staked', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'stakerAddress', + type: 'address', + }, + ], + name: 'ValidatorRegistered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Withdrawn', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'epochNumber', + type: 'uint256', + }, + ], + name: 'AdvancedEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'attestedAddress', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'attestedPubKey', + type: 'uint256', + }, + ], + name: 'AttestedWalletRegistered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newTokenRewardPerTokenPerEpoch', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newKeyTypes', + type: 'uint256[]', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinimumValidatorCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxConcurrentRequests', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMinPresignCount', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newPeerCheckingIntervalSecs', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'newMaxPresignConcurrency', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bool', + name: 'newRpcHealthcheckEnabled', + type: 'bool', + }, + ], + name: 'ConfigSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'string', + name: 'message', + type: 'string', + }, + { + indexed: false, + internalType: 'address', + name: 'sender', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'value', + type: 'uint256', + }, + ], + name: 'DebugEvent', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'epochNumber', + type: 'uint256', + }, + ], + name: 'ReadyForNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'address', + name: 'token', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amount', + type: 'uint256', + }, + ], + name: 'Recovered', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'RequestToJoin', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + ], + name: 'RequestToLeave', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newDuration', + type: 'uint256', + }, + ], + name: 'RewardsDurationUpdated', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'staker', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'amountBurned', + type: 'uint256', + }, + ], + name: 'ValidatorKickedFromNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'reporter', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'validatorToKickStakerAddress', + type: 'address', + }, + { + indexed: true, + internalType: 'uint256', + name: 'reason', + type: 'uint256', + }, + { + indexed: false, + internalType: 'bytes', + name: 'data', + type: 'bytes', + }, + ], + name: 'VotedToKickValidatorInNextEpoch', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'index', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + components: [ + { + internalType: 'uint256', + name: 'major', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'minor', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'patch', + type: 'uint256', + }, + ], + indexed: false, + internalType: 'struct LibStakingStorage.Version', + name: 'version', + type: 'tuple', + }, + ], + name: 'VersionRequirementsUpdated', + type: 'event', + }, + ], + }, + PriceFeed: { + address: '0xf953b3A269d80e3eB0F2947630Da976B896A8C5b', + methods: { + getNodesForRequest: { + inputs: [ + { + internalType: 'uint256', + name: 'realmId', + type: 'uint256', + }, + { + internalType: 'uint256[]', + name: 'productIds', + type: 'uint256[]', + }, + ], + name: 'getNodesForRequest', + outputs: [ + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + internalType: 'uint256', + name: '', + type: 'uint256', + }, + { + components: [ + { + components: [ + { + internalType: 'uint32', + name: 'ip', + type: 'uint32', + }, + { + internalType: 'uint128', + name: 'ipv6', + type: 'uint128', + }, + { + internalType: 'uint32', + name: 'port', + type: 'uint32', + }, + { + internalType: 'address', + name: 'nodeAddress', + type: 'address', + }, + { + internalType: 'uint256', + name: 'reward', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'senderPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'receiverPubKey', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastActiveEpoch', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commission', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'commissionRate', + type: 'uint256', + }, + { + internalType: 'uint256', + name: 'lastRewardEpoch', + type: 'uint256', + }, + ], + internalType: 'struct LibStakingStorage.Validator', + name: 'validator', + type: 'tuple', + }, + { + internalType: 'uint256[]', + name: 'prices', + type: 'uint256[]', + }, + ], + internalType: 'struct LibPriceFeedStorage.NodeInfoAndPrices[]', + name: '', + type: 'tuple[]', + }, + ], + stateMutability: 'view', + type: 'function', + }, + }, + events: [ + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: 'address', + name: 'facetAddress', + type: 'address', + }, + { + internalType: 'enum IDiamond.FacetCutAction', + name: 'action', + type: 'uint8', + }, + { + internalType: 'bytes4[]', + name: 'functionSelectors', + type: 'bytes4[]', + }, + ], + indexed: false, + internalType: 'struct IDiamond.FacetCut[]', + name: '_diamondCut', + type: 'tuple[]', + }, + { + indexed: false, + internalType: 'address', + name: '_init', + type: 'address', + }, + { + indexed: false, + internalType: 'bytes', + name: '_calldata', + type: 'bytes', + }, + ], + name: 'DiamondCut', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'previousOwner', + type: 'address', + }, + { + indexed: true, + internalType: 'address', + name: 'newOwner', + type: 'address', + }, + ], + name: 'OwnershipTransferred', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newPrice', + type: 'uint256', + }, + ], + name: 'BaseNetworkPriceSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: 'uint256', + name: 'newPrice', + type: 'uint256', + }, + ], + name: 'MaxNetworkPriceSet', + type: 'event', + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: 'address', + name: 'stakingAddress', + type: 'address', + }, + { + indexed: false, + internalType: 'uint256', + name: 'usagePercent', + type: 'uint256', + }, + { + indexed: false, + internalType: 'uint256[]', + name: 'newPrices', + type: 'uint256[]', + }, + ], + name: 'UsageSet', + type: 'event', + }, + ], + }, +} as const; +export type Signatures = typeof signatures; diff --git a/packages/networks/src/lib/networks/vNaga/local-develop/networkContext.ts b/packages/networks/src/lib/networks/vNaga/local-develop/networkContext.ts new file mode 100644 index 0000000000..5b6eebc0bb --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/local-develop/networkContext.ts @@ -0,0 +1,34 @@ +import { createWalletClient, http } from 'viem'; +import { privateKeyToAccount } from 'viem/accounts'; +import { + anvilConfig, + anvilFirstPrivateKey, + anvilRpcUrl, +} from '../../shared/chains/anvil'; +import { INetworkContext } from '../common/NetworkContext'; +import { signatures as localDevelopSignatures } from './naga-develop-signatures/naga-develop'; + +export const nagaLocalDevelopNetworkContext: INetworkContext< + typeof localDevelopSignatures +> = { + network: 'custom', + rpcUrl: anvilRpcUrl, + privateKey: anvilFirstPrivateKey, + chainConfig: { + chain: anvilConfig, + contractData: localDevelopSignatures, + }, + httpProtocol: 'http://', + walletClient: createWalletClient({ + chain: anvilConfig, + transport: http(anvilRpcUrl), + account: privateKeyToAccount(anvilFirstPrivateKey), + }), + realmId: 1n, +}; + +export type NagaLocalDevelopNetworkContext = + typeof nagaLocalDevelopNetworkContext; + +// network object calls the chain client +// LitClient could use the network to figure out diff --git a/packages/networks/src/lib/networks/vNaga/naga-dev/handlers/index.ts b/packages/networks/src/lib/networks/vNaga/naga-dev/handlers/index.ts new file mode 100644 index 0000000000..13f5977763 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/naga-dev/handlers/index.ts @@ -0,0 +1,3 @@ +// TODO: Define request creator and response handlers in this folder; export object hash of them here for composition on to naga-dev object +// TODO: Define local types.ts here just for these functions; they will be exposed using `Parameters<>` and `ReturnType<>` on the network object +// Note that if handlers are the same for multiple networks, they can be defined in ../handlers or ../../handlers, and imported / composed on to each network they apply to diff --git a/packages/networks/src/lib/networks/vNaga/naga-dev/index.ts b/packages/networks/src/lib/networks/vNaga/naga-dev/index.ts new file mode 100644 index 0000000000..750c3d9459 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/naga-dev/index.ts @@ -0,0 +1,61 @@ +import { HTTPS, LIT_CHAINS, LIT_ENDPOINT } from '@lit-protocol/constants'; +import { nagaDev } from '@lit-protocol/contracts'; + +import { LitNetwork } from '../../../LitNetwork'; + +import type { LitNetworkConfig } from '../../../types'; + +export class NagaDev extends LitNetwork { + constructor(params: Partial) { + // NOTE: only contractData is managed by network; LitChainClient must handle the full ContractContext as it needs to actually hit the chain + // to get contract addresses, and the LitNetwork doesn't use the chain directly + super({ + name: 'naga-dev', + endpoints: LIT_ENDPOINT, + httpProtocol: HTTPS, + chainConfig: { + chain: LIT_CHAINS['yellowstone'], + contractData: nagaDev.data.map((c) => ({ + address: c.contracts[0].address_hash, + abi: c.contracts[0].ABI, + name: c.name, + })), + }, + ...params, + }); + } + + // Note: Node selection logic happens in the createXXXRequest() methods, as it is network-specific + + // TODO: Input: LitNodeClient.decrypt() params + // TODO: Output: LitNodeClient.sendCommandToNode() params array + async createDecryptRequests(params: unknown) { + return undefined; + } + + // TODO: Input: Result from sending decrypt requests to all necessary nodes + // TODO: Output: LitNodeClient.decrypt() return value + async handleDecryptResponses(response: unknown) { + return {}; + } + + // TODO: LitNodeClient.executeJs() params + async createExecuteJsRequests(params: unknown) { + return undefined; + } + + // TODO: LitNodeClient.executeJs() return value + async handleExecuteJsResponses(response: unknown) { + return {}; + } + + // TODO: LitNodeClient.pkpSign() params + async createSignRequests(params: unknown) { + return undefined; + } + + // TODO: LitNodeClient.pkpSign() return value + async handleSignResponses(response: unknown) { + return {}; + } +} diff --git a/packages/networks/src/lib/networks/vNaga/naga/handlers/index.ts b/packages/networks/src/lib/networks/vNaga/naga/handlers/index.ts new file mode 100644 index 0000000000..93327eb646 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/naga/handlers/index.ts @@ -0,0 +1 @@ +// This directory to contain request/response handlers that are specific to mainnet naga; will compose from `../handlers/...` for shared handlers diff --git a/packages/networks/src/lib/networks/vNaga/naga/index.ts b/packages/networks/src/lib/networks/vNaga/naga/index.ts new file mode 100644 index 0000000000..f8547553ee --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/naga/index.ts @@ -0,0 +1,61 @@ +import { HTTPS, LIT_CHAINS, LIT_ENDPOINT } from '@lit-protocol/constants'; +import { nagaDev } from '@lit-protocol/contracts'; + +import { LitNetwork } from '../../../LitNetwork'; + +import type { LitNetworkConfig } from '../../../types'; + +export class Naga extends LitNetwork { + constructor(params: Partial) { + // NOTE: only contractData is managed by network; LitChainClient must handle the full ContractContext as it needs to actually hit the chain + // to get contract addresses, and the LitNetwork doesn't use the chain directly + super({ + name: 'naga', + endpoints: LIT_ENDPOINT, + httpProtocol: HTTPS, + chainConfig: { + chain: LIT_CHAINS['yellowstone'], + contractData: nagaDev.data.map((c) => ({ + address: c.contracts[0].address_hash, + abi: c.contracts[0].ABI, + name: c.name, + })), + }, + ...params, + }); + } + + // Note: Node selection logic happens in the createXXXRequest() methods, as it is network-specific + + // TODO: Input: LitNodeClient.decrypt() params + // TODO: Output: LitNodeClient.sendCommandToNode() params array + async createDecryptRequests(params: unknown) { + return undefined; + } + + // TODO: Input: Result from sending decrypt requests to all necessary nodes + // TODO: Output: LitNodeClient.decrypt() return value + async handleDecryptResponses(response: unknown) { + return {}; + } + + // TODO: LitNodeClient.executeJs() params + async createExecuteJsRequests(params: unknown) { + return undefined; + } + + // TODO: LitNodeClient.executeJs() return value + async handleExecuteJsResponses(response: unknown) { + return {}; + } + + // TODO: LitNodeClient.pkpSign() params + async createSignRequests(params: unknown) { + return undefined; + } + + // TODO: LitNodeClient.pkpSign() return value + async handleSignResponses(response: unknown) { + return {}; + } +} diff --git a/packages/networks/src/lib/networks/vNaga/types.ts b/packages/networks/src/lib/networks/vNaga/types.ts new file mode 100644 index 0000000000..33ff97a6a5 --- /dev/null +++ b/packages/networks/src/lib/networks/vNaga/types.ts @@ -0,0 +1,26 @@ +import { NagaLocalDevelopNetworkContext } from './local-develop/networkContext'; +// import { NagaDevNetworkContext } from "./naga-dev/networkContext"; +// import { NagaTestNetworkContext } from "./naga-test/networkContext"; +// import { NagaMainnetNetworkContext } from "./naga-mainnet/networkContext"; + +/** + * Union type representing all supported Naga network contexts. + * + * @remarks + * When using this union type, TypeScript will only allow access to properties/methods + * that exist in both network contexts. If you attempt to use a method that exists + * in only one of the network contexts (Dev or Test), TypeScript will throw a + * compilation error. + * + * @example + * ```typescript + * function example(networkCtx: NagaContext) { + * networkCtx.sharedMethod(); // ✅ OK - exists in both contexts + * networkCtx.devOnlyMethod(); // ❌ Error - only exists in DevNetwork + * } + * ``` + */ +export type NagaContext = NagaLocalDevelopNetworkContext; +// | NagaDevNetworkContext +// | NagaTestNetworkContext +// | NagaMainnetNetworkContext; diff --git a/packages/networks/src/lib/types.ts b/packages/networks/src/lib/types.ts new file mode 100644 index 0000000000..4cac989a5a --- /dev/null +++ b/packages/networks/src/lib/types.ts @@ -0,0 +1,17 @@ +// TODO: These types can probably be moved into the `networks` package and only exposed by way of the `LitNetwork` if necessary +import { LIT_ENDPOINT, HTTP, HTTPS, LIT_CHAINS } from '@lit-protocol/constants'; + +import type { LitContract } from '@lit-protocol/types'; + +export interface LitChainConfig { + chain: (typeof LIT_CHAINS)[keyof typeof LIT_CHAINS]; + contractData: LitContract[]; +} + +export interface LitNetworkConfig { + name: string; + chainConfig: LitChainConfig; + endpoints: typeof LIT_ENDPOINT; + httpProtocol: typeof HTTP | typeof HTTPS; + options?: unknown; +} diff --git a/packages/auth-browser/tsconfig.json b/packages/networks/tsconfig.json similarity index 90% rename from packages/auth-browser/tsconfig.json rename to packages/networks/tsconfig.json index 9159542230..f5b85657a8 100644 --- a/packages/auth-browser/tsconfig.json +++ b/packages/networks/tsconfig.json @@ -9,8 +9,8 @@ "noImplicitReturns": true, "noFallthroughCasesInSwitch": true }, - "include": ["src/lib/connect-modal/modal.css"], "files": [], + "include": [], "references": [ { "path": "./tsconfig.lib.json" diff --git a/packages/lit-auth-client/tsconfig.lib.json b/packages/networks/tsconfig.lib.json similarity index 100% rename from packages/lit-auth-client/tsconfig.lib.json rename to packages/networks/tsconfig.lib.json diff --git a/packages/lit-node-client-nodejs/tsconfig.spec.json b/packages/networks/tsconfig.spec.json similarity index 100% rename from packages/lit-node-client-nodejs/tsconfig.spec.json rename to packages/networks/tsconfig.spec.json diff --git a/packages/pkp-base/src/lib/pkp-base.ts b/packages/pkp-base/src/lib/pkp-base.ts index 3f7ff3feb0..912950464b 100644 --- a/packages/pkp-base/src/lib/pkp-base.ts +++ b/packages/pkp-base/src/lib/pkp-base.ts @@ -12,8 +12,9 @@ import { LitNodeClientNotReadyError, UnknownError, } from '@lit-protocol/constants'; +import { publicKeyCompress } from '@lit-protocol/crypto'; import { LitNodeClient } from '@lit-protocol/lit-node-client'; -import { publicKeyConvert } from '@lit-protocol/misc'; +import { Logger, getChildLogger } from '@lit-protocol/logger'; import { AuthenticationContext, JsonExecutionSdkParams, @@ -23,29 +24,11 @@ import { RPCUrls, } from '@lit-protocol/types'; -/** - * Compresses a given public key. - * @param {string} pubKey - The public key to be compressed. - * @returns {string} - The compressed public key. - */ -const compressPubKey = (pubKey: string): string => { - const testBuffer = Buffer.from(pubKey, 'hex'); - if (testBuffer.length === 64) { - pubKey = '04' + pubKey; - } - - // const hex = Buffer.from(pubKey, 'hex'); - const uint8array = Buffer.from(pubKey, 'hex'); - const compressedKey = publicKeyConvert(uint8array, true); - const hex = Buffer.from(compressedKey).toString('hex'); - - return hex; -}; - /** * A base class that can be shared between Ethers and Cosmos signers. */ export class PKPBase { + private readonly _logger: Logger; rpcs?: RPCUrls; authContext: AuthenticationContext; @@ -62,11 +45,6 @@ export class PKPBase { debug: boolean; useAction: boolean | undefined; - // -- debug things - private PREFIX = '[PKPBase]'; - private orange = '\x1b[33m'; - private reset = '\x1b[0m'; - get litNodeClientReady(): boolean { return this.litNodeClient.ready; } @@ -77,6 +55,10 @@ export class PKPBase { const prop = { ...pkpBaseProp }; // Avoid modifications to the received object this.debug = prop.debug || false; + this._logger = getChildLogger({ + module: 'PKPBase', + ...(prop.debug ? { level: 'debug' } : {}), + }); if (prop.pkpPubKey.startsWith('0x')) { prop.pkpPubKey = prop.pkpPubKey.slice(2); @@ -87,7 +69,7 @@ export class PKPBase { this.rpcs = prop.rpcs; - console.log('authContext:', prop.authContext); + this._logger.info({ msg: 'authContext', authContext: prop.authContext }); this.authContext = prop.authContext; this.validateAuthContext(); @@ -131,6 +113,25 @@ export class PKPBase { } } + /** + * Compresses a given public key. + * @param {string} pubKey - The public key to be compressed. + * @returns {string} - The compressed public key. + */ + private compressPubKey(pubKey: string): string { + const testBuffer = Buffer.from(pubKey, 'hex'); + if (testBuffer.length === 64) { + pubKey = '04' + pubKey; + } + + // const hex = Buffer.from(pubKey, 'hex'); + const uint8array = Buffer.from(pubKey, 'hex'); + const compressedKey = publicKeyCompress(uint8array); + const hex = Buffer.from(compressedKey).toString('hex'); + + return hex; + } + /** * Sets the compressed public key and its buffer representation. * @@ -138,7 +139,7 @@ export class PKPBase { */ private setCompressedPubKeyAndBuffer(prop: PKPBaseProp): void | never { try { - this.compressedPubKey = compressPubKey(prop.pkpPubKey); + this.compressedPubKey = this.compressPubKey(prop.pkpPubKey); this.compressedPubKeyBuffer = Buffer.from(this.compressedPubKey, 'hex'); } catch (e) { throw new UnknownError( @@ -178,7 +179,7 @@ export class PKPBase { } if (!pkpBaseProp.litActionCode && !pkpBaseProp.litActionIPFS) { - this.log( + this._logger.debug( 'No lit action code or IPFS hash provided. Using default action.' ); this.useAction = false; @@ -205,7 +206,7 @@ export class PKPBase { async init(): Promise { try { await this.litNodeClient.connect(); - this.log('Connected to Lit Node'); + this._logger.debug('Connected to Lit Node'); } catch (e) { throw new LitNodeClientNotReadyError( { @@ -310,14 +311,14 @@ export class PKPBase { ); } - this.log('executeJsArgs:', executeJsArgs); + this._logger.debug({ msg: 'executeJsArgs', executeJsArgs }); const res = await this.litNodeClient.executeJs(executeJsArgs); const sig = res.signatures[sigName]; - this.log('res:', res); - this.log('res.signatures[sigName]:', sig); + this._logger.debug({ msg: 'res', res }); + this._logger.debug({ msg: 'res.signatures[sigName]', sig }); if (sig.r && sig.s) { // pad sigs with 0 if length is odd @@ -350,26 +351,21 @@ export class PKPBase { this.validateAuthContext(); - try { - const sig = await this.litNodeClient.pkpSign({ - toSign, - pubKey: this.uncompressedPubKey, - authContext: this.authContext, - }); + const sig = await this.litNodeClient.pkpSign({ + toSign, + pubKey: this.uncompressedPubKey, + authContext: this.authContext, + }); - if (!sig) { - throw new UnknownError({}, 'No signature returned'); - } + if (!sig) { + throw new UnknownError({}, 'No signature returned'); + } - // pad sigs with 0 if length is odd - sig.r = sig.r.length % 2 === 0 ? sig.r : '0' + sig.r; - sig.s = sig.s.length % 2 === 0 ? sig.s : '0' + sig.s; + // pad sigs with 0 if length is odd + sig.r = sig.r.length % 2 === 0 ? sig.r : '0' + sig.r; + sig.s = sig.s.length % 2 === 0 ? sig.s : '0' + sig.s; - return sig; - } catch (e) { - console.log('err: ', e); - throw e; - } + return sig; } /** @@ -383,18 +379,4 @@ export class PKPBase { await this.init(); } } - - /** - * Logs the provided arguments to the console, but only if debugging is enabled. - * - * @param {...any[]} args - The values to be logged to the console. - * - * @returns {void} - This function does not return a value. - */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - log(...args: any[]): void { - if (this.debug) { - console.log(this.orange + this.PREFIX + this.reset, ...args); - } - } } diff --git a/packages/pkp-cosmos/src/lib/pkp-cosmos.ts b/packages/pkp-cosmos/src/lib/pkp-cosmos.ts index 3438ce657b..e1c73cc956 100644 --- a/packages/pkp-cosmos/src/lib/pkp-cosmos.ts +++ b/packages/pkp-cosmos/src/lib/pkp-cosmos.ts @@ -37,6 +37,7 @@ import { InvalidArgumentException, RemovedFunctionError, } from '@lit-protocol/constants'; +import { Logger, getChildLogger } from '@lit-protocol/logger'; import { PKPBase } from '@lit-protocol/pkp-base'; import { PKPClientHelpers, @@ -56,6 +57,7 @@ const DEFAULT_COSMOS_RPC_URL = export class PKPCosmosWallet implements PKPWallet, OfflineDirectSigner, PKPClientHelpers { + private readonly _logger: Logger; private readonly pkpBase: PKPBase; // Address prefix for Bech32 addresses @@ -73,6 +75,10 @@ export class PKPCosmosWallet constructor(prop: PKPCosmosWalletProp) { this.pkpBase = PKPBase.createInstance(prop); + this._logger = getChildLogger({ + module: 'PKPCosmosWallet', + ...(prop.debug ? { level: 'debug' } : {}), + }); // Set the address prefix and RPC URL based on the provided properties this.addressPrefix = prop.addressPrefix ?? 'cosmos'; @@ -204,7 +210,7 @@ export class PKPCosmosWallet ); // Log the encoded signature. - this.pkpBase.log('stdSignature:', stdSignature); + this._logger.debug({ msg: 'stdSignature', stdSignature }); // Return the signed transaction and encoded signature. return { diff --git a/packages/pkp-ethers/src/lib/pkp-ethers.ts b/packages/pkp-ethers/src/lib/pkp-ethers.ts index 179ec04e6a..3cea06141c 100644 --- a/packages/pkp-ethers/src/lib/pkp-ethers.ts +++ b/packages/pkp-ethers/src/lib/pkp-ethers.ts @@ -24,7 +24,6 @@ import { ProgressCallback, } from '@ethersproject/json-wallets'; import { keccak256 } from '@ethersproject/keccak256'; -import { Logger } from '@ethersproject/logger'; import { defineReadOnly, resolveProperties } from '@ethersproject/properties'; import { randomBytes } from '@ethersproject/random'; import { @@ -33,10 +32,11 @@ import { UnsignedTransaction, } from '@ethersproject/transactions'; import { Wordlist } from '@ethersproject/wordlists'; -import { ethers, version, Wallet } from 'ethers'; +import { ethers, Wallet } from 'ethers'; import { InitError, + InvalidArgumentException, RPC_URL_BY_NETWORK, InvalidParamType, UnknownError, @@ -44,6 +44,7 @@ import { UnsupportedChainException, LIT_CHAINS, } from '@lit-protocol/constants'; +import { Logger, getChildLogger } from '@lit-protocol/logger'; import { PKPBase } from '@lit-protocol/pkp-base'; import { PKPClientHelpers, @@ -60,8 +61,6 @@ import { ETHTxRes, } from './pkp-ethers-types'; -const logger = new Logger(version); - export class PKPEthersWallet implements PKPWallet, @@ -70,6 +69,7 @@ export class PKPEthersWallet TypedDataSigner, PKPClientHelpers { + private readonly _logger: Logger; private readonly pkpBase: PKPBase; readonly address!: string; @@ -90,6 +90,10 @@ export class PKPEthersWallet constructor(prop: PKPEthersWalletProp) { this.pkpBase = PKPBase.createInstance(prop); + this._logger = getChildLogger({ + module: 'PKPEthersWallet', + ...(prop.debug ? { level: 'debug' } : {}), + }); const rpcUrl = prop.rpc || RPC_URL_BY_NETWORK[prop.litNodeClient.config.litNetwork]; @@ -198,13 +202,13 @@ export class PKPEthersWallet } async signTransaction(transaction: TransactionRequest): Promise { - this.pkpBase.log('signTransaction => transaction:', transaction); + this._logger.debug({ msg: 'signTransaction => transaction', transaction }); // Check if the LIT node client is connected, and connect if it's not. await this.pkpBase.ensureLitNodeClientReady(); const addr = await this.getAddress(); - this.pkpBase.log('signTransaction => addr:', addr); + this._logger.debug({ msg: 'signTransaction => addr', addr }); // if manual settings are set, use them if (this.manualGasPrice) { @@ -226,40 +230,57 @@ export class PKPEthersWallet try { if (!transaction['gasLimit']) { transaction.gasLimit = await this.rpcProvider.estimateGas(transaction); - this.pkpBase.log('signTransaction => gasLimit:', transaction.gasLimit); + this._logger.debug({ + msg: 'signTransaction => gasLimit', + gasLimit: transaction.gasLimit, + }); } if (!transaction['nonce']) { transaction.nonce = await this.rpcProvider.getTransactionCount(addr); - this.pkpBase.log('signTransaction => nonce:', transaction.nonce); + this._logger.debug({ + msg: 'signTransaction => nonce', + nonce: transaction.nonce, + }); } if (!transaction['chainId']) { transaction.chainId = (await this.rpcProvider.getNetwork()).chainId; - this.pkpBase.log('signTransaction => chainId:', transaction.chainId); + this._logger.debug({ + msg: 'signTransaction => chainId', + chainId: transaction.chainId, + }); } if (!transaction['gasPrice']) { transaction.gasPrice = await this.getGasPrice(); - this.pkpBase.log('signTransaction => gasPrice:', transaction.gasPrice); + this._logger.debug({ + msg: 'signTransaction => gasPrice', + gasPrice: transaction.gasPrice, + }); } } catch (err) { - this.pkpBase.log( - 'signTransaction => unable to populate transaction with details:', - err - ); + this._logger.debug({ + msg: 'signTransaction => unable to populate transaction with details', + err, + }); } return resolveProperties(transaction).then(async (tx) => { - this.pkpBase.log('tx.from:', tx.from); - this.pkpBase.log('this.address:', this.address); + this._logger.debug({ msg: 'tx.from', from: tx.from }); + this._logger.debug({ msg: 'this.address', address: this.address }); - if (tx.from != null) { + if (tx.from) { if (getAddress(tx.from) !== this.address) { - logger.throwArgumentError( - 'transaction from address mismatch', - 'transaction.from', - transaction.from + throw new InvalidArgumentException( + { + info: { + transaction, + tx, + address: this.address, + }, + }, + 'transaction from address mismatch' ); } delete tx.from; @@ -273,11 +294,11 @@ export class PKPEthersWallet let signature; if (this.pkpBase.useAction) { - this.pkpBase.log('running lit action => sigName: pkp-eth-sign-tx'); + this._logger.debug('running lit action => sigName: pkp-eth-sign-tx'); signature = (await this.pkpBase.runLitAction(toSign, 'pkp-eth-sign-tx')) .signature; } else { - this.pkpBase.log('requesting signature from nodes'); + this._logger.debug('requesting signature from nodes'); signature = (await this.pkpBase.runSign(toSign)).signature; } @@ -295,10 +316,10 @@ export class PKPEthersWallet const toSign = arrayify(hashMessage(message)); let signature; if (this.pkpBase.useAction) { - this.pkpBase.log('running lit action => sigName: pkp-eth-sign-message'); + this._logger.debug('running lit action => sigName: pkp-eth-sign-message'); signature = await this.runLitAction(toSign, 'pkp-eth-sign-message'); } else { - this.pkpBase.log('requesting signature from nodes'); + this._logger.debug('requesting signature from nodes'); signature = await this.runSign(toSign); } @@ -352,10 +373,10 @@ export class PKPEthersWallet let signature; if (this.pkpBase.useAction) { - this.pkpBase.log('running lit action => sigName: pkp-eth-sign-message'); + this._logger.debug('running lit action => sigName: pkp-eth-sign-message'); signature = await this.runLitAction(toSignBuffer, 'pkp-eth-sign-message'); } else { - this.pkpBase.log('requesting signature from nodes'); + this._logger.debug('requesting signature from nodes'); signature = await this.runSign(toSignBuffer); } @@ -401,7 +422,7 @@ export class PKPEthersWallet async sendTransaction(transaction: TransactionRequest | any): Promise { // : Promise - this.pkpBase.log('sendTransaction => transaction:', transaction); + this._logger.debug({ msg: 'sendTransaction => transaction', transaction }); let res; let signedTxn; @@ -557,7 +578,7 @@ export class PKPEthersWallet } _checkProvider(): void { - this.pkpBase.log( + this._logger.debug( 'This function is not implemented yet, but will skip it for now.' ); } diff --git a/packages/pkp-sui/src/lib/pkp-sui.ts b/packages/pkp-sui/src/lib/pkp-sui.ts index 776187b29d..7f57a1aee5 100644 --- a/packages/pkp-sui/src/lib/pkp-sui.ts +++ b/packages/pkp-sui/src/lib/pkp-sui.ts @@ -30,12 +30,12 @@ import { secp256k1 } from '@noble/curves/secp256k1'; import { blake2b } from '@noble/hashes/blake2b'; import { sha256 } from '@noble/hashes/sha256'; -import { PKPBase } from '@lit-protocol/pkp-base'; -import { PKPBaseProp, PKPWallet, SigResponse } from '@lit-protocol/types'; import { InvalidArgumentException, UnknownError, } from '@lit-protocol/constants'; +import { PKPBase } from '@lit-protocol/pkp-base'; +import { PKPBaseProp, PKPWallet, SigResponse } from '@lit-protocol/types'; import { getDigestFromBytes } from './TransactionBlockData'; diff --git a/packages/pkp-walletconnect/src/lib/pkp-walletconnect.ts b/packages/pkp-walletconnect/src/lib/pkp-walletconnect.ts index f3e703e6cc..ce433be0bc 100644 --- a/packages/pkp-walletconnect/src/lib/pkp-walletconnect.ts +++ b/packages/pkp-walletconnect/src/lib/pkp-walletconnect.ts @@ -29,6 +29,7 @@ import { ParamsMissingError, UnsupportedMethodError, } from '@lit-protocol/constants'; +import { Logger, getChildLogger } from '@lit-protocol/logger'; import { PKPEthersWallet, SupportedETHSigningMethods, @@ -46,19 +47,19 @@ export interface InitWalletConnectParams } export class PKPWalletConnect { + private readonly debug: boolean = false; + private readonly _logger: Logger; // WalletConnect client private client: IWeb3Wallet | undefined; // List of PKP wallets private pkpEthersWallets: PKPEthersWallet[] = []; - // For logging - private readonly debug: boolean = false; - private readonly PREFIX = '[PKPWalletConnect]'; - private readonly orange = '\x1b[33m'; - private readonly reset = '\x1b[0m'; - constructor(debug?: boolean) { this.debug = debug || false; + this._logger = getChildLogger({ + module: 'PKPWalletConnect', + ...(debug ? { level: 'debug' } : {}), + }); } /** @@ -689,7 +690,7 @@ export class PKPWalletConnect { client: IWeb3Wallet | undefined ): IWeb3Wallet { if (!client) { - this._log('WalletConnect client has not yet been initialized.'); + this._logger.debug('WalletConnect client has not yet been initialized.'); throw new InitError( {}, 'WalletConnect client has not yet been initialized. Please call initWalletConnect().' @@ -697,16 +698,4 @@ export class PKPWalletConnect { } return client; } - - /** - * Logs the provided arguments to the console if the `debug` property is set to true. - * - * @private - * @param {...any[]} args - The arguments to log to the console. - */ - private _log(...args: any[]): void { - if (this.debug) { - console.log(this.orange + this.PREFIX + this.reset, ...args); - } - } } diff --git a/packages/schemas/project.json b/packages/schemas/project.json index 44e7606077..d78fcc5a8e 100644 --- a/packages/schemas/project.json +++ b/packages/schemas/project.json @@ -28,7 +28,7 @@ "lintFilePatterns": ["packages/schemas/**/*.ts"] } }, - "testPackage": { + "test": { "executor": "@nx/jest:jest", "outputs": ["{workspaceRoot}/coverage/packages/schemas"], "options": { diff --git a/packages/schemas/src/index.ts b/packages/schemas/src/index.ts index 8259b5c291..034c4467f9 100644 --- a/packages/schemas/src/index.ts +++ b/packages/schemas/src/index.ts @@ -1 +1,4 @@ +export * from './lib/encryption'; +export * from './lib/models'; export * from './lib/schemas'; +export * from './lib/validation'; diff --git a/packages/schemas/src/lib/encryption.ts b/packages/schemas/src/lib/encryption.ts new file mode 100644 index 0000000000..9099a89455 --- /dev/null +++ b/packages/schemas/src/lib/encryption.ts @@ -0,0 +1,37 @@ +import { z } from 'zod'; + +import { MultipleAccessControlConditionsSchema } from '@lit-protocol/access-control-conditions-schemas'; + +import { AuthenticationContextSchema } from './models'; +import { AuthSigSchema, ChainedSchema, PricedSchema } from './schemas'; + +export const DecryptRequestBaseSchema = + MultipleAccessControlConditionsSchema.merge(ChainedSchema) + .merge(PricedSchema.partial()) + .extend({ + authContext: AuthenticationContextSchema, + authSig: AuthSigSchema.optional(), + }); + +export const EncryptResponseSchema = z.object({ + /** + * The base64-encoded ciphertext + */ + ciphertext: z.string(), + /** + * The hash of the data that was encrypted + */ + dataToEncryptHash: z.string(), +}); + +export const DecryptRequestSchema = EncryptResponseSchema.merge( + DecryptRequestBaseSchema +); + +export const EncryptRequestSchema = + MultipleAccessControlConditionsSchema.extend({ + /** + * The uint8array that you wish to encrypt + */ + dataToEncrypt: z.instanceof(Uint8Array), + }); diff --git a/packages/schemas/src/lib/models.ts b/packages/schemas/src/lib/models.ts new file mode 100644 index 0000000000..caf8048090 --- /dev/null +++ b/packages/schemas/src/lib/models.ts @@ -0,0 +1,253 @@ +import { SiweMessage } from 'siwe'; +import { z } from 'zod'; + +import { + AttenuationsObjectSchema, + AuthMethodSchema, + AuthSigSchema, + ChainSchema, + CosmosWalletTypeSchema, + DefinedJsonSchema, + EvmChainSchema, + ExecuteJsAdvancedOptionsSchema, + IpfsOptionsSchema, + LitAbilitySchema, + LitActionSdkParamsSchema, + LitResourcePrefixSchema, + PricedSchema, + SessionKeyPairSchema, +} from './schemas'; + +export const ILitResourceSchema = z.object({ + /** + * Gets the fully qualified resource key. + * @returns The fully qualified resource key. + */ + getResourceKey: z.function().args().returns(z.string()), + /** + * Validates that the given LIT ability is valid for this resource. + * @param litAbility The LIT ability to validate. + */ + isValidLitAbility: z.function().args(LitAbilitySchema).returns(z.boolean()), + toString: z.function().args().returns(z.string()), + resourcePrefix: LitResourcePrefixSchema.readonly(), + resource: z.string().readonly(), +}); + +/** + * A LIT resource ability is a combination of a LIT resource and a LIT ability. + * It specifies which LIT specific ability is being requested to be performed + * on the specified LIT resource. + * + * @description This object does NOT guarantee compatibility between the + * specified LIT resource and the specified LIT ability, and will be validated by + * the LIT-internal systems. + */ +export const LitResourceAbilityRequestSchema = z.object({ + resource: ILitResourceSchema, + ability: LitAbilitySchema, + data: z.record(z.string(), DefinedJsonSchema).optional(), +}); + +export const AuthCallbackParamsSchema = LitActionSdkParamsSchema.extend({ + /** + * The serialized session key pair to sign. If not provided, a session key pair will be fetched from localStorge or generated. + */ + sessionKey: SessionKeyPairSchema.optional(), + /** + * The chain you want to use. Find the supported list of chains here: https://developer.litprotocol.com/docs/supportedChains + */ + chain: EvmChainSchema, + /** + * The statement that describes what the user is signing. If the auth callback is for signing a SIWE message, you MUST add this statement to the end of the SIWE statement. + */ + statement: z.string().optional(), + /** + * The blockhash that the nodes return during the handshake + */ + nonce: z.string(), + /** + * Optional and only used with EVM chains. A list of resources to be passed to Sign In with Ethereum. These resources will be part of the Sign in with Ethereum signed message presented to the user. + */ + resources: z.array(z.string()).optional(), + /** + * Optional and only used with EVM chains right now. Set to true by default. Whether or not to ask Metamask or the user's wallet to switch chains before signing. This may be desired if you're going to have the user send a txn on that chain. On the other hand, if all you care about is the user's wallet signature, then you probably don't want to make them switch chains for no reason. Pass false here to disable this chain switching behavior. + */ + switchChain: z.boolean().optional(), + // --- Following for Session Auth --- + expiration: z.string().optional(), + uri: z.string().optional(), + /** + * Cosmos wallet type, to support mutliple popular cosmos wallets + * Keplr & Cypher -> window.keplr + * Leap -> window.leap + */ + cosmosWalletType: CosmosWalletTypeSchema.optional(), + /** + * Optional project ID for WalletConnect V2. Only required if one is using checkAndSignAuthMessage and wants to display WalletConnect as an option. + */ + walletConnectProjectId: z.string().optional(), + resourceAbilityRequests: z.array(LitResourceAbilityRequestSchema).optional(), +}); + +export const AuthCallbackSchema = z + .function() + .args(AuthCallbackParamsSchema) + .returns(z.promise(AuthSigSchema)); + +export const ISessionCapabilityObjectSchema = z.object({ + attenuations: AttenuationsObjectSchema, + proofs: z.array(z.string()), // CID[] + statement: z.string(), + addProof: z.function().args(z.string()).returns(z.void()), // (proof: CID) => void + /** + * Add an arbitrary attenuation to the session capability object. + * + * @description We do NOT recommend using this unless with the LIT specific + * abilities. Use this ONLY if you know what you are doing. + */ + addAttenuation: z + .function() + .args( + z.string(), + z.string().optional(), + z.string().optional(), + z.record(z.string(), DefinedJsonSchema).optional() + ) + .returns(z.void()), + addToSiweMessage: z + .function() + .args(z.instanceof(SiweMessage)) + .returns(z.instanceof(SiweMessage)), + /** + * Encode the session capability object as a SIWE resource. + */ + encodeAsSiweResource: z.function().returns(z.string()), + + /** LIT specific methods */ + + /** + * Add a LIT-specific capability to the session capability object for the + * specified resource. + * + * @param litResource The LIT-specific resource being added. + * @param ability The LIT-specific ability being added. + * @param [data] + * @example If the ability is `LitAbility.AccessControlConditionDecryption`, + * then the resource should be the hashed key value of the access control + * condition. + * @example If the ability is `LitAbility.AccessControlConditionSigning`, + * then the resource should be the hashed key value of the access control + * condition. + * @example If the ability is `LitAbility.PKPSigning`, then the resource + * should be the PKP token ID. + * @example If the ability is `LitAbility.RateLimitIncreaseAuth`, then the + * resource should be the RLI token ID. + * @example If the ability is `LitAbility.LitActionExecution`, then the + * resource should be the Lit Action IPFS CID. + * @throws If the ability is not a LIT-specific ability. + */ + addCapabilityForResource: z + .function() + .args( + ILitResourceSchema, + LitAbilitySchema, + z.record(z.string(), DefinedJsonSchema).optional() + ) + .returns(z.void()), + /** + * Verify that the session capability object has the specified LIT-specific + * capability for the specified resource. + */ + verifyCapabilitiesForResource: z + .function() + .args(ILitResourceSchema, LitAbilitySchema) + .returns(z.boolean()), + /** + * Add a wildcard ability to the session capability object for the specified + * resource. + */ + addAllCapabilitiesForResource: z + .function() + .args(ILitResourceSchema) + .returns(z.void()), +}); + +export const AuthenticationContextSchema = LitActionSdkParamsSchema.extend({ + /** + * Session signature properties shared across all functions that generate session signatures. + */ + pkpPublicKey: z.string().optional(), + + /** + * When this session signature will expire. After this time is up you will need to reauthenticate, generating a new session signature. The default time until expiration is 24 hours. The formatting is an [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339) timestamp. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + expiration: z.any().optional(), + + /** + * The chain to use for the session signature and sign the session key. This value is almost always `ethereum`. If you're using EVM, this parameter isn't very important. + */ + chain: ChainSchema.optional(), + + /** + * An array of resource abilities that you want to request for this session. These will be signed with the session key. + * For example, an ability is added to grant a session permission to decrypt content associated with a particular Access Control Conditions (ACC) hash. When trying to decrypt, this ability is checked in the `resourceAbilityRequests` to verify if the session has the required decryption capability. + * @example + * [{ resource: new LitAccessControlConditionResource('someAccHash`), ability: LitAbility.AccessControlConditionDecryption }] + */ + resourceAbilityRequests: z.array(LitResourceAbilityRequestSchema), + + /** + * The session capability object that you want to request for this session. + * It is likely you will not need this, as the object will be automatically derived from the `resourceAbilityRequests`. + * If you pass nothing, then this will default to a wildcard for each type of resource you're accessing. + * The wildcard means that the session will be granted the ability to perform operations with any access control condition. + */ + sessionCapabilityObject: ISessionCapabilityObjectSchema.optional(), + + /** + * If you want to ask MetaMask to try and switch the user's chain, you may pass true here. This will only work if the user is using MetaMask, otherwise this will be ignored. + */ + switchChain: z.boolean().optional(), + /** + * The serialized session key pair to sign. + * If not provided, a session key pair will be fetched from localStorage or generated. + */ + sessionKey: SessionKeyPairSchema.optional(), + + /** + * Not limited to capacityDelegationAuthSig. Other AuthSigs with other purposes can also be in this array. + */ + capabilityAuthSigs: z.array(AuthSigSchema).optional(), + + /** + * This is a callback that will be used to generate an AuthSig within the session signatures. It's inclusion is required, as it defines the specific resources and abilities that will be allowed for the current session. + */ + authNeededCallback: AuthCallbackSchema.optional(), + + authMethods: z.array(AuthMethodSchema).optional(), + + ipfsOptions: IpfsOptionsSchema.optional(), +}); + +export const JsonExecutionSdkParamsBaseSchema = LitActionSdkParamsSchema.pick({ + jsParams: true, +}) + .merge(ExecuteJsAdvancedOptionsSchema) + .merge(PricedSchema.partial()) + .extend({ + /** + * JS code to run on the nodes + */ + code: z.string().optional(), + /** + * The IPFS ID of some JS code to run on the nodes + */ + ipfsId: z.string().optional(), + + /** + * auth context + */ + authContext: AuthenticationContextSchema, + }); diff --git a/packages/schemas/src/lib/schemas.ts b/packages/schemas/src/lib/schemas.ts index 05d9c2a9b3..e0aeaaba5d 100644 --- a/packages/schemas/src/lib/schemas.ts +++ b/packages/schemas/src/lib/schemas.ts @@ -1,21 +1,49 @@ import { z } from 'zod'; import { - LIT_ABILITY, LIT_AUTH_SIG_CHAIN_KEYS, + LIT_ABILITY, + LIT_CHAINS_KEYS, LIT_NETWORK, LIT_RESOURCE_PREFIX, VMTYPE, } from '@lit-protocol/constants'; +const definedLiteralSchema = z.union([z.string(), z.number(), z.boolean()]); +export type DefinedLiteral = z.infer; +export type DefinedJson = + | DefinedLiteral + | { [key: string]: DefinedJson } + | DefinedJson[]; +export const DefinedJsonSchema: z.ZodType = z.lazy(() => + z.union([ + definedLiteralSchema, + z.array(DefinedJsonSchema), + z.record(DefinedJsonSchema), + ]) +); + const literalSchema = z.union([z.string(), z.number(), z.boolean(), z.null()]); -type Literal = z.infer; -type Json = Literal | { [key: string]: Json } | Json[]; +export type Literal = z.infer; +export type Json = Literal | { [key: string]: Json } | Json[]; export const JsonSchema: z.ZodType = z.lazy(() => z.union([literalSchema, z.array(JsonSchema), z.record(JsonSchema)]) ); +export const HexSchema = z.string().regex(/^0x[0-9a-fA-F]+$/); export const ChainSchema = z.string(); +export const EvmChainSchema = z.enum(LIT_CHAINS_KEYS); + +export const ChainedSchema = z.object({ + /** + * The chain name of the chain that will be used. See LIT_CHAINS for currently supported chains. + */ + chain: ChainSchema, +}); + +export const PricedSchema = z.object({ + userMaxPrice: z.bigint(), +}); export const LitNetworkKeysSchema = z.nativeEnum(LIT_NETWORK); @@ -101,3 +129,139 @@ export const AllLitChainsSchema = z.record( z.string(), z.union([LitEVMChainSchema, LitSVMChainSchema, LitCosmosChainSchema]) ); + +export const AuthSigSchema = z.object({ + /** + * The signature produced by signing the `signMessage` property with the corresponding private key for the `address` property. + */ + sig: z.string(), + /** + * The method used to derive the signature (e.g, `web3.eth.personal.sign`). + */ + derivedVia: z.string(), + /** + * An [ERC-5573](https://eips.ethereum.org/EIPS/eip-5573) SIWE (Sign-In with Ethereum) message. This can be prepared by using one of the `createSiweMessage` functions from the [`@auth-helpers`](https://v6-api-doc-lit-js-sdk.vercel.app/modules/auth_helpers_src.html) package: + * - [`createSiweMessage`](https://v6-api-doc-lit-js-sdk.vercel.app/functions/auth_helpers_src.createSiweMessage.html) + * - [`createSiweMessageWithRecaps](https://v6-api-doc-lit-js-sdk.vercel.app/functions/auth_helpers_src.createSiweMessageWithRecaps.html) + * - [`createSiweMessageWithCapacityDelegation`](https://v6-api-doc-lit-js-sdk.vercel.app/functions/auth_helpers_src.createSiweMessageWithCapacityDelegation.html) + */ + signedMessage: z.string(), + /** + * The Ethereum address that was used to sign `signedMessage` and create the `sig`. + */ + address: z.string(), + /** + * An optional property only seen when generating session signatures, this is the signing algorithm used to generate session signatures. + */ + algo: z.string().optional(), +}); + +export const ResponseStrategySchema = z.enum([ + 'leastCommon', + 'mostCommon', + 'custom', +]); + +export const LitActionResponseStrategySchema = z.object({ + strategy: ResponseStrategySchema, + customFilter: z + .function() + .args(z.array(z.record(z.string(), z.string()))) + .returns(z.record(z.string(), z.string())) + .optional(), +}); + +export const IpfsOptionsSchema = z.object({ + overwriteCode: z.boolean().optional(), + gatewayUrl: z.string().startsWith('https://').endsWith('/ipfs/').optional(), +}); + +export const ExecuteJsAdvancedOptionsSchema = z.object({ + /** + * a strategy for processing `response` objects returned from the + * Lit Action execution context + */ + responseStrategy: LitActionResponseStrategySchema.optional(), + /** + * Allow overriding the default `code` property in the `JsonExecutionSdkParams` + */ + ipfsOptions: IpfsOptionsSchema.optional(), + /** + * Only run the action on a single node; this will only work if all code in your action is non-interactive + */ + useSingleNode: z.boolean().optional(), +}); + +// pub struct AuthMethod { +// pub auth_method_type: u32, +// pub access_token: String, +// } +export const AuthMethodSchema = z.object({ + authMethodType: z.number(), + accessToken: z.string(), +}); + +// TODO make it forcefully have litActionCode OR litActionIpfsId, one and only one of them MUST be provided +export const LitActionSdkParamsSchema = z.object({ + /** + * The litActionCode is the JavaScript code that will run on the nodes. + * You will need to convert the string content to base64. + * + * @example + * Buffer.from(litActionCodeString).toString('base64'); + */ + litActionCode: z.string().optional(), + /** + * You can obtain the Lit Action IPFS CID by converting your JavaScript code using this tool: + * https://explorer.litprotocol.com/create-action + * + * Note: You do not need to pin your code to IPFS necessarily. + * You can convert a code string to an IPFS hash using the "ipfs-hash-only" or 'ipfs-unixfs-importer' library. + * + * @example + * async function stringToIpfsHash(input: string): Promise { + * // Convert the input string to a Buffer + * const content = Buffer.from(input); + * + * // Import the content to create an IPFS file + * const files = importer([{ content }], {} as any, { onlyHash: true }); + * + * // Get the first (and only) file result + * const result = (await files.next()).value; + * + * const ipfsHash = (result as any).cid.toString(); + * if (!ipfsHash.startsWith('Qm')) { + * throw new Error('Generated hash does not start with Qm'); + * } + * + * return ipfsHash; + * } + */ + litActionIpfsId: z.string().optional(), + /** + * An object that contains params to expose to the Lit Action. These will be injected to the JS runtime before your code runs, so you can use any of these as normal variables in your Lit Action. + */ + jsParams: z + .union([ + z.any(), // TODO what happens if jsParams is a string/number/primitive? + z + .object({ + publicKey: z.string().optional(), + sigName: z.string().optional(), + }) + .catchall(z.any()), + ]) + .optional(), +}); + +export const CosmosWalletTypeSchema = z.enum(['keplr', 'leap'] as const); + +export const SessionKeyPairSchema = z.object({ + publicKey: z.string(), + secretKey: z.string(), +}); + +export const AttenuationsObjectSchema = z.record( + z.string(), + z.record(z.string(), z.array(DefinedJsonSchema)) +); diff --git a/packages/schemas/src/lib/validation.ts b/packages/schemas/src/lib/validation.ts new file mode 100644 index 0000000000..7561f3eea4 --- /dev/null +++ b/packages/schemas/src/lib/validation.ts @@ -0,0 +1,33 @@ +import { z } from 'zod'; +import { fromError, isZodErrorLike } from 'zod-validation-error'; + +import { InvalidArgumentException } from '@lit-protocol/constants'; + +export function throwFailedValidation( + functionName: string, + params: unknown, + e: unknown +): never { + throw new InvalidArgumentException( + { + info: { + params, + function: functionName, + }, + cause: isZodErrorLike(e) ? fromError(e) : e, + }, + `Invalid params for ${functionName}. Check error for details.` + ); +} + +export function applySchemaWithValidation( + functionName: string, + params: T, + schema: z.ZodType +): T { + try { + return schema.parse(params); + } catch (e) { + throwFailedValidation(functionName, params, e); + } +} diff --git a/packages/types/src/lib/interfaces.ts b/packages/types/src/lib/interfaces.ts index f6da23aa46..0888ce1ecf 100644 --- a/packages/types/src/lib/interfaces.ts +++ b/packages/types/src/lib/interfaces.ts @@ -1,21 +1,37 @@ import { Provider } from '@ethersproject/abstract-provider'; +import { z } from 'zod'; +import { MultipleAccessControlConditionsSchema } from '@lit-protocol/access-control-conditions-schemas'; import { LitEVMChainKeys } from '@lit-protocol/constants'; +import { + AuthCallbackSchema, + AuthenticationContextSchema, + AuthMethodSchema, + AuthSigSchema, + DecryptRequestSchema, + EncryptResponseSchema, + EncryptRequestSchema, + ExecuteJsAdvancedOptionsSchema, + IpfsOptionsSchema, + JsonExecutionSdkParamsBaseSchema, + LitActionResponseStrategySchema, + LitActionSdkParamsSchema, + SessionKeyPairSchema, +} from '@lit-protocol/schemas'; import { SigType } from './EndpointResponses'; import { ILitNodeClient } from './ILitNodeClient'; import { ISessionCapabilityObject, LitResourceAbilityRequest } from './models'; import { - AcceptedFileType, AccessControlConditions, Chain, EvmContractConditions, + Hex, IRelayAuthStatus, JsonRequest, LIT_NETWORKS_KEYS, LitContractContext, LitContractResolverContext, - ResponseStrategy, SolRpcConditions, UnifiedAccessControlConditions, } from './types'; @@ -25,35 +41,7 @@ import { /** * An `AuthSig` represents a cryptographic proof of ownership for an Ethereum address, created by signing a standardized [ERC-5573 SIWE ReCap](https://eips.ethereum.org/EIPS/eip-5573) (Sign-In with Ethereum) message. This signature serves as a verifiable credential, allowing the Lit network to associate specific permissions, access rights, and operational parameters with the signing Ethereum address. By incorporating various capabilities, resources, and parameters into the SIWE message before signing, the resulting `AuthSig` effectively defines and communicates these authorizations and specifications for the address within the Lit network. */ -export interface AuthSig { - /** - * The signature produced by signing the `signMessage` property with the corresponding private key for the `address` property. - */ - sig: string; - - /** - * The method used to derive the signature (e.g, `web3.eth.personal.sign`). - */ - derivedVia: string; - - /** - * An [ERC-5573](https://eips.ethereum.org/EIPS/eip-5573) SIWE (Sign-In with Ethereum) message. This can be prepared by using one of the `createSiweMessage` functions from the [`@auth-helpers`](https://v6-api-doc-lit-js-sdk.vercel.app/modules/auth_helpers_src.html) package: - * - [`createSiweMessage`](https://v6-api-doc-lit-js-sdk.vercel.app/functions/auth_helpers_src.createSiweMessage.html) - * - [`createSiweMessageWithRecaps](https://v6-api-doc-lit-js-sdk.vercel.app/functions/auth_helpers_src.createSiweMessageWithRecaps.html) - * - [`createSiweMessageWithCapacityDelegation`](https://v6-api-doc-lit-js-sdk.vercel.app/functions/auth_helpers_src.createSiweMessageWithCapacityDelegation.html) - */ - signedMessage: string; - - /** - * The Ethereum address that was used to sign `signedMessage` and create the `sig`. - */ - address: string; - - /** - * An optional property only seen when generating session signatures, this is the signing algorithm used to generate session signatures. - */ - algo?: string; -} +export type AuthSig = z.infer; export interface AuthCallbackParams extends LitActionSdkParams { /** @@ -236,7 +224,6 @@ export interface NodeSetRequired { export interface JsonSignSessionKeyRequestV1 extends Pick, - Pick, NodeSetRequired { sessionKey: string; authMethods: AuthMethod[]; @@ -247,11 +234,11 @@ export interface JsonSignSessionKeyRequestV1 // custom auth params code?: string; + litActionIpfsId?: string; } export interface JsonSignSessionKeyRequestV2 extends Pick, - Pick, NodeSetRequired { sessionKey: string; authMethods: AuthMethod[]; @@ -262,6 +249,7 @@ export interface JsonSignSessionKeyRequestV2 // custom auth params code?: string; + litActionIpfsId?: string; signingScheme: T; } @@ -334,21 +322,9 @@ export interface JsonSigningResourceId { extraData: string; } -// CHANGE: `MultipleAccessControlConditions` is basically identical to `AccessControlConditions`, -// but due to the way the types are deeply nested, we will revisit this later. -export interface MultipleAccessControlConditions { - // The access control conditions that the user must meet to obtain this signed token. This could be possession of an NFT, for example. You must pass either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions. - accessControlConditions?: AccessControlConditions; - - // EVM Smart Contract access control conditions that the user must meet to obtain this signed token. This could be possession of an NFT, for example. This is different than accessControlConditions because accessControlConditions only supports a limited number of contract calls. evmContractConditions supports any contract call. You must pass either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions. - evmContractConditions?: EvmContractConditions; - - // Solana RPC call conditions that the user must meet to obtain this signed token. This could be possession of an NFT, for example. - solRpcConditions?: SolRpcConditions; - - // An array of unified access control conditions. You may use AccessControlCondition, EVMContractCondition, or SolRpcCondition objects in this array, but make sure you add a conditionType for each one. You must pass either accessControlConditions or evmContractConditions or solRpcConditions or unifiedAccessControlConditions. - unifiedAccessControlConditions?: UnifiedAccessControlConditions; -} +export type MultipleAccessControlConditions = z.infer< + typeof MultipleAccessControlConditionsSchema +>; export interface JsonAccsRequest extends MultipleAccessControlConditions { // The chain name of the chain that you are querying. See ALL_LIT_CHAINS for currently supported chains. @@ -423,52 +399,19 @@ export interface JsonEncryptionRetrieveRequest extends JsonAccsRequest { toDecrypt: string; } -export interface LitActionResponseStrategy { - strategy: ResponseStrategy; - customFilter?: ( - responses: Record[] - ) => Record; -} - -export interface IpfsOptions { - overwriteCode?: boolean; - gatewayUrl?: `https://${string}/ipfs/`; -} - -export interface JsonExecutionSdkParams - extends Pick, - ExecuteJsAdvancedOptions { - /** - * JS code to run on the nodes - */ - code?: string; - - /** - * The IPFS ID of some JS code to run on the nodes - */ - ipfsId?: string; - - authContext: AuthenticationContext; - userMaxPrice?: bigint; -} +export type LitActionResponseStrategy = z.infer< + typeof LitActionResponseStrategySchema +>; -export interface ExecuteJsAdvancedOptions { - /** - * a strategy for proccessing `reponse` objects returned from the - * Lit Action execution context - */ - responseStrategy?: LitActionResponseStrategy; +export type IpfsOptions = z.infer; - /** - * Allow overriding the default `code` property in the `JsonExecutionSdkParams` - */ - ipfsOptions?: IpfsOptions; +export type ExecuteJsAdvancedOptions = z.infer< + typeof ExecuteJsAdvancedOptionsSchema +>; - /** - * Only run the action on a single node; this will only work if all code in your action is non-interactive - */ - useSingleNode?: boolean; -} +export type JsonExecutionSdkParams = z.infer< + typeof JsonExecutionSdkParamsBaseSchema +>; export interface JsonExecutionRequest extends Pick, @@ -485,56 +428,11 @@ export interface JsonExecutionRequest authMethods?: AuthMethod[]; } -export interface DecryptRequestBase extends MultipleAccessControlConditions { - /** - * The chain name of the chain that this contract is deployed on. See LIT_CHAINS for currently supported chains. - */ - chain: Chain; - authSig?: AuthSig; - authContext: AuthenticationContext; - userMaxPrice?: bigint; -} -export interface EncryptSdkParams extends MultipleAccessControlConditions { - dataToEncrypt: Uint8Array; -} +export type EncryptSdkParams = z.infer; -export interface EncryptRequest extends DecryptRequestBase { - // The data that you wish to encrypt as a Uint8Array - dataToEncrypt: Uint8Array; -} +export type EncryptResponse = z.infer; -export interface EncryptResponse { - /** - * The base64-encoded ciphertext - */ - ciphertext: string; - - /** - * The hash of the data that was encrypted - */ - dataToEncryptHash: string; -} - -export interface EncryptUint8ArrayRequest - extends MultipleAccessControlConditions { - /** - * The uint8array that you wish to encrypt - */ - dataToEncrypt: Uint8Array; -} - -export interface EncryptStringRequest extends MultipleAccessControlConditions { - /** - * String that you wish to encrypt - */ - dataToEncrypt: string; -} - -export interface EncryptFileRequest extends DecryptRequestBase { - file: AcceptedFileType; -} - -export interface DecryptRequest extends EncryptResponse, DecryptRequestBase {} +export type DecryptRequest = z.infer; export interface DecryptResponse { // The decrypted data as a Uint8Array @@ -549,7 +447,7 @@ export interface SigResponse { r: string; s: string; recid: number; - signature: `0x${string}`; + signature: Hex; publicKey: string; // pkp public key (no 0x prefix) dataSigned: string; } @@ -721,46 +619,6 @@ export interface JsonHandshakeResponse { latestBlockhash?: string; } -export interface EncryptToJsonProps extends MultipleAccessControlConditions { - /** - * The chain - */ - chain: string; - - /** - * The string you wish to encrypt - */ - string?: string; - - /** - * The file you wish to encrypt - */ - file?: AcceptedFileType; - - /** - * An instance of LitNodeClient that is already connected - */ - litNodeClient: ILitNodeClient; - - authContext: AuthenticationContext; -} - -export type EncryptToJsonDataType = 'string' | 'file'; - -export interface EncryptToJsonPayload extends DecryptRequestBase { - ciphertext: string; - dataToEncryptHash: string; - dataType: EncryptToJsonDataType; -} - -export interface DecryptFromJsonProps { - // An instance of LitNodeClient that is already connected - litNodeClient: ILitNodeClient; - - parsedJsonData: EncryptToJsonPayload; - authContext: AuthenticationContext; -} - /** * Struct in rust * ----- @@ -783,17 +641,11 @@ export interface SessionKeySignedMessage { nodeAddress: string; } -export interface SessionKeyPair { - publicKey: string; - secretKey: string; -} +export type SessionKeyPair = z.infer; /** ========== Session ========== */ -export interface AuthMethod { - authMethodType: number; - accessToken: string; -} +export type AuthMethod = z.infer; // pub struct JsonSignSessionKeyRequest { // pub session_key: String, @@ -854,65 +706,9 @@ export interface SignSessionKeyResponse { authSig: AuthSig; } -export interface AuthenticationContext extends LitActionSdkParams { - /** - * Session signature properties shared across all functions that generate session signatures. - */ - pkpPublicKey?: string; - - /** - * When this session signature will expire. After this time is up you will need to reauthenticate, generating a new session signature. The default time until expiration is 24 hours. The formatting is an [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339) timestamp. - */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - expiration?: any; - - /** - * The chain to use for the session signature and sign the session key. This value is almost always `ethereum`. If you're using EVM, this parameter isn't very important. - */ - chain?: Chain; - - /** - * An array of resource abilities that you want to request for this session. These will be signed with the session key. - * For example, an ability is added to grant a session permission to decrypt content associated with a particular Access Control Conditions (ACC) hash. When trying to decrypt, this ability is checked in the `resourceAbilityRequests` to verify if the session has the required decryption capability. - * @example - * [{ resource: new LitAccessControlConditionResource('someAccHash`), ability: LitAbility.AccessControlConditionDecryption }] - */ - resourceAbilityRequests: LitResourceAbilityRequest[]; - - /** - * The session capability object that you want to request for this session. - * It is likely you will not need this, as the object will be automatically derived from the `resourceAbilityRequests`. - * If you pass nothing, then this will default to a wildcard for each type of resource you're accessing. - * The wildcard means that the session will be granted the ability to perform operations with any access control condition. - */ - sessionCapabilityObject?: ISessionCapabilityObject; - - /** - * If you want to ask MetaMask to try and switch the user's chain, you may pass true here. This will only work if the user is using MetaMask, otherwise this will be ignored. - */ - switchChain?: boolean; - /** - * The serialized session key pair to sign. - * If not provided, a session key pair will be fetched from localStorage or generated. - */ - sessionKey?: SessionKeyPair; +export type AuthenticationContext = z.infer; - /** - * Not limited to capacityDelegationAuthSig. Other AuthSigs with other purposes can also be in this array. - */ - capabilityAuthSigs?: AuthSig[]; - - /** - * This is a callback that will be used to generate an AuthSig within the session signatures. It's inclusion is required, as it defines the specific resources and abilities that will be allowed for the current session. - */ - authNeededCallback?: AuthCallback; - - authMethods?: AuthMethod[]; - - ipfsOptions?: IpfsOptions; -} - -export type AuthCallback = (params: AuthCallbackParams) => Promise; +export type AuthCallback = z.infer; /** * A map of node addresses to the session signature payload @@ -1359,57 +1155,7 @@ export interface CapacityCreditsRes { capacityDelegationAuthSig: AuthSig; } -export interface LitActionSdkParams { - /** - * The litActionCode is the JavaScript code that will run on the nodes. - * You will need to convert the string content to base64. - * - * @example - * Buffer.from(litActionCodeString).toString('base64'); - */ - litActionCode?: string; - - /** - * You can obtain the Lit Action IPFS CID by converting your JavaScript code using this tool: - * https://explorer.litprotocol.com/create-action - * - * Note: You do not need to pin your code to IPFS necessarily. - * You can convert a code string to an IPFS hash using the "ipfs-hash-only" or 'ipfs-unixfs-importer' library. - * - * @example - * async function stringToIpfsHash(input: string): Promise { - * // Convert the input string to a Buffer - * const content = Buffer.from(input); - * - * // Import the content to create an IPFS file - * const files = importer([{ content }], {} as any, { onlyHash: true }); - * - * // Get the first (and only) file result - * const result = (await files.next()).value; - * - * const ipfsHash = (result as any).cid.toString(); - * if (!ipfsHash.startsWith('Qm')) { - * throw new Error('Generated hash does not start with Qm'); - * } - * - * return ipfsHash; - * } - */ - litActionIpfsId?: string; - - /** - * An object that contains params to expose to the Lit Action. These will be injected to the JS runtime before your code runs, so you can use any of these as normal variables in your Lit Action. - */ - jsParams?: - | { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - [key: string]: any; - publicKey?: string; - sigName?: string; - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - | any; -} +export type LitActionSdkParams = z.infer; export interface LitEndpoint { path: string; diff --git a/packages/types/src/lib/models.ts b/packages/types/src/lib/models.ts index a86736b55d..57e67c6c4b 100644 --- a/packages/types/src/lib/models.ts +++ b/packages/types/src/lib/models.ts @@ -1,115 +1,22 @@ -import { SiweMessage } from 'siwe'; +import { z } from 'zod'; -import { LitAbility, LitResourcePrefix } from './types'; +import { + AttenuationsObjectSchema, + ISessionCapabilityObjectSchema, + LitResourceAbilityRequestSchema, + ILitResourceSchema, +} from '@lit-protocol/schemas'; -export type PlainJSON = - | boolean - | number - | string - | { [key: string]: PlainJSON } - | PlainJSON[]; -export type AttenuationsObject = Record>; -export type CID = string; - -export interface ISessionCapabilityObject { - get attenuations(): AttenuationsObject; - get proofs(): CID[]; - get statement(): string; - addProof(proof: CID): void; - - /** - * Add an arbitrary attenuation to the session capability object. - * - * @description We do NOT recommend using this unless with the LIT specific - * abilities. Use this ONLY if you know what you are doing. - */ - addAttenuation( - resource: string, - namespace?: string, - name?: string, - restriction?: Record - ): void; - addToSiweMessage(siwe: SiweMessage): SiweMessage; - - /** - * Encode the session capability object as a SIWE resource. - */ - encodeAsSiweResource(): string; - - /** LIT specific methods */ +export type AttenuationsObject = z.infer; - /** - * Add a LIT-specific capability to the session capability object for the - * specified resource. - * - * @param litResource The LIT-specific resource being added. - * @param ability The LIT-specific ability being added. - * @example If the ability is `LitAbility.AccessControlConditionDecryption`, - * then the resource should be the hashed key value of the access control - * condition. - * @example If the ability is `LitAbility.AccessControlConditionSigning`, - * then the resource should be the hashed key value of the access control - * condition. - * @example If the ability is `LitAbility.PKPSigning`, then the resource - * should be the PKP token ID. - * @example If the ability is `LitAbility.RateLimitIncreaseAuth`, then the - * resource should be the RLI token ID. - * @example If the ability is `LitAbility.LitActionExecution`, then the - * resource should be the Lit Action IPFS CID. - * @throws If the ability is not a LIT-specific ability. - */ - addCapabilityForResource( - litResource: ILitResource, - ability: LitAbility, - data?: unknown - ): void; - - /** - * Verify that the session capability object has the specified LIT-specific - * capability for the specified resource. - */ - verifyCapabilitiesForResource( - litResource: ILitResource, - ability: LitAbility - ): boolean; - - /** - * Add a wildcard ability to the session capability object for the specified - * resource. - */ - addAllCapabilitiesForResource(litResource: ILitResource): void; -} - -export interface ILitResource { - /** - * Gets the fully qualified resource key. - * @returns The fully qualified resource key. - */ - getResourceKey(): string; - - /** - * Validates that the given LIT ability is valid for this resource. - * @param litAbility The LIT ability to validate. - */ - isValidLitAbility(litAbility: LitAbility): boolean; +export type CID = string; - toString(): string; +export type ISessionCapabilityObject = z.infer< + typeof ISessionCapabilityObjectSchema +>; - readonly resourcePrefix: LitResourcePrefix; - readonly resource: string; -} +export type ILitResource = z.infer; -/** - * A LIT resource ability is a combination of a LIT resource and a LIT ability. - * It specifies which LIT specific ability is being requested to be performed - * on the specified LIT resource. - * - * @description This object does NOT guarantee compatibility between the - * specified LIT resource and the specified LIT ability, and will be validated by - * the LIT-internal systems. - */ -export interface LitResourceAbilityRequest { - resource: ILitResource; - ability: LitAbility; - data?: unknown; -} +export type LitResourceAbilityRequest = z.infer< + typeof LitResourceAbilityRequestSchema +>; diff --git a/packages/types/src/lib/types.ts b/packages/types/src/lib/types.ts index d0a25ad7d1..490894cfa7 100644 --- a/packages/types/src/lib/types.ts +++ b/packages/types/src/lib/types.ts @@ -8,6 +8,7 @@ import { LitAbilitySchema, LitNetworkKeysSchema, LitResourcePrefixSchema, + ResponseStrategySchema, TokenInfoSchema, } from '@lit-protocol/schemas'; @@ -29,7 +30,15 @@ import type { SolAcc, } from '@lit-protocol/access-control-conditions-schemas'; -export type ConditionType = 'solRpc' | 'evmBasic' | 'evmContract' | 'cosmos'; +export { + DefinedJson, + DefinedLiteral, + Json, + Literal, +} from '@lit-protocol/schemas'; + +// Zod only derives string, not giving real type safety over it +export type Hex = `0x${string}`; // z.infer; // Backwards compatibility with @lit-protocol/accs-schemas export type AccsDefaultParams = EvmBasicAcc; @@ -188,7 +197,7 @@ export interface LitContractResolverContext { provider?: ethers.providers.JsonRpcProvider; } -export type ResponseStrategy = 'leastCommon' | 'mostCommon' | 'custom'; +export type ResponseStrategy = z.infer; export type LitResourcePrefix = z.infer; diff --git a/packages/uint8arrays/.babelrc b/packages/uint8arrays/.babelrc deleted file mode 100644 index 158083d278..0000000000 --- a/packages/uint8arrays/.babelrc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "presets": [ - [ - "@nx/web/babel", - { - "useBuiltIns": "usage" - } - ] - ] -} diff --git a/packages/uint8arrays/README.md b/packages/uint8arrays/README.md deleted file mode 100644 index a97c419da8..0000000000 --- a/packages/uint8arrays/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Quick Start - -This submodule provides functions to make dealing with Uint8Arrays easier. - -Here are the two main functions: - -- uint8arrayFromString -- uint8arrayToString - -### node.js / browser - -``` -yarn add @lit-protocol/uint8array -``` diff --git a/packages/uint8arrays/jest.config.ts b/packages/uint8arrays/jest.config.ts deleted file mode 100644 index 94e336b29a..0000000000 --- a/packages/uint8arrays/jest.config.ts +++ /dev/null @@ -1,16 +0,0 @@ -/* eslint-disable */ -export default { - displayName: 'uint8arrays', - preset: '../../jest.preset.js', - globals: { - 'ts-jest': { - tsconfig: '/tsconfig.spec.json', - }, - }, - transform: { - '^.+\\.[t]s$': 'ts-jest', - }, - moduleFileExtensions: ['ts', 'js', 'html'], - coverageDirectory: '../../coverage/packages/uint8arrays', - setupFilesAfterEnv: ['../../jest.setup.js'], -}; diff --git a/packages/uint8arrays/package.json b/packages/uint8arrays/package.json deleted file mode 100644 index 2414782ab1..0000000000 --- a/packages/uint8arrays/package.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "name": "@lit-protocol/uint8arrays", - "license": "MIT", - "homepage": "https://github.com/Lit-Protocol/js-sdk", - "repository": { - "type": "git", - "url": "https://github.com/LIT-Protocol/js-sdk" - }, - "keywords": [ - "library" - ], - "bugs": { - "url": "https://github.com/LIT-Protocol/js-sdk/issues" - }, - "type": "commonjs", - "publishConfig": { - "access": "public", - "directory": "../../dist/packages/uint8arrays" - }, - "gitHead": "0d7334c2c55f448e91fe32f29edc5db8f5e09e4b", - "tags": [ - "universal" - ], - "version": "8.0.0-alpha.0", - "main": "./dist/src/index.js", - "typings": "./dist/src/index.d.ts" -} diff --git a/packages/uint8arrays/project.json b/packages/uint8arrays/project.json deleted file mode 100644 index 79a0243527..0000000000 --- a/packages/uint8arrays/project.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "uint8arrays", - "$schema": "../../node_modules/nx/schemas/project-schema.json", - "implicitDependencies": ["!misc-browser"], - "sourceRoot": "packages/uint8arrays/src", - "projectType": "library", - "targets": { - "build": { - "executor": "@nx/js:tsc", - "outputs": ["{options.outputPath}"], - "options": { - "outputPath": "dist/packages/uint8arrays", - "main": "packages/uint8arrays/src/index.ts", - "tsConfig": "packages/uint8arrays/tsconfig.lib.json", - "assets": ["packages/uint8arrays/*.md"], - "updateBuildableProjectDepsInPackageJson": true - } - }, - "test": { - "executor": "@nx/jest:jest", - "outputs": ["{workspaceRoot}/coverage/packages/uint8arrays"], - "options": { - "jestConfig": "packages/uint8arrays/jest.config.ts", - "passWithNoTests": true - } - } - }, - "tags": [] -} diff --git a/packages/uint8arrays/src/index.ts b/packages/uint8arrays/src/index.ts deleted file mode 100644 index 13139f00a6..0000000000 --- a/packages/uint8arrays/src/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { - uint8arrayFromString, - uint8arrayToString, - uint8ArrayToBase64, -} from './lib/uint8arrays'; - -export { uint8arrayFromString, uint8arrayToString, uint8ArrayToBase64 }; diff --git a/packages/uint8arrays/src/lib/uint8arrays.spec.ts b/packages/uint8arrays/src/lib/uint8arrays.spec.ts deleted file mode 100644 index f00907917d..0000000000 --- a/packages/uint8arrays/src/lib/uint8arrays.spec.ts +++ /dev/null @@ -1,190 +0,0 @@ -import { - uint8arrayFromString, - uint8arrayToString, - base64ToUint8Array, -} from './uint8arrays'; -import { base64StringToBlob } from '@lit-protocol/misc-browser'; - -describe('Encoding Functions', () => { - const testCases = [ - { str: 'Hello, World!', encoding: 'utf8' }, - { str: 'こんにちは、世界!', encoding: 'utf8' }, - { str: 'Привет, мир!', encoding: 'utf8' }, - { str: '1234567890', encoding: 'utf8' }, - { str: 'abcdefABCDEF', encoding: 'utf8' }, - { str: '48656c6c6f2c20576f726c6421', encoding: 'base16' }, - ]; - - testCases.forEach((testCase) => { - it(`should encode and decode a string using ${testCase.encoding} encoding`, () => { - const uint8Array = uint8arrayFromString( - testCase.str, - testCase.encoding as any - ); - const decodedStr = uint8arrayToString( - uint8Array, - testCase.encoding as any - ); - expect(decodedStr).toEqual(testCase.str); - }); - }); - - it('should throw an error for an unsupported encoding', () => { - expect(() => - uint8arrayFromString('Hello, World!', 'unsupported' as any) - ).toThrow(); - expect(() => - uint8arrayToString(new Uint8Array(), 'unsupported' as any) - ).toThrow(); - }); -}); - -describe('uint8arrayFromString and uint8arrayToString', () => { - const testCases = [ - { str: 'Hello, World!', encoding: 'utf8' }, - { str: 'こんにちは世界', encoding: 'utf8' }, - { str: '👋🌎', encoding: 'utf8' }, - { str: '48656c6c6f2c20576f726c6421', encoding: 'base16' }, - { str: 'e38182e3818de3818ee381aae38184', encoding: 'base16' }, - { str: 'SGVsbG8sIFdvcmxkIQ==', encoding: 'base64' }, - { str: '44GT44KT44Gr44Gh44Gv', encoding: 'base64' }, - ]; - - testCases.forEach(({ str, encoding }) => { - test(`converts "${str}" with encoding "${encoding}"`, () => { - const uint8Array = uint8arrayFromString(str, encoding as any); - const result = uint8arrayToString(uint8Array, encoding as any); - expect(result).toBe(str); - }); - }); -}); - -describe('conversion', () => { - describe('uint8arrayFromString', () => { - it('converts utf8 string to Uint8Array', () => { - const str = 'Hello, World!'; - const expectedResult = new Uint8Array([ - 72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33, - ]); - - const result = uint8arrayFromString(str, 'utf8'); - - expect(result).toEqual(expectedResult); - }); - - it('converts base16 string to Uint8Array', () => { - const str = '48656c6c6f2c20576f726c6421'; - const expectedResult = new Uint8Array([ - 72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33, - ]); - - const result = uint8arrayFromString(str, 'base16'); - - expect(result).toEqual(expectedResult); - }); - - it('converts base64 string to Uint8Array', () => { - const str = 'SGVsbG8sIFdvcmxkIQ=='; - const expectedResult = new Uint8Array([ - 72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33, - ]); - - const result = uint8arrayFromString(str, 'base64'); - - expect(result).toEqual(expectedResult); - }); - - it('converts base64urlpad string to Uint8Array', () => { - const str = 'SGVsbG8sIFdvcmxkIQ'; - const expectedResult = new Uint8Array([ - 72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33, - ]); - - const result = uint8arrayFromString(str, 'base64urlpad'); - - expect(result).toEqual(expectedResult); - }); - }); - - describe('uint8arrayToString', () => { - it('converts Uint8Array to utf8 string', () => { - const uint8array = new Uint8Array([ - 72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33, - ]); - const expectedResult = 'Hello, World!'; - - const result = uint8arrayToString(uint8array, 'utf8'); - - expect(result).toEqual(expectedResult); - }); - - it('converts Uint8Array to base16 string', () => { - const uint8array = new Uint8Array([ - 72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33, - ]); - const expectedResult = '48656c6c6f2c20576f726c6421'; - - const result = uint8arrayToString(uint8array, 'base16'); - - expect(result).toEqual(expectedResult); - }); - - describe('base64 ', () => { - it('encode to base64urlpad should decde', () => { - // generate a random base64urlpad string of length 1333 (which is equivalent to 1000 bytes when decoded) - // generate a random Uint8Array of length 1000 - const randomBytes = new Uint8Array(1000); - for (let i = 0; i < randomBytes.length; i++) { - randomBytes[i] = Math.floor(Math.random() * 256); - } - - // Convert the Uint8Array to a base64urlpad string - const str = uint8arrayToString(randomBytes, 'base64urlpad'); - const blob = new Blob([uint8arrayFromString(str, 'base64urlpad')]); - - expect(blob.size).toBe(1000); - }); - - it('base64 large encoding should decode', () => { - // generate a random base64urlpad string of length 1333 (which is equivalent to 1000 bytes when decoded) - // generate a random Uint8Array of length 1000 * 20000 - const randomBytes = new Uint8Array(1000 * 20000); - for (let i = 0; i < randomBytes.length; i++) { - randomBytes[i] = Math.floor(Math.random() * 256); - } - - // Convert the Uint8Array to a base64urlpad string - const str = uint8arrayToString(randomBytes, 'base64'); - const urlStr = base64StringToBlob(str); - - expect(urlStr.size).toBe(1000 * 20000); - }); - }); - - describe('base64 ', () => { - // generate a random base64urlpad string of length 1333 (which is equivalent to 1000 bytes when decoded) - // generate a random Uint8Array of length 1000 - const randomBytes = new Uint8Array(1000); - for (let i = 0; i < randomBytes.length; i++) { - randomBytes[i] = Math.floor(Math.random() * 256); - } - - // Convert the Uint8Array to a base64urlpad string - const str = uint8arrayToString(randomBytes, 'base64urlpad'); - const blob = new Blob([uint8arrayFromString(str, 'base64urlpad')]); - - expect(blob.size).toBe(1000); - }); - }); -}); - -describe('turn to base64urlpad', () => { - it('should decrypt a file', async () => { - const u8a = uint8arrayFromString('Hello, World!', 'utf8'); - - // blobToBase64String - const base64 = uint8arrayToString(u8a, 'base64urlpad'); - - expect(base64).toBe('SGVsbG8sIFdvcmxkIQ'); - }); -}); diff --git a/packages/uint8arrays/src/lib/uint8arrays.ts b/packages/uint8arrays/src/lib/uint8arrays.ts deleted file mode 100644 index 186111648d..0000000000 --- a/packages/uint8arrays/src/lib/uint8arrays.ts +++ /dev/null @@ -1,193 +0,0 @@ -// /** -// utf8Encode - Encodes a given string into a UTF-8 encoded Uint8Array. -// @param {string} str - The input string to be encoded. -// @returns {Uint8Array} utf8Array - The UTF-8 encoded Uint8Array of the input string. -// */ -import { InvalidParamType } from '@lit-protocol/constants'; - -function utf8Encode(str: string): Uint8Array { - // Initialize an empty array to store the UTF-8 encoded dat - let utf8Array: number[] = []; - - // Iterate through the characters of the input string - for (let i = 0; i < str.length; i++) { - // Get the Unicode character code of the current character - let charCode = str.charCodeAt(i); - - // If the character code is less than 128 (ASCII range) - if (charCode < 128) { - // Directly push the character code into the UTF-8 array - utf8Array.push(charCode); - - // If the character code is between 128 and 2047 (2-byte sequence) - } else if (charCode < 2048) { - // Push the two-byte sequence of the character code into the UTF-8 array - utf8Array.push(192 | (charCode >> 6), 128 | (charCode & 63)); - } else if ( - // Check if the character is a high surrogate (UTF-16) - (charCode & 0xfc00) === 0xd800 && - i + 1 < str.length && - (str.charCodeAt(i + 1) & 0xfc00) === 0xdc00 - ) { - // Combine the high and low surrogate pair into a single UTF-32 code point - charCode = - 0x10000 + ((charCode & 0x03ff) << 10) + (str.charCodeAt(++i) & 0x03ff); - - // Push the four-byte sequence of the character code into the UTF-8 array - utf8Array.push( - 240 | (charCode >> 18), - 128 | ((charCode >> 12) & 63), - 128 | ((charCode >> 6) & 63), - 128 | (charCode & 63) - ); - } else { - // If the character code is between 2048 and 65535 (3-byte sequence) - - // Push the three-byte sequence of the character code into the UTF-8 array - utf8Array.push( - 224 | (charCode >> 12), - 128 | ((charCode >> 6) & 63), - 128 | (charCode & 63) - ); - } - } - - return new Uint8Array(utf8Array); -} - -// /** - -// utf8Decode - Decodes a given UTF-8 encoded Uint8Array into a string. -// @param {Uint8Array} utf8Array - The input UTF-8 encoded Uint8Array to be decoded. -// @returns {string} str - The decoded string from the input UTF-8 encoded Uint8Array. -// */ -export function utf8Decode(utf8Array: Uint8Array): string { - let str = ''; - let i = 0; - - while (i < utf8Array.length) { - let charCode = utf8Array[i++]; - - if (charCode < 128) { - str += String.fromCharCode(charCode); - } else if (charCode > 191 && charCode < 224) { - str += String.fromCharCode( - ((charCode & 31) << 6) | (utf8Array[i++] & 63) - ); - } else if (charCode > 239 && charCode < 365) { - charCode = - ((charCode & 7) << 18) | - ((utf8Array[i++] & 63) << 12) | - ((utf8Array[i++] & 63) << 6) | - (utf8Array[i++] & 63); - charCode -= 0x10000; - str += String.fromCharCode( - 0xd800 + (charCode >> 10), - 0xdc00 + (charCode & 0x3ff) - ); - } else { - str += String.fromCharCode( - ((charCode & 15) << 12) | - ((utf8Array[i++] & 63) << 6) | - (utf8Array[i++] & 63) - ); - } - } - - return str; -} - -export function base64ToUint8Array(base64Str: string): Uint8Array { - const binaryStr = atob(base64Str); - const len = binaryStr.length; - const bytes = new Uint8Array(len); - - for (let i = 0; i < len; i++) { - bytes[i] = binaryStr.charCodeAt(i); - } - - return bytes; -} - -export function uint8ArrayToBase64(uint8Array: Uint8Array): string { - let binaryStr = ''; - - for (let i = 0; i < uint8Array.length; i++) { - binaryStr += String.fromCharCode(uint8Array[i]); - } - - return btoa(binaryStr); -} - -function base64UrlPadToBase64(base64UrlPadStr: string): string { - return ( - base64UrlPadStr.replace('-', '+').replace('_', '/') + - '='.repeat((4 - (base64UrlPadStr.length % 4)) % 4) - ); -} - -function base64ToBase64UrlPad(base64Str: string): string { - return base64Str.replace('+', '-').replace('/', '_').replace(/=+$/, ''); -} - -export function uint8arrayFromString( - str: string, - encoding = 'utf8' -): Uint8Array { - switch (encoding) { - case 'utf8': - return utf8Encode(str); - case 'base16': - const arr = []; - for (let i = 0; i < str.length; i += 2) { - arr.push(parseInt(str.slice(i, i + 2), 16)); - } - return new Uint8Array(arr); - case 'base64': - return base64ToUint8Array(str); - case 'base64url': - case 'base64urlpad': - return base64ToUint8Array(base64UrlPadToBase64(str)); - default: - throw new InvalidParamType( - { - info: { - encoding, - str, - }, - }, - `Unsupported encoding "${encoding}"` - ); - } -} - -export function uint8arrayToString( - uint8array: Uint8Array, - encoding = 'utf8' -): string { - let _uint8array = new Uint8Array(uint8array); - - switch (encoding) { - case 'utf8': - return utf8Decode(_uint8array); - case 'base16': - return Array.from(_uint8array) - .map((byte: number) => byte.toString(16).padStart(2, '0')) - .join(''); - case 'base64': - return uint8ArrayToBase64(_uint8array); - case 'base64url': - case 'base64urlpad': - return base64ToBase64UrlPad(uint8ArrayToBase64(_uint8array)); - default: - throw new InvalidParamType( - { - info: { - encoding, - _uint8array, - }, - }, - `Unsupported encoding "${encoding}"` - ); - } -} diff --git a/packages/uint8arrays/tsconfig.json b/packages/uint8arrays/tsconfig.json deleted file mode 100644 index 5fd697b347..0000000000 --- a/packages/uint8arrays/tsconfig.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "extends": "../../tsconfig.base.json", - "compilerOptions": { - "module": "commonjs", - "forceConsistentCasingInFileNames": true, - "strict": true, - "noImplicitOverride": true, - "noPropertyAccessFromIndexSignature": true, - "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true, - "allowSyntheticDefaultImports": true - }, - "files": [], - "include": [], - "references": [ - { - "path": "./tsconfig.lib.json" - }, - { - "path": "./tsconfig.spec.json" - } - ] -} diff --git a/packages/uint8arrays/tsconfig.lib.json b/packages/uint8arrays/tsconfig.lib.json deleted file mode 100644 index e85ef50f65..0000000000 --- a/packages/uint8arrays/tsconfig.lib.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "declaration": true, - "types": [] - }, - "include": ["**/*.ts"], - "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"] -} diff --git a/packages/uint8arrays/tsconfig.spec.json b/packages/uint8arrays/tsconfig.spec.json deleted file mode 100644 index a2f7dd30d7..0000000000 --- a/packages/uint8arrays/tsconfig.spec.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "../../dist/out-tsc", - "module": "commonjs", - "types": ["jest", "node"], - "allowJs": true - }, - "include": ["jest.config.ts", "**/*.test.ts", "**/*.spec.ts", "**/*.d.ts"] -} diff --git a/packages/wasm/src/lib/bls.spec.ts b/packages/wasm/src/lib/bls.spec.ts index 95f431a892..12bd8d99ad 100644 --- a/packages/wasm/src/lib/bls.spec.ts +++ b/packages/wasm/src/lib/bls.spec.ts @@ -42,7 +42,6 @@ describe('BLS', () => { messageBase64Buffer, identityParamsUtf8Buffer ); - console.log('ciphertext:', ciphertext); expect(ciphertext).toBeInstanceOf(Uint8Array); expect(ciphertext.byteLength).toEqual(115); }); diff --git a/packages/wrapped-keys/src/lib/api/import-private-key.ts b/packages/wrapped-keys/src/lib/api/import-private-key.ts index ce1f3eb583..207fb632d7 100644 --- a/packages/wrapped-keys/src/lib/api/import-private-key.ts +++ b/packages/wrapped-keys/src/lib/api/import-private-key.ts @@ -1,5 +1,3 @@ -import { encryptString } from '@lit-protocol/encryption'; - import { getFirstSessionSig, getPkpAccessControlCondition, @@ -36,13 +34,10 @@ export async function importPrivateKey( const saltedPrivateKey = LIT_PREFIX + privateKey; - const { ciphertext, dataToEncryptHash } = await encryptString( - { - accessControlConditions: [allowPkpAddressToDecrypt], - dataToEncrypt: saltedPrivateKey, - }, - litNodeClient - ); + const { ciphertext, dataToEncryptHash } = await litNodeClient.encrypt({ + accessControlConditions: [allowPkpAddressToDecrypt], + dataToEncrypt: Buffer.from(saltedPrivateKey, 'utf8'), + }); const { id } = await storePrivateKey({ sessionSig: firstSessionSig, diff --git a/packages/wrapped-keys/src/lib/api/utils.ts b/packages/wrapped-keys/src/lib/api/utils.ts index 1854f29f6b..f81690d0c6 100644 --- a/packages/wrapped-keys/src/lib/api/utils.ts +++ b/packages/wrapped-keys/src/lib/api/utils.ts @@ -1,6 +1,6 @@ import { ethers } from 'ethers'; -import { log } from '@lit-protocol/misc'; +import { logger } from '@lit-protocol/logger'; import { AccsDefaultParams, AuthSig, @@ -39,7 +39,7 @@ export function getFirstSessionSig(pkpSessionSigs: SessionSigsMap): AuthSig { } const [[, sessionSig]] = sessionSigsEntries; - log(`Session Sig being used: ${JSON.stringify(sessionSig)}`); + logger.info(`Session Sig being used: ${JSON.stringify(sessionSig)}`); return sessionSig; } @@ -74,7 +74,7 @@ export function getPkpAddressFromSessionSig(pkpSessionSig: AuthSig): string { } const pkpAddress = delegationAuthSig.address; - log(`pkpAddress to permit decryption: ${pkpAddress}`); + logger.info(`pkpAddress to permit decryption: ${pkpAddress}`); return pkpAddress; } diff --git a/packages/wrapped-keys/src/lib/service-client/utils.ts b/packages/wrapped-keys/src/lib/service-client/utils.ts index 6672e65ac6..4a828d8afd 100644 --- a/packages/wrapped-keys/src/lib/service-client/utils.ts +++ b/packages/wrapped-keys/src/lib/service-client/utils.ts @@ -1,9 +1,5 @@ import { LIT_NETWORK_VALUES } from '@lit-protocol/constants'; import { AuthSig } from '@lit-protocol/types'; -import { - uint8arrayFromString, - uint8ArrayToBase64, -} from '@lit-protocol/uint8arrays'; import { LIT_SESSIONSIG_AUTHORIZATION_SCHEMA_PREFIX, @@ -12,11 +8,12 @@ import { import { BaseRequestParams, SupportedNetworks } from './types'; function composeAuthHeader(sessionSig: AuthSig) { - const sessionSigUintArr = uint8arrayFromString(JSON.stringify(sessionSig)); + const sessionSigsString = JSON.stringify(sessionSig); - return `${LIT_SESSIONSIG_AUTHORIZATION_SCHEMA_PREFIX}${uint8ArrayToBase64( - sessionSigUintArr - )}`; + return `${LIT_SESSIONSIG_AUTHORIZATION_SCHEMA_PREFIX}${Buffer.from( + sessionSigsString, + 'utf8' + ).toString('base64')}`; } const supportedNetworks: SupportedNetworks[] = [ diff --git a/typedoc.json b/typedoc.json index 20d2c62f98..fc7b1bf1e1 100644 --- a/typedoc.json +++ b/typedoc.json @@ -9,14 +9,10 @@ "./packages/contracts-sdk/src/index.ts", "./packages/core/src/index.ts", "./packages/crypto/src/index.ts", - "./packages/encryption/src/index.ts", "./packages/lit-auth-client/src/index.ts", "./packages/lit-node-client/src/index.ts", "./packages/lit-node-client-nodejs/src/index.ts", - "./packages/logger/src/index.ts", - "./packages/misc/src/index.ts", "./packages/misc-browser/src/index.ts", - "./packages/nacl/src/index.ts", "./packages/pkp-base/src/index.ts", "./packages/pkp-cosmos/src/index.ts", "./packages/pkp-ethers/src/index.ts", @@ -24,7 +20,6 @@ "./packages/pkp-walletconnect/src/index.ts", "./packages/schemas/src/index.ts", "./packages/types/src/index.ts", - "./packages/uint8arrays/src/index.ts", "./packages/wasm/src/index.ts", "./packages/wrapped-keys/src/index.ts", "./packages/wrapped-keys-lit-actions/src/index.ts" diff --git a/yarn.lock b/yarn.lock index 053258519a..1936cf1c34 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,6 +2,16 @@ # yarn lockfile v1 +"@adraffy/ens-normalize@1.10.1": + version "1.10.1" + resolved "https://registry.yarnpkg.com/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz#63430d04bd8c5e74f8d7d049338f1cd9d4f02069" + integrity sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw== + +"@adraffy/ens-normalize@^1.10.1": + version "1.11.0" + resolved "https://registry.yarnpkg.com/@adraffy/ens-normalize/-/ens-normalize-1.11.0.tgz#42cc67c5baa407ac25059fcd7d405cc5ecdb0c33" + integrity sha512-/3DDPKHqqIqxUULp8yP4zODUY1i+2xvVWsv8A79xGWdCAG+8sb0hRh0Rk2QyOJUnnbyPUAZYcpBuRe3nS2OIUg== + "@ampproject/remapping@^2.2.0": version "2.3.0" resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4" @@ -29,7 +39,7 @@ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.26.5.tgz#df93ac37f4417854130e21d72c66ff3d4b897fc7" integrity sha512-XvcZi1KWf88RVbF9wn8MN6tYFloU5qX8KjuF3E1PVBmJ9eypXfs4GRiJwLuTZL0iSnJUKn1BFPa5BPZZJyFzPg== -"@babel/core@^7.1.0", "@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.21.3", "@babel/core@^7.22.9", "@babel/core@^7.23.9", "@babel/core@^7.7.2", "@babel/core@^7.8.0": +"@babel/core@^7.1.0", "@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.21.3", "@babel/core@^7.22.9", "@babel/core@^7.23.9": version "7.26.7" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.26.7.tgz#0439347a183b97534d52811144d763a17f9d2b24" integrity sha512-SRijHmF0PSPgLIBYlWnG0hyeJLwXE2CgpsXaMOrtt2yp9/86ALw6oUlj9KYuZ0JN07T4eBMVIW4li/9S1j2BGA== @@ -972,6 +982,13 @@ dependencies: regenerator-runtime "^0.14.0" +"@babel/runtime@^7.21.0", "@babel/runtime@^7.26.0": + version "7.26.10" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.10.tgz#a07b4d8fa27af131a633d7b3524db803eb4764c2" + integrity sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw== + dependencies: + regenerator-runtime "^0.14.0" + "@babel/template@^7.25.9", "@babel/template@^7.3.3": version "7.25.9" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.9.tgz#ecb62d81a8a6f5dc5fe8abfc3901fc52ddf15016" @@ -981,7 +998,7 @@ "@babel/parser" "^7.25.9" "@babel/types" "^7.25.9" -"@babel/traverse@^7.16.0", "@babel/traverse@^7.25.9", "@babel/traverse@^7.26.5", "@babel/traverse@^7.26.7", "@babel/traverse@^7.7.2": +"@babel/traverse@^7.16.0", "@babel/traverse@^7.25.9", "@babel/traverse@^7.26.5", "@babel/traverse@^7.26.7": version "7.26.7" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.26.7.tgz#99a0a136f6a75e7fb8b0a1ace421e0b25994b8bb" integrity sha512-1x1sgeyRLC3r5fQOM0/xtQKsYjyxmFjaOrLJNtZ81inNjyJHGIolTULPiSc/2qe1/qfpFLisLQYFnnZl7QoedA== @@ -1015,6 +1032,16 @@ "@truffle/contract" "^4.2.6" ethers "^4.0.45" +"@coinbase/wallet-sdk@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@coinbase/wallet-sdk/-/wallet-sdk-4.3.0.tgz#03b8fce92ac2b3b7cf132f64d6008ac081569b4e" + integrity sha512-T3+SNmiCw4HzDm4we9wCHCxlP0pqCiwKe4sOwPH3YAK2KSKjxPRydKu6UQJrdONFVLG7ujXvbd/6ZqmvJb8rkw== + dependencies: + "@noble/hashes" "^1.4.0" + clsx "^1.2.1" + eventemitter3 "^5.0.1" + preact "^10.24.2" + "@colors/colors@1.5.0": version "1.5.0" resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" @@ -1434,6 +1461,14 @@ crc-32 "^1.2.0" ethereumjs-util "^7.1.5" +"@ethereumjs/common@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@ethereumjs/common/-/common-3.2.0.tgz#b71df25845caf5456449163012074a55f048e0a0" + integrity sha512-pksvzI0VyLgmuEF2FA/JR/4/y6hcPq8OUail3/AvycBaW1d5VSauOZzqGvJ3RTmR4MU35lWE8KseKOsEhrFRBA== + dependencies: + "@ethereumjs/util" "^8.1.0" + crc-32 "^1.2.0" + "@ethereumjs/rlp@^4.0.1": version "4.0.1" resolved "https://registry.yarnpkg.com/@ethereumjs/rlp/-/rlp-4.0.1.tgz#626fabfd9081baab3d0a3074b0c7ecaf674aaa41" @@ -1447,6 +1482,16 @@ "@ethereumjs/common" "^2.5.0" ethereumjs-util "^7.1.2" +"@ethereumjs/tx@^4.1.2", "@ethereumjs/tx@^4.2.0": + version "4.2.0" + resolved "https://registry.yarnpkg.com/@ethereumjs/tx/-/tx-4.2.0.tgz#5988ae15daf5a3b3c815493bc6b495e76009e853" + integrity sha512-1nc6VO4jtFd172BbSnTnDQVr9IYBFl1y4xPzZdtkrkKIncBCkdbgfdRV+MiTkJYAtTxvV12GRZLqBFT1PNK6Yw== + dependencies: + "@ethereumjs/common" "^3.2.0" + "@ethereumjs/rlp" "^4.0.1" + "@ethereumjs/util" "^8.1.0" + ethereum-cryptography "^2.0.0" + "@ethereumjs/util@^8.0.0", "@ethereumjs/util@^8.1.0": version "8.1.0" resolved "https://registry.yarnpkg.com/@ethereumjs/util/-/util-8.1.0.tgz#299df97fb6b034e0577ce9f94c7d9d1004409ed4" @@ -1889,18 +1934,6 @@ resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== -"@jest/console@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" - integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" - "@jest/console@^29.7.0": version "29.7.0" resolved "https://registry.yarnpkg.com/@jest/console/-/console-29.7.0.tgz#cd4822dbdb84529265c5a2bdb529a3c9cc950ffc" @@ -1913,50 +1946,40 @@ jest-util "^29.7.0" slash "^3.0.0" -"@jest/core@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/core/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" - integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== +"@jest/core@^29.2.2", "@jest/core@^29.7.0": + version "29.7.0" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-29.7.0.tgz#b6cccc239f30ff36609658c5a5e2291757ce448f" + integrity sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg== dependencies: - "@jest/console" "^27.5.1" - "@jest/reporters" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" + "@jest/console" "^29.7.0" + "@jest/reporters" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" "@types/node" "*" ansi-escapes "^4.2.1" chalk "^4.0.0" - emittery "^0.8.1" + ci-info "^3.2.0" exit "^0.1.2" graceful-fs "^4.2.9" - jest-changed-files "^27.5.1" - jest-config "^27.5.1" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-resolve-dependencies "^27.5.1" - jest-runner "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - jest-watcher "^27.5.1" + jest-changed-files "^29.7.0" + jest-config "^29.7.0" + jest-haste-map "^29.7.0" + jest-message-util "^29.7.0" + jest-regex-util "^29.6.3" + jest-resolve "^29.7.0" + jest-resolve-dependencies "^29.7.0" + jest-runner "^29.7.0" + jest-runtime "^29.7.0" + jest-snapshot "^29.7.0" + jest-util "^29.7.0" + jest-validate "^29.7.0" + jest-watcher "^29.7.0" micromatch "^4.0.4" - rimraf "^3.0.0" + pretty-format "^29.7.0" slash "^3.0.0" strip-ansi "^6.0.0" -"@jest/environment@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" - integrity sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== - dependencies: - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - "@jest/environment@^29.7.0": version "29.7.0" resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-29.7.0.tgz#24d61f54ff1f786f3cd4073b4b94416383baf2a7" @@ -1982,18 +2005,6 @@ expect "^29.7.0" jest-snapshot "^29.7.0" -"@jest/fake-timers@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" - integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== - dependencies: - "@jest/types" "^27.5.1" - "@sinonjs/fake-timers" "^8.0.1" - "@types/node" "*" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-util "^27.5.1" - "@jest/fake-timers@^29.7.0": version "29.7.0" resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-29.7.0.tgz#fd91bf1fffb16d7d0d24a426ab1a47a49881a565" @@ -2006,15 +2017,6 @@ jest-mock "^29.7.0" jest-util "^29.7.0" -"@jest/globals@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" - integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/types" "^27.5.1" - expect "^27.5.1" - "@jest/globals@^29.7.0": version "29.7.0" resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-29.7.0.tgz#8d9290f9ec47ff772607fa864ca1d5a2efae1d4d" @@ -2025,38 +2027,7 @@ "@jest/types" "^29.6.3" jest-mock "^29.7.0" -"@jest/reporters@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" - integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== - dependencies: - "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - collect-v8-coverage "^1.0.0" - exit "^0.1.2" - glob "^7.1.2" - graceful-fs "^4.2.9" - istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^5.1.0" - istanbul-lib-report "^3.0.0" - istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.1.3" - jest-haste-map "^27.5.1" - jest-resolve "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - slash "^3.0.0" - source-map "^0.6.0" - string-length "^4.0.1" - terminal-link "^2.0.0" - v8-to-istanbul "^8.1.0" - -"@jest/reporters@^29.4.1": +"@jest/reporters@^29.4.1", "@jest/reporters@^29.7.0": version "29.7.0" resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-29.7.0.tgz#04b262ecb3b8faa83b0b3d321623972393e8f4c7" integrity sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg== @@ -2093,15 +2064,6 @@ dependencies: "@sinclair/typebox" "^0.27.8" -"@jest/source-map@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" - integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== - dependencies: - callsites "^3.0.0" - graceful-fs "^4.2.9" - source-map "^0.6.0" - "@jest/source-map@^29.6.3": version "29.6.3" resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-29.6.3.tgz#d90ba772095cf37a34a5eb9413f1b562a08554c4" @@ -2111,16 +2073,6 @@ callsites "^3.0.0" graceful-fs "^4.2.9" -"@jest/test-result@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" - integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== - dependencies: - "@jest/console" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - "@jest/test-result@^29.4.1", "@jest/test-result@^29.7.0": version "29.7.0" resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-29.7.0.tgz#8db9a80aa1a097bb2262572686734baed9b1657c" @@ -2131,16 +2083,6 @@ "@types/istanbul-lib-coverage" "^2.0.0" collect-v8-coverage "^1.0.0" -"@jest/test-sequencer@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" - integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== - dependencies: - "@jest/test-result" "^27.5.1" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-runtime "^27.5.1" - "@jest/test-sequencer@^29.7.0": version "29.7.0" resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz#6cef977ce1d39834a3aea887a1726628a6f072ce" @@ -2204,7 +2146,7 @@ "@types/yargs" "^16.0.0" chalk "^4.0.0" -"@jest/types@^29.6.3": +"@jest/types@^29.2.1", "@jest/types@^29.6.3": version "29.6.3" resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59" integrity sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw== @@ -2995,10 +2937,27 @@ npmlog "^6.0.2" write-file-atomic "^4.0.1" -"@lit-protocol/contracts@^0.0.86": - version "0.0.86" - resolved "https://registry.yarnpkg.com/@lit-protocol/contracts/-/contracts-0.0.86.tgz#adec861d0b775995523483b2fa5f4baf83d735a9" - integrity sha512-JtSjXwClG9wietQMERhSN1NqYas8JjQbso0FA9BAyv4svS3ejeKVwWcXUUvHPK9gDWPVhBzmvMNaB7ooR5UpBw== +"@lit-labs/ssr-dom-shim@^1.0.0", "@lit-labs/ssr-dom-shim@^1.1.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@lit-labs/ssr-dom-shim/-/ssr-dom-shim-1.3.0.tgz#a28799c463177d1a0b0e5cefdc173da5ac859eb4" + integrity sha512-nQIWonJ6eFAvUUrSlwyHDm/aE8PBDu5kRpL0vHMg6K8fK3Diq1xdPjTnsJSwxABhaZ+5eBi1btQB5ShUTKo4nQ== + +"@lit-protocol/contracts@^0.1.7": + version "0.1.7" + resolved "https://registry.yarnpkg.com/@lit-protocol/contracts/-/contracts-0.1.7.tgz#b8cc75c1c44187818f24445de9c325a8dffeebc4" + integrity sha512-mXXSLa91yDdEbRBpaBZhGzV6a4aKeTR8n11/1pEpngA970GGOydONNmkc06cD5DihU15A33ndsJKWQgPdSx2yw== + dependencies: + "@t3-oss/env-core" "^0.12.0" + ethers "^6.13.5" + viem "^2.23.3" + zod "^3.24.2" + +"@lit/reactive-element@^1.3.0", "@lit/reactive-element@^1.6.0": + version "1.6.3" + resolved "https://registry.yarnpkg.com/@lit/reactive-element/-/reactive-element-1.6.3.tgz#25b4eece2592132845d303e091bad9b04cdcfe03" + integrity sha512-QuTgnG52Poic7uM1AN5yJ09QMe0O28e10XzSvWDz02TJiiKee4stsiownEIadWm8nYzyDAyT+gKzUoZmiWQtsQ== + dependencies: + "@lit-labs/ssr-dom-shim" "^1.0.0" "@ljharb/resumer@~0.0.1": version "0.0.1" @@ -3014,6 +2973,15 @@ dependencies: call-bind "^1.0.7" +"@metamask/eth-json-rpc-provider@^1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@metamask/eth-json-rpc-provider/-/eth-json-rpc-provider-1.0.1.tgz#3fd5316c767847f4ca107518b611b15396a5a32c" + integrity sha512-whiUMPlAOrVGmX8aKYVPvlKyG4CpQXiNNyt74vE1xb5sPvmx5oA7B/kOi/JdBvhGQq97U1/AVdXEdk2zkP8qyA== + dependencies: + "@metamask/json-rpc-engine" "^7.0.0" + "@metamask/safe-event-emitter" "^3.0.0" + "@metamask/utils" "^5.0.1" + "@metamask/eth-sig-util@5.0.2": version "5.0.2" resolved "https://registry.yarnpkg.com/@metamask/eth-sig-util/-/eth-sig-util-5.0.2.tgz#c518279a6e17a88135a13d53a0b970f145ff8bce" @@ -3026,11 +2994,243 @@ tweetnacl "^1.0.3" tweetnacl-util "^0.15.1" +"@metamask/json-rpc-engine@^7.0.0": + version "7.3.3" + resolved "https://registry.yarnpkg.com/@metamask/json-rpc-engine/-/json-rpc-engine-7.3.3.tgz#f2b30a2164558014bfcca45db10f5af291d989af" + integrity sha512-dwZPq8wx9yV3IX2caLi9q9xZBw2XeIoYqdyihDDDpuHVCEiqadJLwqM3zy+uwf6F1QYQ65A8aOMQg1Uw7LMLNg== + dependencies: + "@metamask/rpc-errors" "^6.2.1" + "@metamask/safe-event-emitter" "^3.0.0" + "@metamask/utils" "^8.3.0" + +"@metamask/json-rpc-engine@^8.0.1", "@metamask/json-rpc-engine@^8.0.2": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@metamask/json-rpc-engine/-/json-rpc-engine-8.0.2.tgz#29510a871a8edef892f838ee854db18de0bf0d14" + integrity sha512-IoQPmql8q7ABLruW7i4EYVHWUbF74yrp63bRuXV5Zf9BQwcn5H9Ww1eLtROYvI1bUXwOiHZ6qT5CWTrDc/t/AA== + dependencies: + "@metamask/rpc-errors" "^6.2.1" + "@metamask/safe-event-emitter" "^3.0.0" + "@metamask/utils" "^8.3.0" + +"@metamask/json-rpc-middleware-stream@^7.0.1": + version "7.0.2" + resolved "https://registry.yarnpkg.com/@metamask/json-rpc-middleware-stream/-/json-rpc-middleware-stream-7.0.2.tgz#2e8b2cbc38968e3c6239a9144c35bbb08a8fb57d" + integrity sha512-yUdzsJK04Ev98Ck4D7lmRNQ8FPioXYhEUZOMS01LXW8qTvPGiRVXmVltj2p4wrLkh0vW7u6nv0mNl5xzC5Qmfg== + dependencies: + "@metamask/json-rpc-engine" "^8.0.2" + "@metamask/safe-event-emitter" "^3.0.0" + "@metamask/utils" "^8.3.0" + readable-stream "^3.6.2" + +"@metamask/object-multiplex@^2.0.0": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@metamask/object-multiplex/-/object-multiplex-2.1.0.tgz#5e2e908fc46aee581cbba809870eeee0e571cbb6" + integrity sha512-4vKIiv0DQxljcXwfpnbsXcfa5glMj5Zg9mqn4xpIWqkv6uJ2ma5/GtUfLFSxhlxnR8asRMv8dDmWya1Tc1sDFA== + dependencies: + once "^1.4.0" + readable-stream "^3.6.2" + +"@metamask/onboarding@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@metamask/onboarding/-/onboarding-1.0.1.tgz#14a36e1e175e2f69f09598e2008ab6dc1b3297e6" + integrity sha512-FqHhAsCI+Vacx2qa5mAFcWNSrTcVGMNjzxVgaX8ECSny/BJ9/vgXP9V7WF/8vb9DltPeQkxr+Fnfmm6GHfmdTQ== + dependencies: + bowser "^2.9.0" + +"@metamask/providers@16.1.0": + version "16.1.0" + resolved "https://registry.yarnpkg.com/@metamask/providers/-/providers-16.1.0.tgz#7da593d17c541580fa3beab8d9d8a9b9ce19ea07" + integrity sha512-znVCvux30+3SaUwcUGaSf+pUckzT5ukPRpcBmy+muBLC0yaWnBcvDqGfcsw6CBIenUdFrVoAFa8B6jsuCY/a+g== + dependencies: + "@metamask/json-rpc-engine" "^8.0.1" + "@metamask/json-rpc-middleware-stream" "^7.0.1" + "@metamask/object-multiplex" "^2.0.0" + "@metamask/rpc-errors" "^6.2.1" + "@metamask/safe-event-emitter" "^3.1.1" + "@metamask/utils" "^8.3.0" + detect-browser "^5.2.0" + extension-port-stream "^3.0.0" + fast-deep-equal "^3.1.3" + is-stream "^2.0.0" + readable-stream "^3.6.2" + webextension-polyfill "^0.10.0" + +"@metamask/rpc-errors@^6.2.1": + version "6.4.0" + resolved "https://registry.yarnpkg.com/@metamask/rpc-errors/-/rpc-errors-6.4.0.tgz#a7ce01c06c9a347ab853e55818ac5654a73bd006" + integrity sha512-1ugFO1UoirU2esS3juZanS/Fo8C8XYocCuBpfZI5N7ECtoG+zu0wF+uWZASik6CkO6w9n/Iebt4iI4pT0vptpg== + dependencies: + "@metamask/utils" "^9.0.0" + fast-safe-stringify "^2.0.6" + "@metamask/safe-event-emitter@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@metamask/safe-event-emitter/-/safe-event-emitter-2.0.0.tgz#af577b477c683fad17c619a78208cede06f9605c" integrity sha512-/kSXhY692qiV1MXu6EeOZvg5nECLclxNXcKCxJ3cXQgYuRymRHpdx/t7JXfsK+JLjwA1e1c1/SBrlQYpusC29Q== +"@metamask/safe-event-emitter@^3.0.0", "@metamask/safe-event-emitter@^3.1.1": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@metamask/safe-event-emitter/-/safe-event-emitter-3.1.2.tgz#bfac8c7a1a149b5bbfe98f59fbfea512dfa3bad4" + integrity sha512-5yb2gMI1BDm0JybZezeoX/3XhPDOtTbcFvpTXM9kxsoZjPZFh4XciqRbpD6N86HYZqWDhEaKUDuOyR0sQHEjMA== + +"@metamask/sdk-communication-layer@0.32.0": + version "0.32.0" + resolved "https://registry.yarnpkg.com/@metamask/sdk-communication-layer/-/sdk-communication-layer-0.32.0.tgz#89710e807806836138ea5018b087731d6acab627" + integrity sha512-dmj/KFjMi1fsdZGIOtbhxdg3amxhKL/A5BqSU4uh/SyDKPub/OT+x5pX8bGjpTL1WPWY/Q0OIlvFyX3VWnT06Q== + dependencies: + bufferutil "^4.0.8" + date-fns "^2.29.3" + debug "^4.3.4" + utf-8-validate "^5.0.2" + uuid "^8.3.2" + +"@metamask/sdk-install-modal-web@0.32.0": + version "0.32.0" + resolved "https://registry.yarnpkg.com/@metamask/sdk-install-modal-web/-/sdk-install-modal-web-0.32.0.tgz#86f80420ca364fa0d7710016fa5c81f95537ab23" + integrity sha512-TFoktj0JgfWnQaL3yFkApqNwcaqJ+dw4xcnrJueMP3aXkSNev2Ido+WVNOg4IIMxnmOrfAC9t0UJ0u/dC9MjOQ== + dependencies: + "@paulmillr/qr" "^0.2.1" + +"@metamask/sdk@0.32.0": + version "0.32.0" + resolved "https://registry.yarnpkg.com/@metamask/sdk/-/sdk-0.32.0.tgz#f0e179746fe69dccd032a9026884b45b519c1975" + integrity sha512-WmGAlP1oBuD9hk4CsdlG1WJFuPtYJY+dnTHJMeCyohTWD2GgkcLMUUuvu9lO1/NVzuOoSi1OrnjbuY1O/1NZ1g== + dependencies: + "@babel/runtime" "^7.26.0" + "@metamask/onboarding" "^1.0.1" + "@metamask/providers" "16.1.0" + "@metamask/sdk-communication-layer" "0.32.0" + "@metamask/sdk-install-modal-web" "0.32.0" + "@paulmillr/qr" "^0.2.1" + bowser "^2.9.0" + cross-fetch "^4.0.0" + debug "^4.3.4" + eciesjs "^0.4.11" + eth-rpc-errors "^4.0.3" + eventemitter2 "^6.4.9" + obj-multiplex "^1.0.0" + pump "^3.0.0" + readable-stream "^3.6.2" + socket.io-client "^4.5.1" + tslib "^2.6.0" + util "^0.12.4" + uuid "^8.3.2" + +"@metamask/superstruct@^3.0.0", "@metamask/superstruct@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@metamask/superstruct/-/superstruct-3.1.0.tgz#148f786a674fba3ac885c1093ab718515bf7f648" + integrity sha512-N08M56HdOgBfRKkrgCMZvQppkZGcArEop3kixNEtVbJKm6P9Cfg0YkI6X0s1g78sNrj2fWUwvJADdZuzJgFttA== + +"@metamask/utils@^5.0.1": + version "5.0.2" + resolved "https://registry.yarnpkg.com/@metamask/utils/-/utils-5.0.2.tgz#140ba5061d90d9dac0280c19cab101bc18c8857c" + integrity sha512-yfmE79bRQtnMzarnKfX7AEJBwFTxvTyw3nBQlu/5rmGXrjAeAMltoGxO62TFurxrQAFMNa/fEjIHNvungZp0+g== + dependencies: + "@ethereumjs/tx" "^4.1.2" + "@types/debug" "^4.1.7" + debug "^4.3.4" + semver "^7.3.8" + superstruct "^1.0.3" + +"@metamask/utils@^8.3.0": + version "8.5.0" + resolved "https://registry.yarnpkg.com/@metamask/utils/-/utils-8.5.0.tgz#ddd0d4012d5191809404c97648a837ea9962cceb" + integrity sha512-I6bkduevXb72TIM9q2LRO63JSsF9EXduh3sBr9oybNX2hNNpr/j1tEjXrsG0Uabm4MJ1xkGAQEMwifvKZIkyxQ== + dependencies: + "@ethereumjs/tx" "^4.2.0" + "@metamask/superstruct" "^3.0.0" + "@noble/hashes" "^1.3.1" + "@scure/base" "^1.1.3" + "@types/debug" "^4.1.7" + debug "^4.3.4" + pony-cause "^2.1.10" + semver "^7.5.4" + uuid "^9.0.1" + +"@metamask/utils@^9.0.0": + version "9.3.0" + resolved "https://registry.yarnpkg.com/@metamask/utils/-/utils-9.3.0.tgz#4726bd7f5d6a43ea8425b6d663ab9207f617c2d1" + integrity sha512-w8CVbdkDrVXFJbfBSlDfafDR6BAkpDmv1bC1UJVCoVny5tW2RKAdn9i68Xf7asYT4TnUhl/hN4zfUiKQq9II4g== + dependencies: + "@ethereumjs/tx" "^4.2.0" + "@metamask/superstruct" "^3.1.0" + "@noble/hashes" "^1.3.1" + "@scure/base" "^1.1.3" + "@types/debug" "^4.1.7" + debug "^4.3.4" + pony-cause "^2.1.10" + semver "^7.5.4" + uuid "^9.0.1" + +"@motionone/animation@^10.15.1", "@motionone/animation@^10.18.0": + version "10.18.0" + resolved "https://registry.yarnpkg.com/@motionone/animation/-/animation-10.18.0.tgz#868d00b447191816d5d5cf24b1cafa144017922b" + integrity sha512-9z2p5GFGCm0gBsZbi8rVMOAJCtw1WqBTIPw3ozk06gDvZInBPIsQcHgYogEJ4yuHJ+akuW8g1SEIOpTOvYs8hw== + dependencies: + "@motionone/easing" "^10.18.0" + "@motionone/types" "^10.17.1" + "@motionone/utils" "^10.18.0" + tslib "^2.3.1" + +"@motionone/dom@^10.16.2", "@motionone/dom@^10.16.4": + version "10.18.0" + resolved "https://registry.yarnpkg.com/@motionone/dom/-/dom-10.18.0.tgz#7fd25dac04cab72def6d2b92b8e0cdc091576527" + integrity sha512-bKLP7E0eyO4B2UaHBBN55tnppwRnaE3KFfh3Ps9HhnAkar3Cb69kUCJY9as8LrccVYKgHA+JY5dOQqJLOPhF5A== + dependencies: + "@motionone/animation" "^10.18.0" + "@motionone/generators" "^10.18.0" + "@motionone/types" "^10.17.1" + "@motionone/utils" "^10.18.0" + hey-listen "^1.0.8" + tslib "^2.3.1" + +"@motionone/easing@^10.18.0": + version "10.18.0" + resolved "https://registry.yarnpkg.com/@motionone/easing/-/easing-10.18.0.tgz#7b82f6010dfee3a1bb0ee83abfbaff6edae0c708" + integrity sha512-VcjByo7XpdLS4o9T8t99JtgxkdMcNWD3yHU/n6CLEz3bkmKDRZyYQ/wmSf6daum8ZXqfUAgFeCZSpJZIMxaCzg== + dependencies: + "@motionone/utils" "^10.18.0" + tslib "^2.3.1" + +"@motionone/generators@^10.18.0": + version "10.18.0" + resolved "https://registry.yarnpkg.com/@motionone/generators/-/generators-10.18.0.tgz#fe09ab5cfa0fb9a8884097feb7eb60abeb600762" + integrity sha512-+qfkC2DtkDj4tHPu+AFKVfR/C30O1vYdvsGYaR13W/1cczPrrcjdvYCj0VLFuRMN+lP1xvpNZHCRNM4fBzn1jg== + dependencies: + "@motionone/types" "^10.17.1" + "@motionone/utils" "^10.18.0" + tslib "^2.3.1" + +"@motionone/svelte@^10.16.2": + version "10.16.4" + resolved "https://registry.yarnpkg.com/@motionone/svelte/-/svelte-10.16.4.tgz#5daf117cf5b2576fc6dd487c5e0500938a742470" + integrity sha512-zRVqk20lD1xqe+yEDZhMYgftsuHc25+9JSo+r0a0OWUJFocjSV9D/+UGhX4xgJsuwB9acPzXLr20w40VnY2PQA== + dependencies: + "@motionone/dom" "^10.16.4" + tslib "^2.3.1" + +"@motionone/types@^10.15.1", "@motionone/types@^10.17.1": + version "10.17.1" + resolved "https://registry.yarnpkg.com/@motionone/types/-/types-10.17.1.tgz#cf487badbbdc9da0c2cb86ffc1e5d11147c6e6fb" + integrity sha512-KaC4kgiODDz8hswCrS0btrVrzyU2CSQKO7Ps90ibBVSQmjkrt2teqta6/sOG59v7+dPnKMAg13jyqtMKV2yJ7A== + +"@motionone/utils@^10.15.1", "@motionone/utils@^10.18.0": + version "10.18.0" + resolved "https://registry.yarnpkg.com/@motionone/utils/-/utils-10.18.0.tgz#a59ff8932ed9009624bca07c56b28ef2bb2f885e" + integrity sha512-3XVF7sgyTSI2KWvTf6uLlBJ5iAgRgmvp3bpuOiQJvInd4nZ19ET8lX5unn30SlmRH7hXbBbH+Gxd0m0klJ3Xtw== + dependencies: + "@motionone/types" "^10.17.1" + hey-listen "^1.0.8" + tslib "^2.3.1" + +"@motionone/vue@^10.16.2": + version "10.16.4" + resolved "https://registry.yarnpkg.com/@motionone/vue/-/vue-10.16.4.tgz#07d09e3aa5115ca0bcc0076cb9e5322775277c09" + integrity sha512-z10PF9JV6SbjFq+/rYabM+8CVlMokgl8RFGvieSGNTmrkQanfHn+15XBrhG3BgUfvmTeSeyShfOHpG0i9zEdcg== + dependencies: + "@motionone/dom" "^10.16.4" + tslib "^2.3.1" + "@multiformats/murmur3@^2.0.0": version "2.1.8" resolved "https://registry.yarnpkg.com/@multiformats/murmur3/-/murmur3-2.1.8.tgz#81c1c15b6391109f3febfca4b3205196615a04e9" @@ -3074,11 +3274,18 @@ resolved "https://registry.yarnpkg.com/@noble/ciphers/-/ciphers-1.2.0.tgz#a7858e18eb620f6b2a327a7f0e647b6a78fd0727" integrity sha512-YGdEUzYEd+82jeaVbSKKVp1jFZb8LwaNMIIzHFkihGvYdd/KKAr7KaJHdEdSYGredE3ssSravXIa0Jxg28Sv5w== -"@noble/ciphers@^1.0.0": +"@noble/ciphers@1.2.1", "@noble/ciphers@^1.0.0": version "1.2.1" resolved "https://registry.yarnpkg.com/@noble/ciphers/-/ciphers-1.2.1.tgz#3812b72c057a28b44ff0ad4aff5ca846e5b9cdc9" integrity sha512-rONPWMC7PeExE077uLE4oqWrZ1IvAfz3oH9LibVAcVCopJiA9R62uavnbEzdkVmJYI6M6Zgkbeb07+tWjlq2XA== +"@noble/curves@1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.2.0.tgz#92d7e12e4e49b23105a2555c6984d41733d65c35" + integrity sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw== + dependencies: + "@noble/hashes" "1.3.2" + "@noble/curves@1.4.2", "@noble/curves@~1.4.0": version "1.4.2" resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.4.2.tgz#40309198c76ed71bc6dbf7ba24e81ceb4d0d1fe9" @@ -3093,7 +3300,7 @@ dependencies: "@noble/hashes" "1.7.0" -"@noble/curves@^1.0.0", "@noble/curves@^1.4.2", "@noble/curves@^1.6.0", "@noble/curves@~1.8.1": +"@noble/curves@1.8.1", "@noble/curves@^1.0.0", "@noble/curves@^1.4.2", "@noble/curves@^1.6.0", "@noble/curves@^1.8.1", "@noble/curves@~1.8.1": version "1.8.1" resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.8.1.tgz#19bc3970e205c99e4bdb1c64a4785706bce497ff" integrity sha512-warwspo+UYUPep0Q+vtdVB4Ugn8GGQj8iyB3gnRWsztmUHTI3S1nhdiWNsPUGL0vud7JlRRk1XEu7Lq1KGTnMQ== @@ -3105,6 +3312,11 @@ resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.2.0.tgz#a3150eeb09cc7ab207ebf6d7b9ad311a9bdbed12" integrity sha512-FZfhjEDbT5GRswV3C6uvLPHMiVD6lQBmpoX5+eSiPaMTXte/IKqI5dykDxzZB/WBeK/CDuQRBWarPdi3FNY2zQ== +"@noble/hashes@1.3.2": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.3.2.tgz#6f26dbc8fbc7205873ce3cee2f690eba0d421b39" + integrity sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ== + "@noble/hashes@1.4.0", "@noble/hashes@~1.4.0": version "1.4.0" resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.4.0.tgz#45814aa329f30e4fe0ba49426f49dfccdd066426" @@ -3115,7 +3327,7 @@ resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.7.0.tgz#5d9e33af2c7d04fee35de1519b80c958b2e35e39" integrity sha512-HXydb0DgzTpDPwbVeDGCG1gIu7X6+AuU6Zl6av/E/KG8LMsvPntvq+w17CHRpKBmN6Ybdrt1eP3k4cj8DJa78w== -"@noble/hashes@1.7.1", "@noble/hashes@^1", "@noble/hashes@^1.0.0", "@noble/hashes@^1.1.2", "@noble/hashes@^1.3.0", "@noble/hashes@^1.4.0", "@noble/hashes@^1.5.0", "@noble/hashes@~1.7.1": +"@noble/hashes@1.7.1", "@noble/hashes@^1", "@noble/hashes@^1.0.0", "@noble/hashes@^1.1.2", "@noble/hashes@^1.3.0", "@noble/hashes@^1.3.1", "@noble/hashes@^1.4.0", "@noble/hashes@^1.5.0", "@noble/hashes@~1.7.1": version "1.7.1" resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.7.1.tgz#5738f6d765710921e7a751e00c20ae091ed8db0f" integrity sha512-B8XBPsn4vT/KJAGqDzbwztd+6Yte3P4V7iafm24bxgDe/mlRuK6xmWPuCNrKt2vDafZ8MfJLlchDG/vYafQEjQ== @@ -4113,6 +4325,11 @@ node-addon-api "^3.2.1" node-gyp-build "^4.3.0" +"@paulmillr/qr@^0.2.1": + version "0.2.1" + resolved "https://registry.yarnpkg.com/@paulmillr/qr/-/qr-0.2.1.tgz#76ade7080be4ac4824f638146fd8b6db1805eeca" + integrity sha512-IHnV6A+zxU7XwmKFinmYjUcwlyK9+xkG3/s9KcQhI9BjQKycrJ1JRO+FbNYPwZiPKW3je/DR0k7w8/gLa5eaxQ== + "@phenomnomnominal/tsquery@~5.0.1": version "5.0.1" resolved "https://registry.yarnpkg.com/@phenomnomnominal/tsquery/-/tsquery-5.0.1.tgz#a2a5abc89f92c01562a32806655817516653a388" @@ -4233,16 +4450,37 @@ resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.10.5.tgz#3a1c12c959010a55c17d46b395ed3047b545c246" integrity sha512-kkKUDVlII2DQiKy7UstOR1ErJP8kUKAQ4oa+SQtM0K+lPdmmjj0YnnxBgtTVYH7mUKtbsxeFC9y0AmK7Yb78/A== -"@scure/base@~1.1.0", "@scure/base@~1.1.6": - version "1.1.9" - resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.1.9.tgz#e5e142fbbfe251091f9c5f1dd4c834ac04c3dbd1" - integrity sha512-8YKhl8GHiNI/pU2VMaofa2Tor7PJRAjwQLBBuilkJ9L5+13yVbC7JO/wS7piioAvPSwR3JKM1IJ/u4xQzbcXKg== +"@safe-global/safe-apps-provider@0.18.5": + version "0.18.5" + resolved "https://registry.yarnpkg.com/@safe-global/safe-apps-provider/-/safe-apps-provider-0.18.5.tgz#745a932bda3739a8a298ae44ec6c465f6c4773b7" + integrity sha512-9v9wjBi3TwLsEJ3C2ujYoexp3pFJ0omDLH/GX91e2QB+uwCKTBYyhxFSrTQ9qzoyQd+bfsk4gjOGW87QcJhf7g== + dependencies: + "@safe-global/safe-apps-sdk" "^9.1.0" + events "^3.3.0" + +"@safe-global/safe-apps-sdk@9.1.0", "@safe-global/safe-apps-sdk@^9.1.0": + version "9.1.0" + resolved "https://registry.yarnpkg.com/@safe-global/safe-apps-sdk/-/safe-apps-sdk-9.1.0.tgz#0e65913e0f202e529ed3c846e0f5a98c2d35aa98" + integrity sha512-N5p/ulfnnA2Pi2M3YeWjULeWbjo7ei22JwU/IXnhoHzKq3pYCN6ynL9mJBOlvDVv892EgLPCWCOwQk/uBT2v0Q== + dependencies: + "@safe-global/safe-gateway-typescript-sdk" "^3.5.3" + viem "^2.1.1" + +"@safe-global/safe-gateway-typescript-sdk@^3.5.3": + version "3.22.9" + resolved "https://registry.yarnpkg.com/@safe-global/safe-gateway-typescript-sdk/-/safe-gateway-typescript-sdk-3.22.9.tgz#7f6571aaf1aecbe1217f6dd294ad2f3d90c2c8c2" + integrity sha512-7ojVK/crhOaGowEO8uYWaopZzcr5rR76emgllGIfjCLR70aY4PbASpi9Pbs+7jIRzPDBBkM0RBo+zYx5UduX8Q== -"@scure/base@~1.2.2", "@scure/base@~1.2.4": +"@scure/base@^1.1.3", "@scure/base@~1.2.2", "@scure/base@~1.2.4": version "1.2.4" resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.2.4.tgz#002eb571a35d69bdb4c214d0995dff76a8dcd2a9" integrity sha512-5Yy9czTO47mqz+/J8GM6GIId4umdCk1wc1q8rKERQulIoc8VP9pzDcghv10Tl2E7R96ZUx/PhND3ESYUQX8NuQ== +"@scure/base@~1.1.0", "@scure/base@~1.1.6": + version "1.1.9" + resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.1.9.tgz#e5e142fbbfe251091f9c5f1dd4c834ac04c3dbd1" + integrity sha512-8YKhl8GHiNI/pU2VMaofa2Tor7PJRAjwQLBBuilkJ9L5+13yVbC7JO/wS7piioAvPSwR3JKM1IJ/u4xQzbcXKg== + "@scure/bip32@1.1.5": version "1.1.5" resolved "https://registry.yarnpkg.com/@scure/bip32/-/bip32-1.1.5.tgz#d2ccae16dcc2e75bc1d75f5ef3c66a338d1ba300" @@ -4261,7 +4499,7 @@ "@noble/hashes" "~1.4.0" "@scure/base" "~1.1.6" -"@scure/bip32@^1.3.0": +"@scure/bip32@1.6.2", "@scure/bip32@^1.3.0", "@scure/bip32@^1.5.0": version "1.6.2" resolved "https://registry.yarnpkg.com/@scure/bip32/-/bip32-1.6.2.tgz#093caa94961619927659ed0e711a6e4bf35bffd0" integrity sha512-t96EPDMbtGgtb7onKKqxRLfE5g05k7uHnHRM2xdE6BP/ZmxaLtPek4J4KfVn/90IQNrU1IOAqMgiDtUdtbe3nw== @@ -4286,7 +4524,7 @@ "@noble/hashes" "~1.4.0" "@scure/base" "~1.1.6" -"@scure/bip39@^1.2.0": +"@scure/bip39@1.5.4", "@scure/bip39@^1.2.0", "@scure/bip39@^1.4.0": version "1.5.4" resolved "https://registry.yarnpkg.com/@scure/bip39/-/bip39-1.5.4.tgz#07fd920423aa671be4540d59bdd344cc1461db51" integrity sha512-TFM4ni0vKvCfBpohoh+/lY05i9gRbSwXWngAsF4CABQxoaOHijxuaZ2R6cStDQ5CHtHO9aGJTr4ksVJASRRyMA== @@ -4459,6 +4697,11 @@ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== +"@sinclair/typebox@^0.34.27": + version "0.34.30" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.34.30.tgz#a5f3b127a0ae14e03103f689f744befc429b10cb" + integrity sha512-gFB3BiqjDxEoadW0zn+xyMVb7cLxPCoblVn2C/BKpI41WPYi2d6fwHAlynPNZ5O/Q4WEiujdnJzVtvG/Jc2CBQ== + "@sindresorhus/is@^0.14.0": version "0.14.0" resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" @@ -4474,13 +4717,6 @@ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== -"@sinonjs/commons@^1.7.0": - version "1.8.6" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.6.tgz#80c516a4dc264c2a69115e7578d62581ff455ed9" - integrity sha512-Ky+XkAkqPZSm3NLBeUng77EBQl3cmeJhITaGHdYH8kjVB+aun3S4XBRti2zt17mtt0mIUDiNxYeoJm6drVvBJQ== - dependencies: - type-detect "4.0.8" - "@sinonjs/commons@^3.0.0": version "3.0.1" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.1.tgz#1029357e44ca901a615585f6d27738dbc89084cd" @@ -4495,12 +4731,10 @@ dependencies: "@sinonjs/commons" "^3.0.0" -"@sinonjs/fake-timers@^8.0.1": - version "8.1.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" - integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== - dependencies: - "@sinonjs/commons" "^1.7.0" +"@socket.io/component-emitter@~3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@socket.io/component-emitter/-/component-emitter-3.1.2.tgz#821f8442f4175d8f0467b9daf26e3a18e2d02af2" + integrity sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA== "@solana/buffer-layout@^4.0.1": version "4.0.1" @@ -4823,6 +5057,11 @@ dependencies: defer-to-connect "^2.0.1" +"@t3-oss/env-core@^0.12.0": + version "0.12.0" + resolved "https://registry.yarnpkg.com/@t3-oss/env-core/-/env-core-0.12.0.tgz#d5b6d92bf07d2f3ccdf59cc428f1faf114350d35" + integrity sha512-lOPj8d9nJJTt81mMuN9GMk8x5veOt7q9m11OSnCBJhwp1QrL/qR+M8Y467ULBSm9SunosryWNbmQQbgoiMgcdw== + "@testing-library/cypress@^8.0.2": version "8.0.7" resolved "https://registry.yarnpkg.com/@testing-library/cypress/-/cypress-8.0.7.tgz#18315eba3cf8852808afadf122e4858406384015" @@ -5017,7 +5256,7 @@ "@babel/parser" "^7.1.0" "@babel/types" "^7.0.0" -"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": version "7.20.6" resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.20.6.tgz#8dc9f0ae0f202c08d8d4dab648912c8d6038e3f7" integrity sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg== @@ -5055,6 +5294,13 @@ dependencies: "@types/node" "*" +"@types/debug@^4.1.7": + version "4.1.12" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.12.tgz#a155f21690871953410df4b6b6f53187f0500917" + integrity sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ== + dependencies: + "@types/ms" "*" + "@types/depd@^1.1.36": version "1.1.37" resolved "https://registry.yarnpkg.com/@types/depd/-/depd-1.1.37.tgz#dc8a8b9e450acaba3f6308c5927e6a3062b80c87" @@ -5113,6 +5359,15 @@ jest-matcher-utils "^27.0.0" pretty-format "^27.0.0" +"@types/jsdom@^20.0.0": + version "20.0.1" + resolved "https://registry.yarnpkg.com/@types/jsdom/-/jsdom-20.0.1.tgz#07c14bc19bd2f918c1929541cdaacae894744808" + integrity sha512-d0r18sZPmMQr1eG35u12FZfhIXNrnsPU/g5wvRKCUf/tOGilKKwYMYGqh33BNR6ba+2gkHw1EUiHoN3mn7E5IQ== + dependencies: + "@types/node" "*" + "@types/tough-cookie" "*" + parse5 "^7.0.0" + "@types/json-schema@^7.0.12", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": version "7.0.15" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" @@ -5152,6 +5407,11 @@ resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.5.tgz#ec10755e871497bcd83efe927e43ec46e8c0747e" integrity sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag== +"@types/ms@*": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@types/ms/-/ms-2.1.0.tgz#052aa67a48eccc4309d7f0191b7e41434b90bb78" + integrity sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA== + "@types/node@*", "@types/node@>=13.7.0": version "22.10.10" resolved "https://registry.yarnpkg.com/@types/node/-/node-22.10.10.tgz#85fe89f8bf459dc57dfef1689bd5b52ad1af07e6" @@ -5159,12 +5419,19 @@ dependencies: undici-types "~6.20.0" -"@types/node@18.19.18": - version "18.19.18" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.18.tgz#7526471b28828d1fef1f7e4960fb9477e6e4369c" - integrity sha512-80CP7B8y4PzZF0GWx15/gVWRrB5y/bIjNI84NK3cmQJu0WZwvmj2WMA5LcofQFVfLqqCSp545+U2LsrVzX36Zg== +"@types/node@20": + version "20.17.23" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.17.23.tgz#d228a57bbab954f763a883e495bacea8264efcd5" + integrity sha512-8PCGZ1ZJbEZuYNTMqywO+Sj4vSKjSjT6Ua+6RFOYlEvIvKQABPtrNkoVSLSKDb4obYcMhspVKmsw8Cm10NFRUg== + dependencies: + undici-types "~6.19.2" + +"@types/node@22.7.5": + version "22.7.5" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.7.5.tgz#cfde981727a7ab3611a481510b473ae54442b92b" + integrity sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ== dependencies: - undici-types "~5.26.4" + undici-types "~6.19.2" "@types/node@^12.12.54", "@types/node@^12.12.6": version "12.20.55" @@ -5193,11 +5460,6 @@ dependencies: "@types/node" "*" -"@types/prettier@^2.1.5": - version "2.7.3" - resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.3.tgz#3e51a17e291d01d17d3fc61422015a933af7a08f" - integrity sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA== - "@types/responselike@^1.0.0": version "1.0.3" resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.3.tgz#cc29706f0a397cfe6df89debfe4bf5cea159db50" @@ -5242,6 +5504,16 @@ resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.3.tgz#6209321eb2c1712a7e7466422b8cb1fc0d9dd5d8" integrity sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw== +"@types/tough-cookie@*": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.5.tgz#cb6e2a691b70cb177c6e3ae9c1d2e8b2ea8cd304" + integrity sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA== + +"@types/trusted-types@^2.0.2": + version "2.0.7" + resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.7.tgz#baccb07a970b91707df3a3e8ba6896c57ead2d11" + integrity sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw== + "@types/unist@*", "@types/unist@^3.0.0": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/unist/-/unist-3.0.3.tgz#acaab0f919ce69cce629c2d4ed2eb4adc1b6c20c" @@ -5427,6 +5699,27 @@ resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.3.0.tgz#d06bbb384ebcf6c505fde1c3d0ed4ddffe0aaff8" integrity sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g== +"@wagmi/connectors@5.7.9": + version "5.7.9" + resolved "https://registry.yarnpkg.com/@wagmi/connectors/-/connectors-5.7.9.tgz#ad354ee5ee5db6786993c4fcafdb128bb766f320" + integrity sha512-mKgSjjdlnFjVu5dE8yKJfgs06+GvFFf6tOrLh9ihFzz0dwv6SQtC68qeo/YHxiHRZ8olFzam8GQRuTXM9bF1rg== + dependencies: + "@coinbase/wallet-sdk" "4.3.0" + "@metamask/sdk" "0.32.0" + "@safe-global/safe-apps-provider" "0.18.5" + "@safe-global/safe-apps-sdk" "9.1.0" + "@walletconnect/ethereum-provider" "2.19.0" + cbw-sdk "npm:@coinbase/wallet-sdk@3.9.3" + +"@wagmi/core@2.16.5": + version "2.16.5" + resolved "https://registry.yarnpkg.com/@wagmi/core/-/core-2.16.5.tgz#ae451daba4d84402f4ddf7b1279efeab46f3567f" + integrity sha512-7WlsxIvcS2WXO/8KnIkutCfY6HACsPsEuZHoYGu2TbwM7wlJv2HmR9zSvmyeEDsTBDPva/tuFbmJo4HJ9llkWA== + dependencies: + eventemitter3 "5.0.1" + mipd "0.0.7" + zustand "5.0.0" + "@walletconnect/auth-client@2.1.1": version "2.1.1" resolved "https://registry.yarnpkg.com/@walletconnect/auth-client/-/auth-client-2.1.1.tgz#45548fc5d5e5ac155503d1b42ac97a96a2cba98d" @@ -5467,6 +5760,29 @@ "@walletconnect/types" "^1.8.0" "@walletconnect/utils" "^1.8.0" +"@walletconnect/core@2.19.0": + version "2.19.0" + resolved "https://registry.yarnpkg.com/@walletconnect/core/-/core-2.19.0.tgz#acd84b605b05469aa9962079af2590e583815d49" + integrity sha512-AEoyICLHQEnjijZr9XsL4xtFhC5Cmu0RsEGxAxmwxbfGvAcYcSCNp1fYq0Q6nHc8jyoPOALpwySTle300Y1vxw== + dependencies: + "@walletconnect/heartbeat" "1.2.2" + "@walletconnect/jsonrpc-provider" "1.0.14" + "@walletconnect/jsonrpc-types" "1.0.4" + "@walletconnect/jsonrpc-utils" "1.0.8" + "@walletconnect/jsonrpc-ws-connection" "1.0.16" + "@walletconnect/keyvaluestorage" "1.1.1" + "@walletconnect/logger" "2.1.2" + "@walletconnect/relay-api" "1.0.11" + "@walletconnect/relay-auth" "1.1.0" + "@walletconnect/safe-json" "1.0.2" + "@walletconnect/time" "1.0.2" + "@walletconnect/types" "2.19.0" + "@walletconnect/utils" "2.19.0" + "@walletconnect/window-getters" "1.0.1" + events "3.3.0" + lodash.isequal "4.5.0" + uint8arrays "3.1.0" + "@walletconnect/core@2.9.2": version "2.9.2" resolved "https://registry.yarnpkg.com/@walletconnect/core/-/core-2.9.2.tgz#c46734ca63771b28fd77606fd521930b7ecfc5e1" @@ -5549,6 +5865,23 @@ dependencies: tslib "1.14.1" +"@walletconnect/ethereum-provider@2.19.0": + version "2.19.0" + resolved "https://registry.yarnpkg.com/@walletconnect/ethereum-provider/-/ethereum-provider-2.19.0.tgz#bbc94b1f6162e8288817b60bf286abfc9d663988" + integrity sha512-c1lwV6geL+IAbgB0DBTArzxkCE9raifTHPPv8ixGQPNS21XpVCaWTN6SE+rS9iwAtEoXjWAoNeK7rEOHE2negw== + dependencies: + "@walletconnect/jsonrpc-http-connection" "1.0.8" + "@walletconnect/jsonrpc-provider" "1.0.14" + "@walletconnect/jsonrpc-types" "1.0.4" + "@walletconnect/jsonrpc-utils" "1.0.8" + "@walletconnect/keyvaluestorage" "1.1.1" + "@walletconnect/modal" "2.7.0" + "@walletconnect/sign-client" "2.19.0" + "@walletconnect/types" "2.19.0" + "@walletconnect/universal-provider" "2.19.0" + "@walletconnect/utils" "2.19.0" + events "3.3.0" + "@walletconnect/ethereum-provider@2.9.2": version "2.9.2" resolved "https://registry.yarnpkg.com/@walletconnect/ethereum-provider/-/ethereum-provider-2.9.2.tgz#fb3a6fca279bb4e98e75baa2fb9730545d41bb99" @@ -5609,7 +5942,7 @@ "@walletconnect/types" "^1.8.0" "@walletconnect/utils" "^1.8.0" -"@walletconnect/jsonrpc-http-connection@^1.0.7": +"@walletconnect/jsonrpc-http-connection@1.0.8", "@walletconnect/jsonrpc-http-connection@^1.0.7": version "1.0.8" resolved "https://registry.yarnpkg.com/@walletconnect/jsonrpc-http-connection/-/jsonrpc-http-connection-1.0.8.tgz#2f4c3948f074960a3edd07909560f3be13e2c7ae" integrity sha512-+B7cRuaxijLeFDJUq5hAzNyef3e3tBDIxyaCNmFtjwnod5AGis3RToNqzFU33vpVcxFhofkpE7Cx+5MYejbMGw== @@ -5713,6 +6046,31 @@ resolved "https://registry.yarnpkg.com/@walletconnect/mobile-registry/-/mobile-registry-1.4.0.tgz#502cf8ab87330841d794819081e748ebdef7aee5" integrity sha512-ZtKRio4uCZ1JUF7LIdecmZt7FOLnX72RPSY7aUVu7mj7CSfxDwUn6gBuK6WGtH+NZCldBqDl5DenI5fFSvkKYw== +"@walletconnect/modal-core@2.7.0": + version "2.7.0" + resolved "https://registry.yarnpkg.com/@walletconnect/modal-core/-/modal-core-2.7.0.tgz#73c13c3b7b0abf9ccdbac9b242254a86327ce0a4" + integrity sha512-oyMIfdlNdpyKF2kTJowTixZSo0PGlCJRdssUN/EZdA6H6v03hZnf09JnwpljZNfir2M65Dvjm/15nGrDQnlxSA== + dependencies: + valtio "1.11.2" + +"@walletconnect/modal-ui@2.7.0": + version "2.7.0" + resolved "https://registry.yarnpkg.com/@walletconnect/modal-ui/-/modal-ui-2.7.0.tgz#dbbb7ee46a5a25f7d39db622706f2d197b268cbb" + integrity sha512-gERYvU7D7K1ANCN/8vUgsE0d2hnRemfAFZ2novm9aZBg7TEd/4EgB+AqbJ+1dc7GhOL6dazckVq78TgccHb7mQ== + dependencies: + "@walletconnect/modal-core" "2.7.0" + lit "2.8.0" + motion "10.16.2" + qrcode "1.5.3" + +"@walletconnect/modal@2.7.0": + version "2.7.0" + resolved "https://registry.yarnpkg.com/@walletconnect/modal/-/modal-2.7.0.tgz#55f969796d104cce1205f5f844d8f8438b79723a" + integrity sha512-RQVt58oJ+rwqnPcIvRFeMGKuXb9qkgSmwz4noF8JZGUym3gUAzVs+uW2NQ1Owm9XOJAV+sANrtJ+VoVq1ftElw== + dependencies: + "@walletconnect/modal-core" "2.7.0" + "@walletconnect/modal-ui" "2.7.0" + "@walletconnect/qrcode-modal@^1.8.0": version "1.8.0" resolved "https://registry.yarnpkg.com/@walletconnect/qrcode-modal/-/qrcode-modal-1.8.0.tgz#ddd6f5c9b7ee52c16adf9aacec2a3eac4994caea" @@ -5754,7 +6112,7 @@ tslib "1.14.1" uint8arrays "^3.0.0" -"@walletconnect/relay-auth@^1.0.4": +"@walletconnect/relay-auth@1.1.0", "@walletconnect/relay-auth@^1.0.4": version "1.1.0" resolved "https://registry.yarnpkg.com/@walletconnect/relay-auth/-/relay-auth-1.1.0.tgz#c3c5f54abd44a5138ea7d4fe77970597ba66c077" integrity sha512-qFw+a9uRz26jRCDgL7Q5TA9qYIgcNY8jpJzI1zAWNZ8i7mQjaijRnWFKsCHAU9CyGjvt6RKrRXyFtFOpWTVmCQ== @@ -5777,6 +6135,21 @@ dependencies: tslib "1.14.1" +"@walletconnect/sign-client@2.19.0": + version "2.19.0" + resolved "https://registry.yarnpkg.com/@walletconnect/sign-client/-/sign-client-2.19.0.tgz#775d21928a402ab5506f7c0b6065932cd6c8724d" + integrity sha512-+GkuJzPK9SPq+RZgdKHNOvgRagxh/hhYWFHOeSiGh3DyAQofWuFTq4UrN/MPjKOYswSSBKfIa+iqKYsi4t8zLQ== + dependencies: + "@walletconnect/core" "2.19.0" + "@walletconnect/events" "1.0.1" + "@walletconnect/heartbeat" "1.2.2" + "@walletconnect/jsonrpc-utils" "1.0.8" + "@walletconnect/logger" "2.1.2" + "@walletconnect/time" "1.0.2" + "@walletconnect/types" "2.19.0" + "@walletconnect/utils" "2.19.0" + events "3.3.0" + "@walletconnect/sign-client@2.9.2": version "2.9.2" resolved "https://registry.yarnpkg.com/@walletconnect/sign-client/-/sign-client-2.9.2.tgz#ff4c81c082c2078878367d07f24bcb20b1f7ab9e" @@ -5820,6 +6193,18 @@ "@walletconnect/logger" "2.1.2" events "3.3.0" +"@walletconnect/types@2.19.0": + version "2.19.0" + resolved "https://registry.yarnpkg.com/@walletconnect/types/-/types-2.19.0.tgz#cbb8053c20064377a85440ede06d5057c34c5786" + integrity sha512-Ttse3p3DCdFQ/TRQrsPMQJzFr7cb/2AF5ltLPzXRNMmapmGydc6WO8QU7g/tGEB3RT9nHcLY2aqlwsND9sXMxA== + dependencies: + "@walletconnect/events" "1.0.1" + "@walletconnect/heartbeat" "1.2.2" + "@walletconnect/jsonrpc-types" "1.0.4" + "@walletconnect/keyvaluestorage" "1.1.1" + "@walletconnect/logger" "2.1.2" + events "3.3.0" + "@walletconnect/types@2.9.2": version "2.9.2" resolved "https://registry.yarnpkg.com/@walletconnect/types/-/types-2.9.2.tgz#d5fd5a61dc0f41cbdca59d1885b85207ac7bf8c5" @@ -5837,6 +6222,24 @@ resolved "https://registry.yarnpkg.com/@walletconnect/types/-/types-1.8.0.tgz#3f5e85b2d6b149337f727ab8a71b8471d8d9a195" integrity sha512-Cn+3I0V0vT9ghMuzh1KzZvCkiAxTq+1TR2eSqw5E5AVWfmCtECFkVZBP6uUJZ8YjwLqXheI+rnjqPy7sVM4Fyg== +"@walletconnect/universal-provider@2.19.0": + version "2.19.0" + resolved "https://registry.yarnpkg.com/@walletconnect/universal-provider/-/universal-provider-2.19.0.tgz#2648a604def3a81cc91893ffd1bba01c6fa637d5" + integrity sha512-e9JvadT5F8QwdLmd7qBrmACq04MT7LQEe1m3X2Fzvs3DWo8dzY8QbacnJy4XSv5PCdxMWnua+2EavBk8nrI9QA== + dependencies: + "@walletconnect/events" "1.0.1" + "@walletconnect/jsonrpc-http-connection" "1.0.8" + "@walletconnect/jsonrpc-provider" "1.0.14" + "@walletconnect/jsonrpc-types" "1.0.4" + "@walletconnect/jsonrpc-utils" "1.0.8" + "@walletconnect/keyvaluestorage" "1.1.1" + "@walletconnect/logger" "2.1.2" + "@walletconnect/sign-client" "2.19.0" + "@walletconnect/types" "2.19.0" + "@walletconnect/utils" "2.19.0" + events "3.3.0" + lodash "4.17.21" + "@walletconnect/universal-provider@2.9.2": version "2.9.2" resolved "https://registry.yarnpkg.com/@walletconnect/universal-provider/-/universal-provider-2.9.2.tgz#40e54e98bc48b1f2f5f77eb5b7f05462093a8506" @@ -5857,25 +6260,48 @@ resolved "https://registry.yarnpkg.com/@walletconnect/utils/-/utils-2.17.5.tgz#4d1eace920dfae51b13f4209a57e77a5eb18774a" integrity sha512-3qBeAuEeYw/xbonhK1wC+j1y5I9Fn5qX3D5jW5SuTd1d4SsRSzgUi7SFCFwU1u4LitkEae16FNmTOk4lrrXY3g== dependencies: - "@ethersproject/hash" "5.7.0" - "@ethersproject/transactions" "5.7.0" - "@stablelib/chacha20poly1305" "1.0.1" - "@stablelib/hkdf" "1.0.1" - "@stablelib/random" "1.0.2" - "@stablelib/sha256" "1.0.1" - "@stablelib/x25519" "1.0.3" + "@ethersproject/hash" "5.7.0" + "@ethersproject/transactions" "5.7.0" + "@stablelib/chacha20poly1305" "1.0.1" + "@stablelib/hkdf" "1.0.1" + "@stablelib/random" "1.0.2" + "@stablelib/sha256" "1.0.1" + "@stablelib/x25519" "1.0.3" + "@walletconnect/jsonrpc-utils" "1.0.8" + "@walletconnect/keyvaluestorage" "1.1.1" + "@walletconnect/relay-api" "1.0.11" + "@walletconnect/relay-auth" "1.0.4" + "@walletconnect/safe-json" "1.0.2" + "@walletconnect/time" "1.0.2" + "@walletconnect/types" "2.17.5" + "@walletconnect/window-getters" "1.0.1" + "@walletconnect/window-metadata" "1.0.1" + detect-browser "5.3.0" + elliptic "6.6.1" + uint8arrays "3.1.0" + +"@walletconnect/utils@2.19.0": + version "2.19.0" + resolved "https://registry.yarnpkg.com/@walletconnect/utils/-/utils-2.19.0.tgz#5fffb1f83928ece8c534d1596134e5c097010804" + integrity sha512-LZ0D8kevknKfrfA0Sq3Hf3PpmM8oWyNfsyWwFR51t//2LBgtN2Amz5xyoDDJcjLibIbKAxpuo/i0JYAQxz+aPA== + dependencies: + "@noble/ciphers" "1.2.1" + "@noble/curves" "1.8.1" + "@noble/hashes" "1.7.1" "@walletconnect/jsonrpc-utils" "1.0.8" "@walletconnect/keyvaluestorage" "1.1.1" "@walletconnect/relay-api" "1.0.11" - "@walletconnect/relay-auth" "1.0.4" + "@walletconnect/relay-auth" "1.1.0" "@walletconnect/safe-json" "1.0.2" "@walletconnect/time" "1.0.2" - "@walletconnect/types" "2.17.5" + "@walletconnect/types" "2.19.0" "@walletconnect/window-getters" "1.0.1" "@walletconnect/window-metadata" "1.0.1" detect-browser "5.3.0" elliptic "6.6.1" + query-string "7.1.3" uint8arrays "3.1.0" + viem "2.23.2" "@walletconnect/utils@2.9.2": version "2.9.2" @@ -5996,7 +6422,7 @@ JSONStream@^1.0.4, JSONStream@^1.3.5: jsonparse "^1.2.0" through ">=2.2.7 <3" -abab@^2.0.3, abab@^2.0.5: +abab@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== @@ -6027,11 +6453,18 @@ abi-decoder@^2.3.0: web3-eth-abi "^1.2.1" web3-utils "^1.2.1" -abitype@^1.0.8: +abitype@1.0.8, abitype@^1.0.6, abitype@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/abitype/-/abitype-1.0.8.tgz#3554f28b2e9d6e9f35eb59878193eabd1b9f46ba" integrity sha512-ZeiI6h3GnW06uYDLx0etQtX/p8E24UaHHBj57RSjK7YBFe7iuVn07EDpOeP451D06sF27VOz9JJPlIKJmXgkEg== +abort-controller@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" + integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== + dependencies: + event-target-shim "^5.0.0" + abortcontroller-polyfill@^1.7.3: version "1.7.8" resolved "https://registry.yarnpkg.com/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.8.tgz#fe8d4370403f02e2aa37e3d2b0b178bae9d83f49" @@ -6059,37 +6492,37 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: mime-types "~2.1.34" negotiator "0.6.3" -acorn-globals@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" - integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== +acorn-globals@^7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-7.0.1.tgz#0dbf05c44fa7c94332914c02066d5beff62c40c3" + integrity sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q== dependencies: - acorn "^7.1.1" - acorn-walk "^7.1.1" + acorn "^8.1.0" + acorn-walk "^8.0.2" acorn-jsx@^5.3.1, acorn-jsx@^5.3.2: version "5.3.2" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== -acorn-walk@^7.1.1: - version "7.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" - integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== - -acorn-walk@^8.1.1: +acorn-walk@^8.0.2, acorn-walk@^8.1.1: version "8.3.4" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7" integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g== dependencies: acorn "^8.11.0" -acorn@^7.1.1, acorn@^7.4.0: +acorn@^7.4.0: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== -acorn@^8.11.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.9.0: +acorn@^8.1.0, acorn@^8.8.1: + version "8.14.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.1.tgz#721d5dc10f7d5b5609a891773d47731796935dfb" + integrity sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg== + +acorn@^8.11.0, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.9.0: version "8.14.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.0.tgz#063e2c70cac5fb4f6467f0b11152e04c682795b0" integrity sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA== @@ -6109,6 +6542,11 @@ aes-js@3.0.0: resolved "https://registry.yarnpkg.com/aes-js/-/aes-js-3.0.0.tgz#e21df10ad6c2053295bcbb8dab40b09dbea87e4d" integrity sha512-H7wUZRn8WpTq9jocdxQ2c8x2sKo9ZVmzfRE13GiNJXfp7NcKYEdvl3vspKjXox6RIG2VtaRe4JFvxG4rqp2Zuw== +aes-js@4.0.0-beta.5: + version "4.0.0-beta.5" + resolved "https://registry.yarnpkg.com/aes-js/-/aes-js-4.0.0-beta.5.tgz#8d2452c52adedebc3a3e28465d858c11ca315873" + integrity sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q== + agent-base@6, agent-base@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" @@ -6908,7 +7346,7 @@ babel-helpers@^6.24.1: babel-runtime "^6.22.0" babel-template "^6.24.1" -babel-jest@27.5.1, babel-jest@^27.5.1: +babel-jest@27.5.1: version "27.5.1" resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== @@ -7434,6 +7872,11 @@ base-x@^4.0.0: resolved "https://registry.yarnpkg.com/base-x/-/base-x-4.0.0.tgz#d0e3b7753450c73f8ad2389b5c018a4af7b2224a" integrity sha512-FuwxlW4H5kh37X/oW59pwTzzTKRzfrrQwhmyspRM7swOEZcHtDZSCt45U6oKgtuFE+WYPblePMVIPR4RZrh/hw== +base-x@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/base-x/-/base-x-5.0.1.tgz#16bf35254be1df8aca15e36b7c1dda74b2aa6b03" + integrity sha512-M7uio8Zt++eg3jPj+rHMfCC+IuygQHHCOU+IYsVtik6FWjuYpVt/+MRKcgsAMHh8mMFAwnB+Bs+mTrFiXjMzKg== + base64-js@^1.3.0, base64-js@^1.3.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" @@ -7686,6 +8129,11 @@ bottleneck@^2.18.1: resolved "https://registry.yarnpkg.com/bottleneck/-/bottleneck-2.19.5.tgz#5df0b90f59fd47656ebe63c78a98419205cadd91" integrity sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw== +bowser@^2.9.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" + integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== + boxen@7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/boxen/-/boxen-7.0.0.tgz#9e5f8c26e716793fc96edcf7cf754cdf5e3fbf32" @@ -7757,11 +8205,6 @@ brorand@^1.0.1, brorand@^1.1.0: resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w== -browser-process-hrtime@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" - integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== - browserify-aes@^1.0.4, browserify-aes@^1.0.6, browserify-aes@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" @@ -7869,6 +8312,13 @@ bs58@^5.0.0: dependencies: base-x "^4.0.0" +bs58@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/bs58/-/bs58-6.0.0.tgz#a2cda0130558535dd281a2f8697df79caaf425d8" + integrity sha512-PD0wEnEYg6ijszw/u8s+iI3H17cTymlrwkKhDhPZq+Sokl3AU4htyBFTjAeNAlCCmg0f53g6ih3jATyCKftTfw== + dependencies: + base-x "^5.0.0" + bs58check@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/bs58check/-/bs58check-2.1.2.tgz#53b018291228d82a5aa08e7d796fdafda54aebfc" @@ -7944,7 +8394,7 @@ buffer@^5.0.5, buffer@^5.2.1, buffer@^5.4.3, buffer@^5.5.0, buffer@^5.6.0: base64-js "^1.3.1" ieee754 "^1.1.13" -bufferutil@^4.0.1: +bufferutil@^4.0.1, bufferutil@^4.0.8: version "4.0.9" resolved "https://registry.yarnpkg.com/bufferutil/-/bufferutil-4.0.9.tgz#6e81739ad48a95cad45a279588e13e95e24a800a" integrity sha512-WDtdLmJvAuNNPzByAYpRo2rF1Mmradw6gvWsQKf63476DDXmomT9zUiGypLcG4ibIM67vhAj8jJRdbmEws2Aqw== @@ -8239,6 +8689,21 @@ cbor@^8.1.0: dependencies: nofilter "^3.1.0" +"cbw-sdk@npm:@coinbase/wallet-sdk@3.9.3": + version "3.9.3" + resolved "https://registry.yarnpkg.com/@coinbase/wallet-sdk/-/wallet-sdk-3.9.3.tgz#daf10cb0c85d0363315b7270cb3f02bedc408aab" + integrity sha512-N/A2DRIf0Y3PHc1XAMvbBUu4zisna6qAdqABMZwBMNEfWrXpAwx16pZGkYCLGE+Rvv1edbcB2LYDRnACNcmCiw== + dependencies: + bn.js "^5.2.1" + buffer "^6.0.3" + clsx "^1.2.1" + eth-block-tracker "^7.1.0" + eth-json-rpc-filters "^6.0.0" + eventemitter3 "^5.0.1" + keccak "^3.0.3" + preact "^10.16.0" + sha.js "^2.4.11" + ccount@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/ccount/-/ccount-2.0.1.tgz#17a3bf82302e0870d6da43a01311a8bc02a3ecf5" @@ -8588,6 +9053,15 @@ cliui@^5.0.0: strip-ansi "^5.2.0" wrap-ansi "^5.1.0" +cliui@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" + integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^6.2.0" + cliui@^7.0.2: version "7.0.4" resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" @@ -8639,7 +9113,7 @@ clone@^2.0.0, clone@^2.1.1: resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w== -clsx@^1.1.0: +clsx@^1.1.0, clsx@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== @@ -8718,7 +9192,7 @@ color-support@^1.1.2, color-support@^1.1.3: resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== -colorette@^2.0.16: +colorette@^2.0.16, colorette@^2.0.7: version "2.0.20" resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== @@ -9056,7 +9530,7 @@ conventional-recommended-bump@^6.1.0: meow "^8.0.0" q "^1.5.1" -convert-source-map@^1.4.0, convert-source-map@^1.5.1, convert-source-map@^1.6.0: +convert-source-map@^1.4.0, convert-source-map@^1.5.1: version "1.9.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== @@ -9081,6 +9555,11 @@ cookie@0.7.1: resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9" integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== +cookie@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-1.0.2.tgz#27360701532116bd3f1f9416929d176afe1e4610" + integrity sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA== + cookiejar@^2.1.1: version "2.1.4" resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.4.tgz#ee669c1fea2cf42dc31585469d193fef0d65771b" @@ -9221,6 +9700,19 @@ create-hmac@^1.1.4, create-hmac@^1.1.7: safe-buffer "^5.0.1" sha.js "^2.4.8" +create-jest@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/create-jest/-/create-jest-29.7.0.tgz#a355c5b3cb1e1af02ba177fe7afd7feee49a5320" + integrity sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q== + dependencies: + "@jest/types" "^29.6.3" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-config "^29.7.0" + jest-util "^29.7.0" + prompts "^2.0.1" + create-require@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" @@ -9248,6 +9740,13 @@ cross-fetch@^3.1.4: dependencies: node-fetch "^2.7.0" +cross-fetch@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-4.1.0.tgz#8f69355007ee182e47fa692ecbaa37a52e43c3d2" + integrity sha512-uKm5PU+MHTootlWEY+mZ4vvXoCn4fLQxT9dSc1sXVMSFkINTJVN8cAQROpwcKm8bJ/c7rgZVIBWzH5T78sNZZw== + dependencies: + node-fetch "^2.7.0" + cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.6" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" @@ -9355,10 +9854,10 @@ csso@^5.0.5: dependencies: css-tree "~2.2.0" -cssom@^0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" - integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== +cssom@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.5.0.tgz#d254fa92cd8b6fbd83811b9fbaed34663cc17c36" + integrity sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw== cssom@~0.3.6: version "0.3.8" @@ -9585,14 +10084,14 @@ dashdash@^1.12.0: dependencies: assert-plus "^1.0.0" -data-urls@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" - integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== +data-urls@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-3.0.2.tgz#9cf24a477ae22bcef5cd5f6f0bfbc1d2d3be9143" + integrity sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ== dependencies: - abab "^2.0.3" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.0.0" + abab "^2.0.6" + whatwg-mimetype "^3.0.0" + whatwg-url "^11.0.0" data-view-buffer@^1.0.2: version "1.0.2" @@ -9626,11 +10125,23 @@ date-and-time@^2.4.1: resolved "https://registry.yarnpkg.com/date-and-time/-/date-and-time-2.4.3.tgz#116963998a8cecd478955ae053f31a6747a988df" integrity sha512-xkS/imTmsyEdpp9ie5oV5UWolg3XkYWNySbT2W4ESWr6v4V8YrsHbhpk9fIeQcr0NFTnYbQJLXlgU1zrLItysA== +date-fns@^2.29.3: + version "2.30.0" + resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.30.0.tgz#f367e644839ff57894ec6ac480de40cae4b0f4d0" + integrity sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw== + dependencies: + "@babel/runtime" "^7.21.0" + dateformat@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-3.0.3.tgz#a6e37499a4d9a9cf85ef5872044d62901c9889ae" integrity sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q== +dateformat@^4.6.3: + version "4.6.3" + resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5" + integrity sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA== + dayjs@^1.10.4: version "1.11.13" resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.13.tgz#92430b0139055c3ebb60150aa13e860a4b5a366c" @@ -9671,6 +10182,13 @@ debug@^3.1.0, debug@^3.2.7: dependencies: ms "^2.1.1" +debug@~4.3.1, debug@~4.3.2: + version "4.3.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.7.tgz#87945b4151a011d76d95a198d7111c865c360a52" + integrity sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ== + dependencies: + ms "^2.1.3" + debuglog@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" @@ -9689,7 +10207,7 @@ decamelize@^1.1.0, decamelize@^1.1.1, decamelize@^1.2.0: resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== -decimal.js@^10.2.1: +decimal.js@^10.4.2: version "10.5.0" resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.5.0.tgz#0f371c7cf6c4898ce0afb09836db73cd82010f22" integrity sha512-8vDa8Qxvr/+d94hSh5P3IJwI5t8/c0KsMp+g8bNw9cY2icONa5aPfvKeieW1WlG0WQYwwhJ7mjui2xtiePQSXw== @@ -9978,7 +10496,7 @@ detect-browser@5.2.0: resolved "https://registry.yarnpkg.com/detect-browser/-/detect-browser-5.2.0.tgz#c9cd5afa96a6a19fda0bbe9e9be48a6b6e1e9c97" integrity sha512-tr7XntDAu50BVENgQfajMLzacmSe34D+qZc4zjnniz0ZVuw/TZcLcyxHQjYpJTM36sGEkZZlYLnIM1hH7alTMA== -detect-browser@5.3.0: +detect-browser@5.3.0, detect-browser@^5.2.0: version "5.3.0" resolved "https://registry.yarnpkg.com/detect-browser/-/detect-browser-5.3.0.tgz#9705ef2bddf46072d0f7265a1fe300e36fe7ceca" integrity sha512-53rsFbGdwMwlF7qvCt0ypLM5V5/Mbl0szB7GPN8y9NCcbknYOeVVXdrXEq+90IwAfrrzt6Hd+u2E2ntakICU8w== @@ -10133,12 +10651,12 @@ domelementtype@^2.0.1, domelementtype@^2.2.0, domelementtype@^2.3.0: resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== -domexception@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" - integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== +domexception@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-4.0.0.tgz#4ad1be56ccadc86fc76d033353999a8037d03673" + integrity sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw== dependencies: - webidl-conversions "^5.0.0" + webidl-conversions "^7.0.0" domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: version "4.3.1" @@ -10326,6 +10844,16 @@ eciesjs@^0.4.10: "@noble/curves" "^1.6.0" "@noble/hashes" "^1.5.0" +eciesjs@^0.4.11: + version "0.4.14" + resolved "https://registry.yarnpkg.com/eciesjs/-/eciesjs-0.4.14.tgz#a48c527f7754b4390dfd7e863fe0166c1972be94" + integrity sha512-eJAgf9pdv214Hn98FlUzclRMYWF7WfoLlkS9nWMTm1qcCwn6Ad4EGD9lr9HXMBfSrZhYQujRE+p0adPRkctC6A== + dependencies: + "@ecies/ciphers" "^0.2.2" + "@noble/ciphers" "^1.0.0" + "@noble/curves" "^1.6.0" + "@noble/hashes" "^1.5.0" + ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -10382,16 +10910,21 @@ elliptic@6.6.1, elliptic@^6.4.0, elliptic@^6.5.2, elliptic@^6.5.3, elliptic@^6.5 minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" +elysia@^1.2.25: + version "1.2.25" + resolved "https://registry.yarnpkg.com/elysia/-/elysia-1.2.25.tgz#52a9493ef5153c605ce5d0d8d8b0ad7c9d834b29" + integrity sha512-WsdQpORJvb4uszzeqYT0lg97knw1iBW1NTzJ1Jm57tiHg+DfAotlWXYbjmvQ039ssV0fYELDHinLLoUazZkEHg== + dependencies: + "@sinclair/typebox" "^0.34.27" + cookie "^1.0.2" + memoirist "^0.3.0" + openapi-types "^12.1.3" + emittery@^0.13.1: version "0.13.1" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" integrity sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== -emittery@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" - integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== - emoji-regex-xs@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/emoji-regex-xs/-/emoji-regex-xs-1.0.0.tgz#e8af22e5d9dbd7f7f22d280af3d19d2aab5b0724" @@ -10417,6 +10950,11 @@ emojis-list@^3.0.0: resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== +encode-utf8@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/encode-utf8/-/encode-utf8-1.0.3.tgz#f30fdd31da07fb596f281beb2f6b027851994cda" + integrity sha512-ucAnuBEhUK4boH2HjVYG5Q2mQyPorvv0u/ocS+zhdw0S8AlHYY+GOFhP1Gio5z4icpP2ivFSvhtFjQi8+T9ppw== + encodeurl@^2.0.0, encodeurl@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" @@ -10442,13 +10980,29 @@ encoding@^0.1.11, encoding@^0.1.12, encoding@^0.1.13: dependencies: iconv-lite "^0.6.2" -end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1: +end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.0, end-of-stream@^1.4.1: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" +engine.io-client@~6.6.1: + version "6.6.3" + resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-6.6.3.tgz#815393fa24f30b8e6afa8f77ccca2f28146be6de" + integrity sha512-T0iLjnyNWahNyv/lcjS2y4oE358tVS/SYQNxYXGAJ9/GLgH4VCvOQ/mhTjqU88mLZCQgiG8RIegFHYCdVC+j5w== + dependencies: + "@socket.io/component-emitter" "~3.1.0" + debug "~4.3.1" + engine.io-parser "~5.2.1" + ws "~8.17.1" + xmlhttprequest-ssl "~2.1.1" + +engine.io-parser@~5.2.1: + version "5.2.3" + resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.2.3.tgz#00dc5b97b1f233a23c9398d0209504cf5f94d92f" + integrity sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q== + enhanced-resolve@^5.15.0: version "5.18.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.18.0.tgz#91eb1db193896b9801251eeff1c6980278b1e404" @@ -11258,6 +11812,17 @@ eth-block-tracker@^4.2.0, eth-block-tracker@^4.4.2: pify "^3.0.0" safe-event-emitter "^1.0.1" +eth-block-tracker@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/eth-block-tracker/-/eth-block-tracker-7.1.0.tgz#dfc16085c6817cc30caabba381deb8d204c1c766" + integrity sha512-8YdplnuE1IK4xfqpf4iU7oBxnOYAc35934o083G8ao+8WM8QQtt/mVlAY6yIAdY1eMeLqg4Z//PZjJGmWGPMRg== + dependencies: + "@metamask/eth-json-rpc-provider" "^1.0.0" + "@metamask/safe-event-emitter" "^3.0.0" + "@metamask/utils" "^5.0.1" + json-rpc-random-id "^1.0.1" + pify "^3.0.0" + eth-ens-namehash@2.0.8, eth-ens-namehash@^2.0.8: version "2.0.8" resolved "https://registry.yarnpkg.com/eth-ens-namehash/-/eth-ens-namehash-2.0.8.tgz#229ac46eca86d52e0c991e7cb2aef83ff0f68bcf" @@ -11278,6 +11843,17 @@ eth-json-rpc-filters@^4.0.2, eth-json-rpc-filters@^4.2.1: json-rpc-engine "^6.1.0" pify "^5.0.0" +eth-json-rpc-filters@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/eth-json-rpc-filters/-/eth-json-rpc-filters-6.0.1.tgz#0b3e370f017f5c6f58d3e7bd0756d8099ed85c56" + integrity sha512-ITJTvqoCw6OVMLs7pI8f4gG92n/St6x80ACtHodeS+IXmO0w+t1T5OOzfSt7KLSMLRkVUoexV7tztLgDxg+iig== + dependencies: + "@metamask/safe-event-emitter" "^3.0.0" + async-mutex "^0.2.6" + eth-query "^2.1.2" + json-rpc-engine "^6.1.0" + pify "^5.0.0" + eth-json-rpc-infura@^3.1.0: version "3.2.1" resolved "https://registry.yarnpkg.com/eth-json-rpc-infura/-/eth-json-rpc-infura-3.2.1.tgz#26702a821067862b72d979c016fd611502c6057f" @@ -11399,7 +11975,7 @@ eth-rpc-errors@^3.0.0: dependencies: fast-safe-stringify "^2.0.6" -eth-rpc-errors@^4.0.2: +eth-rpc-errors@^4.0.2, eth-rpc-errors@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/eth-rpc-errors/-/eth-rpc-errors-4.0.3.tgz#6ddb6190a4bf360afda82790bb7d9d5e724f423a" integrity sha512-Z3ymjopaoft7JDoxZcEb3pwdGh7yiYMhOwm2doUt6ASXlMavpNlK6Cre0+IMl2VSGyEU9rkiperQhp5iRxn5Pg== @@ -11781,6 +12357,19 @@ ethers@^5.0.13, ethers@^5.1.4, ethers@^5.7.1: "@ethersproject/web" "5.7.1" "@ethersproject/wordlists" "5.7.0" +ethers@^6.13.5: + version "6.13.5" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-6.13.5.tgz#8c1d6ac988ac08abc3c1d8fabbd4b8b602851ac4" + integrity sha512-+knKNieu5EKRThQJWwqaJ10a6HE9sSehGeqWN65//wE7j47ZpFhKAnHB/JJFibwwg61I/koxaPsXbXpD/skNOQ== + dependencies: + "@adraffy/ens-normalize" "1.10.1" + "@noble/curves" "1.2.0" + "@noble/hashes" "1.3.2" + "@types/node" "22.7.5" + aes-js "4.0.0-beta.5" + tslib "2.7.0" + ws "8.17.1" + ethjs-unit@0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/ethjs-unit/-/ethjs-unit-0.1.6.tgz#c665921e476e87bce2a9d588a6fe0405b2c41699" @@ -11818,12 +12407,17 @@ event-stream@3.3.4, event-stream@=3.3.4: stream-combiner "~0.0.4" through "~2.3.1" +event-target-shim@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" + integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== + eventemitter2@6.4.7: version "6.4.7" resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-6.4.7.tgz#a7f6c4d7abf28a14c1ef3442f21cb306a054271d" integrity sha512-tYUSVOGeQPKt/eC1ABfhHy5Xd96N3oIijJvN3O9+TsC28T5V9yX9oEfEK5faP0EFSNVOG97qtAS68GBrQB2hDg== -eventemitter2@^6.4.3: +eventemitter2@^6.4.3, eventemitter2@^6.4.9: version "6.4.9" resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-6.4.9.tgz#41f2750781b4230ed58827bc119d293471ecb125" integrity sha512-JEPTiaOt9f04oa6NOkc4aH+nVp5I3wEjpHbIPqfgCdD5v5bUzy7xQqwcVO2aDQgOWhI28da57HksMrzK9HlRxg== @@ -11838,7 +12432,7 @@ eventemitter3@4.0.7, eventemitter3@^4.0.0, eventemitter3@^4.0.4: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== -eventemitter3@^5.0.1: +eventemitter3@5.0.1, eventemitter3@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== @@ -11911,16 +12505,6 @@ expand-brackets@^2.1.4: snapdragon "^0.8.1" to-regex "^3.0.1" -expect@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" - integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== - dependencies: - "@jest/types" "^27.5.1" - jest-get-type "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - expect@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" @@ -12016,6 +12600,14 @@ extend@~3.0.2: resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== +extension-port-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/extension-port-stream/-/extension-port-stream-3.0.0.tgz#00a7185fe2322708a36ed24843c81bd754925fef" + integrity sha512-an2S5quJMiy5bnZKEf6AkfH/7r8CzHvhchU40gxN+OM6HPhe7Z9T1FUychcf2M9PpPOO0Hf7BAEfJkw2TDIBDw== + dependencies: + readable-stream "^3.6.2 || ^4.4.2" + webextension-polyfill ">=0.10.0 <1.0" + external-editor@^3.0.3, external-editor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" @@ -12079,6 +12671,11 @@ fast-check@3.1.1: dependencies: pure-rand "^5.0.1" +fast-copy@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/fast-copy/-/fast-copy-3.0.2.tgz#59c68f59ccbcac82050ba992e0d5c389097c9d35" + integrity sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ== + fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" @@ -12116,12 +12713,12 @@ fast-levenshtein@^2.0.6: resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== -fast-redact@^3.0.0: +fast-redact@^3.0.0, fast-redact@^3.1.1: version "3.5.0" resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.5.0.tgz#e9ea02f7e57d0cd8438180083e93077e496285e4" integrity sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A== -fast-safe-stringify@^2.0.6: +fast-safe-stringify@^2.0.6, fast-safe-stringify@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== @@ -12432,15 +13029,6 @@ form-data-encoder@1.7.1: resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.1.tgz#ac80660e4f87ee0d3d3c3638b7da8278ddb8ec96" integrity sha512-EFRDrsMm/kyqbTQocNvRXMLjc7Es2Vk+IQFx/YW7hkUH1eBl4J1fqiP34l74Yt0pFLCNpc06fkbVk00008mzjg== -form-data@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.2.tgz#83ad9ced7c03feaad97e293d6f6091011e1659c8" - integrity sha512-sJe+TQb2vIaIyO783qN6BlMYWMw3WBOHA1Ay2qxsnjuafEOQFJ2JakedOQirT6D5XPRxDvS7AHYyem9fTpb4LQ== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - form-data@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.1.tgz#ba1076daaaa5bfd7e99c1a6cb02aa0a5cff90d48" @@ -12952,7 +13540,7 @@ glob@^10.2.2, glob@^10.3.10: package-json-from-dist "^1.0.0" path-scurry "^1.11.1" -glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.2.0, glob@~7.2.3: +glob@^7.1.1, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.2.0, glob@~7.2.3: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -13395,6 +13983,16 @@ header-case@^1.0.0: no-case "^2.2.0" upper-case "^1.1.3" +help-me@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/help-me/-/help-me-5.0.0.tgz#b1ebe63b967b74060027c2ac61f9be12d354a6f6" + integrity sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg== + +hey-listen@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68" + integrity sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q== + highlight.js@^10.4.1: version "10.7.3" resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.7.3.tgz#697272e3991356e40c3cac566a74eef681756531" @@ -13465,13 +14063,6 @@ hosted-git-info@^7.0.0: dependencies: lru-cache "^10.0.1" -html-encoding-sniffer@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" - integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== - dependencies: - whatwg-encoding "^1.0.5" - html-encoding-sniffer@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz#2cb1a8cf0db52414776e5b2a7a04d5dd98158de9" @@ -13653,7 +14244,7 @@ https-browserify@^1.0.0: resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg== -https-proxy-agent@^5.0.0: +https-proxy-agent@^5.0.0, https-proxy-agent@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== @@ -14639,6 +15230,11 @@ isomorphic-ws@^5.0.0: resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz#e5529148912ecb9b451b46ed44d53dae1ce04bbf" integrity sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw== +isows@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/isows/-/isows-1.0.6.tgz#0da29d706fa51551c663c627ace42769850f86e7" + integrity sha512-lPHCayd40oW98/I0uvgaHKWCSvkzY27LjWLbtzOm64yQ+G3Q5npjjbdppU65iZXkK1Zt+kH9pfegli0AYfwYYw== + isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" @@ -14660,7 +15256,7 @@ istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz#2d166c4b0644d43a39f04bf6c2edd1e585f31756" integrity sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg== -istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: +istanbul-lib-instrument@^5.0.4: version "5.2.1" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== @@ -14792,39 +15388,14 @@ jayson@^4.1.1: uuid "^8.3.2" ws "^7.5.10" -jest-changed-files@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" - integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== +jest-changed-files@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.7.0.tgz#1c06d07e77c78e1585d020424dedc10d6e17ac3a" + integrity sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w== dependencies: - "@jest/types" "^27.5.1" execa "^5.0.0" - throat "^6.0.1" - -jest-circus@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" - integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - dedent "^0.7.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - throat "^6.0.1" + jest-util "^29.7.0" + p-limit "^3.1.0" jest-circus@^29.7.0: version "29.7.0" @@ -14852,55 +15423,24 @@ jest-circus@^29.7.0: slash "^3.0.0" stack-utils "^2.0.3" -jest-cli@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" - integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== +jest-cli@^29.2.2: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-29.7.0.tgz#5592c940798e0cae677eec169264f2d839a37995" + integrity sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg== dependencies: - "@jest/core" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" + "@jest/core" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/types" "^29.6.3" chalk "^4.0.0" + create-jest "^29.7.0" exit "^0.1.2" - graceful-fs "^4.2.9" import-local "^3.0.2" - jest-config "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - prompts "^2.0.1" - yargs "^16.2.0" - -jest-config@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" - integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== - dependencies: - "@babel/core" "^7.8.0" - "@jest/test-sequencer" "^27.5.1" - "@jest/types" "^27.5.1" - babel-jest "^27.5.1" - chalk "^4.0.0" - ci-info "^3.2.0" - deepmerge "^4.2.2" - glob "^7.1.1" - graceful-fs "^4.2.9" - jest-circus "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-get-type "^27.5.1" - jest-jasmine2 "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runner "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - micromatch "^4.0.4" - parse-json "^5.2.0" - pretty-format "^27.5.1" - slash "^3.0.0" - strip-json-comments "^3.1.1" + jest-config "^29.7.0" + jest-util "^29.7.0" + jest-validate "^29.7.0" + yargs "^17.3.1" -jest-config@^29.4.1: +jest-config@^29.4.1, jest-config@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-29.7.0.tgz#bcbda8806dbcc01b1e316a46bb74085a84b0245f" integrity sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ== @@ -14948,13 +15488,6 @@ jest-diff@^29.4.1, jest-diff@^29.7.0: jest-get-type "^29.6.3" pretty-format "^29.7.0" -jest-docblock@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" - integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== - dependencies: - detect-newline "^3.0.0" - jest-docblock@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.7.0.tgz#8fddb6adc3cdc955c93e2a87f61cfd350d5d119a" @@ -14962,17 +15495,6 @@ jest-docblock@^29.7.0: dependencies: detect-newline "^3.0.0" -jest-each@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" - integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - jest-get-type "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - jest-each@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.7.0.tgz#162a9b3f2328bdd991beaabffbb74745e56577d1" @@ -14984,30 +15506,19 @@ jest-each@^29.7.0: jest-util "^29.7.0" pretty-format "^29.7.0" -jest-environment-jsdom@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" - integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - jsdom "^16.6.0" - -jest-environment-node@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" - integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== +jest-environment-jsdom@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-29.7.0.tgz#d206fa3551933c3fd519e5dfdb58a0f5139a837f" + integrity sha512-k9iQbsf9OyOfdzWH8HDmrRT0gSIcX+FLNW7IQq94tFX0gynPwqDTW0Ho6iMVNjGz/nb+l/vW3dWM2bbLLpkbXA== dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" + "@jest/environment" "^29.7.0" + "@jest/fake-timers" "^29.7.0" + "@jest/types" "^29.6.3" + "@types/jsdom" "^20.0.0" "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" + jest-mock "^29.7.0" + jest-util "^29.7.0" + jsdom "^20.0.0" jest-environment-node@^29.7.0: version "29.7.0" @@ -15070,37 +15581,6 @@ jest-haste-map@^29.7.0: optionalDependencies: fsevents "^2.3.2" -jest-jasmine2@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" - integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - throat "^6.0.1" - -jest-leak-detector@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" - integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== - dependencies: - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - jest-leak-detector@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz#5b7ec0dadfdfec0ca383dc9aa016d36b5ea4c728" @@ -15109,7 +15589,7 @@ jest-leak-detector@^29.7.0: jest-get-type "^29.6.3" pretty-format "^29.7.0" -jest-matcher-utils@^27.0.0, jest-matcher-utils@^27.5.1: +jest-matcher-utils@^27.0.0: version "27.5.1" resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== @@ -15129,21 +15609,6 @@ jest-matcher-utils@^29.7.0: jest-get-type "^29.6.3" pretty-format "^29.7.0" -jest-message-util@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" - integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^27.5.1" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - jest-message-util@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.7.0.tgz#8bc392e204e95dfe7564abbe72a404e28e51f7f3" @@ -15159,14 +15624,6 @@ jest-message-util@^29.7.0: slash "^3.0.0" stack-utils "^2.0.3" -jest-mock@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" - integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-29.7.0.tgz#4e836cf60e99c6fcfabe9f99d017f3fdd50a6347" @@ -15191,30 +15648,13 @@ jest-regex-util@^29.6.3: resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.6.3.tgz#4a556d9c776af68e1c5f48194f4d0327d24e8a52" integrity sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg== -jest-resolve-dependencies@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" - integrity sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== - dependencies: - "@jest/types" "^27.5.1" - jest-regex-util "^27.5.1" - jest-snapshot "^27.5.1" - -jest-resolve@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" - integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== +jest-resolve-dependencies@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz#1b04f2c095f37fc776ff40803dc92921b1e88428" + integrity sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA== dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-pnp-resolver "^1.2.2" - jest-util "^27.5.1" - jest-validate "^27.5.1" - resolve "^1.20.0" - resolve.exports "^1.1.0" - slash "^3.0.0" + jest-regex-util "^29.6.3" + jest-snapshot "^29.7.0" jest-resolve@^29.4.1, jest-resolve@^29.7.0: version "29.7.0" @@ -15231,33 +15671,6 @@ jest-resolve@^29.4.1, jest-resolve@^29.7.0: resolve.exports "^2.0.0" slash "^3.0.0" -jest-runner@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" - integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - emittery "^0.8.1" - graceful-fs "^4.2.9" - jest-docblock "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-haste-map "^27.5.1" - jest-leak-detector "^27.5.1" - jest-message-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runtime "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - source-map-support "^0.5.6" - throat "^6.0.1" - jest-runner@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-29.7.0.tgz#809af072d408a53dcfd2e849a4c976d3132f718e" @@ -15285,34 +15698,6 @@ jest-runner@^29.7.0: p-limit "^3.1.0" source-map-support "0.5.13" -jest-runtime@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" - integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/globals" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - cjs-module-lexer "^1.0.0" - collect-v8-coverage "^1.0.0" - execa "^5.0.0" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" - strip-bom "^4.0.0" - jest-runtime@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-29.7.0.tgz#efecb3141cf7d3767a3a0cc8f7c9990587d3d817" @@ -15349,34 +15734,6 @@ jest-serializer@^27.5.1: "@types/node" "*" graceful-fs "^4.2.9" -jest-snapshot@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" - integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== - dependencies: - "@babel/core" "^7.7.2" - "@babel/generator" "^7.7.2" - "@babel/plugin-syntax-typescript" "^7.7.2" - "@babel/traverse" "^7.7.2" - "@babel/types" "^7.0.0" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__traverse" "^7.0.4" - "@types/prettier" "^2.1.5" - babel-preset-current-node-syntax "^1.0.0" - chalk "^4.0.0" - expect "^27.5.1" - graceful-fs "^4.2.9" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - jest-haste-map "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - natural-compare "^1.4.0" - pretty-format "^27.5.1" - semver "^7.3.2" - jest-snapshot@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-29.7.0.tgz#c2c574c3f51865da1bb329036778a69bf88a6be5" @@ -15427,18 +15784,6 @@ jest-util@^29.0.0, jest-util@^29.4.1, jest-util@^29.7.0: graceful-fs "^4.2.9" picomatch "^2.2.3" -jest-validate@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" - integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== - dependencies: - "@jest/types" "^27.5.1" - camelcase "^6.2.0" - chalk "^4.0.0" - jest-get-type "^27.5.1" - leven "^3.1.0" - pretty-format "^27.5.1" - jest-validate@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-29.7.0.tgz#7bf705511c64da591d46b15fce41400d52147d9c" @@ -15451,19 +15796,6 @@ jest-validate@^29.7.0: leven "^3.1.0" pretty-format "^29.7.0" -jest-watcher@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" - integrity sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== - dependencies: - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - jest-util "^27.5.1" - string-length "^4.0.1" - jest-watcher@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-29.7.0.tgz#7810d30d619c3a62093223ce6bb359ca1b28a2f2" @@ -15497,14 +15829,15 @@ jest-worker@^29.7.0: merge-stream "^2.0.0" supports-color "^8.0.0" -jest@27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" - integrity sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== +jest@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest/-/jest-29.2.2.tgz#24da83cbbce514718acd698926b7679109630476" + integrity sha512-r+0zCN9kUqoON6IjDdjbrsWobXM/09Nd45kIPRD8kloaRh1z5ZCMdVsgLXGxmlL7UpAJsvCYOQNO+NjvG/gqiQ== dependencies: - "@jest/core" "^27.5.1" + "@jest/core" "^29.2.2" + "@jest/types" "^29.2.1" import-local "^3.0.2" - jest-cli "^27.5.1" + jest-cli "^29.2.2" joi@^17.7.0: version "17.13.3" @@ -15531,6 +15864,16 @@ jose@^4.14.4: resolved "https://registry.yarnpkg.com/jose/-/jose-4.15.9.tgz#9b68eda29e9a0614c042fa29387196c7dd800100" integrity sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA== +jose@^5.6.3: + version "5.9.6" + resolved "https://registry.npmjs.org/jose/-/jose-5.9.6.tgz#77f1f901d88ebdc405e57cce08d2a91f47521883" + integrity sha512-AMlnetc9+CV9asI19zHmrgS/WYsWUwCn2R7RzlbJWD7F9eWYUTGyBmU9o6PxngtLGOiDGPRu+Uc4fhKzbpteZQ== + +joycon@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/joycon/-/joycon-3.1.1.tgz#bce8596d6ae808f8b68168f5fc69280996894f03" + integrity sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw== + js-levenshtein@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" @@ -15581,38 +15924,37 @@ jsbn@~0.1.0: resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg== -jsdom@^16.6.0: - version "16.7.0" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" - integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== +jsdom@^20.0.0: + version "20.0.3" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-20.0.3.tgz#886a41ba1d4726f67a8858028c99489fed6ad4db" + integrity sha512-SYhBvTh89tTfCD/CRdSOm13mOBa42iTaTyfyEWBdKcGdPxPtLFBXuHR8XHb33YNYaP+lLbmSvBTsnoesCNJEsQ== dependencies: - abab "^2.0.5" - acorn "^8.2.4" - acorn-globals "^6.0.0" - cssom "^0.4.4" + abab "^2.0.6" + acorn "^8.8.1" + acorn-globals "^7.0.0" + cssom "^0.5.0" cssstyle "^2.3.0" - data-urls "^2.0.0" - decimal.js "^10.2.1" - domexception "^2.0.1" + data-urls "^3.0.2" + decimal.js "^10.4.2" + domexception "^4.0.0" escodegen "^2.0.0" - form-data "^3.0.0" - html-encoding-sniffer "^2.0.1" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" + form-data "^4.0.0" + html-encoding-sniffer "^3.0.0" + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.1" is-potential-custom-element-name "^1.0.1" - nwsapi "^2.2.0" - parse5 "6.0.1" - saxes "^5.0.1" + nwsapi "^2.2.2" + parse5 "^7.1.1" + saxes "^6.0.0" symbol-tree "^3.2.4" - tough-cookie "^4.0.0" - w3c-hr-time "^1.0.2" - w3c-xmlserializer "^2.0.0" - webidl-conversions "^6.1.0" - whatwg-encoding "^1.0.5" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.5.0" - ws "^7.4.6" - xml-name-validator "^3.0.0" + tough-cookie "^4.1.2" + w3c-xmlserializer "^4.0.0" + webidl-conversions "^7.0.0" + whatwg-encoding "^2.0.0" + whatwg-mimetype "^3.0.0" + whatwg-url "^11.0.0" + ws "^8.11.0" + xml-name-validator "^4.0.0" jsesc@^1.3.0: version "1.3.0" @@ -15860,7 +16202,7 @@ keccak@^1.0.2: nan "^2.2.1" safe-buffer "^5.1.0" -keccak@^3.0.0: +keccak@^3.0.0, keccak@^3.0.3: version "3.0.4" resolved "https://registry.yarnpkg.com/keccak/-/keccak-3.0.4.tgz#edc09b89e633c0549da444432ecf062ffadee86d" integrity sha512-3vKuW0jV8J3XNTzvfyicFR5qvxrSAGl7KIhvgOu5cmWwM7tZRj3fMbj/pfIf4be7aznbc+prBWGjywox/g2Y6Q== @@ -16223,6 +16565,31 @@ listr2@^3.8.3: through "^2.3.8" wrap-ansi "^7.0.0" +lit-element@^3.3.0: + version "3.3.3" + resolved "https://registry.yarnpkg.com/lit-element/-/lit-element-3.3.3.tgz#10bc19702b96ef5416cf7a70177255bfb17b3209" + integrity sha512-XbeRxmTHubXENkV4h8RIPyr8lXc+Ff28rkcQzw3G6up2xg5E8Zu1IgOWIwBLEQsu3cOVFqdYwiVi0hv0SlpqUA== + dependencies: + "@lit-labs/ssr-dom-shim" "^1.1.0" + "@lit/reactive-element" "^1.3.0" + lit-html "^2.8.0" + +lit-html@^2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/lit-html/-/lit-html-2.8.0.tgz#96456a4bb4ee717b9a7d2f94562a16509d39bffa" + integrity sha512-o9t+MQM3P4y7M7yNzqAyjp7z+mQGa4NS4CxiyLqFPyFWyc4O+nodLrkrxSaCTrla6M5YOLaT3RpbbqjszB5g3Q== + dependencies: + "@types/trusted-types" "^2.0.2" + +lit@2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/lit/-/lit-2.8.0.tgz#4d838ae03059bf9cafa06e5c61d8acc0081e974e" + integrity sha512-4Sc3OFX9QHOJaHbmTMk28SYgVxLN3ePDjg7hofEft2zWlehFL3LiAuapWc4U/kYwMYJSh2hTCPZ6/LIC7ii0MA== + dependencies: + "@lit/reactive-element" "^1.6.0" + lit-element "^3.3.0" + lit-html "^2.8.0" + live-server@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/live-server/-/live-server-1.2.2.tgz#20b4fe5c2ca886faa61813310e28680804f48dad" @@ -16453,7 +16820,7 @@ lodash@4.17.19: resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== -lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.7.0: +lodash@4.17.21, lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.17.5: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -16791,6 +17158,11 @@ memdown@^1.0.0: ltgt "~2.2.0" safe-buffer "~5.1.1" +memoirist@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/memoirist/-/memoirist-0.3.0.tgz#95e9ab2b44794872beaa9dd9632f99de52e0546c" + integrity sha512-wR+4chMgVPq+T6OOsk40u9Wlpw1Pjx66NMNiYxCQQ4EUJ7jDs3D9kTCeKdBOkvAiqXlHLVJlvYL01PvIJ1MPNg== + memorystream@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" @@ -17182,6 +17554,11 @@ minizlib@^2.0.0, minizlib@^2.1.1, minizlib@^2.1.2: minipass "^3.0.0" yallist "^4.0.0" +mipd@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/mipd/-/mipd-0.0.7.tgz#bb5559e21fa18dc3d9fe1c08902ef14b7ce32fd9" + integrity sha512-aAPZPNDQ3uMTdKbuO2YmAw2TxLHO0moa4YKAyETM/DTj5FloZo+a+8tU+iv4GmW+sOxKLSRwcSFuczk+Cpt6fg== + mixin-deep@^1.2.0: version "1.3.2" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" @@ -17261,6 +17638,18 @@ morgan@^1.8.2, morgan@^1.9.1: on-finished "~2.3.0" on-headers "~1.0.2" +motion@10.16.2: + version "10.16.2" + resolved "https://registry.yarnpkg.com/motion/-/motion-10.16.2.tgz#7dc173c6ad62210a7e9916caeeaf22c51e598d21" + integrity sha512-p+PurYqfUdcJZvtnmAqu5fJgV2kR0uLFQuBKtLeFVTrYEVllI99tiOTSefVNYuip9ELTEkepIIDftNdze76NAQ== + dependencies: + "@motionone/animation" "^10.15.1" + "@motionone/dom" "^10.16.2" + "@motionone/svelte" "^10.16.2" + "@motionone/types" "^10.15.1" + "@motionone/utils" "^10.15.1" + "@motionone/vue" "^10.16.2" + ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -17957,10 +18346,10 @@ number-to-bn@1.7.0: bn.js "4.11.6" strip-hex-prefix "1.0.0" -nwsapi@^2.2.0: - version "2.2.16" - resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.16.tgz#177760bba02c351df1d2644e220c31dfec8cdb43" - integrity sha512-F1I/bimDpj3ncaNDhfyMWuFqmQDBwDB0Fogc2qpL3BWvkQteFD/8BzWuIRl83rq0DXfm8SGt/HFhLXZyljTXcQ== +nwsapi@^2.2.2: + version "2.2.18" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.18.tgz#3c4d7927e1ef4d042d319438ecfda6cd81b7ee41" + integrity sha512-p1TRH/edngVEHVbwqWnxUViEmq5znDvyB+Sik5cmuLpGOIfDf/39zLiq3swPF8Vakqn+gvNiOQAZu8djYlQILA== nx@15.9.7, "nx@>=14.8.1 < 16": version "15.9.7" @@ -18069,6 +18458,15 @@ oauth-sign@~0.9.0: resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== +obj-multiplex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/obj-multiplex/-/obj-multiplex-1.0.0.tgz#2f2ae6bfd4ae11befe742ea9ea5b36636eabffc1" + integrity sha512-0GNJAOsHoBHeNTvl5Vt6IWnpUEcc3uSRxzBri7EDyIcMgYvnY2JL2qdeV5zTMjWQX5OHcD5amcW2HFfDh0gjIA== + dependencies: + end-of-stream "^1.4.0" + once "^1.4.0" + readable-stream "^2.3.3" + object-assign@^4, object-assign@^4.0.0, object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1, object-assign@latest: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" @@ -18215,6 +18613,11 @@ on-exit-leak-free@^0.2.0: resolved "https://registry.yarnpkg.com/on-exit-leak-free/-/on-exit-leak-free-0.2.0.tgz#b39c9e3bf7690d890f4861558b0d7b90a442d209" integrity sha512-dqaz3u44QbRXQooZLTUKU41ZrzYrcvLISVgbrzbyCMxpmSLJvZ3ZamIJIZ29P6OhZIkNIQKosdeM6t1LYbA9hg== +on-exit-leak-free@^2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz#fed195c9ebddb7d9e4c3842f93f281ac8dadd3b8" + integrity sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA== + on-finished@2.4.1, on-finished@^2.2.0, on-finished@^2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" @@ -18273,6 +18676,11 @@ open@^8.4.0: is-docker "^2.1.1" is-wsl "^2.2.0" +openapi-types@^12.1.3: + version "12.1.3" + resolved "https://registry.yarnpkg.com/openapi-types/-/openapi-types-12.1.3.tgz#471995eb26c4b97b7bd356aacf7b91b73e777dd3" + integrity sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw== + opener@^1.5.1, opener@^1.5.2: version "1.5.2" resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" @@ -18372,6 +18780,32 @@ own-keys@^1.0.1: object-keys "^1.1.1" safe-push-apply "^1.0.0" +ox@0.6.7: + version "0.6.7" + resolved "https://registry.yarnpkg.com/ox/-/ox-0.6.7.tgz#afd53f2ecef68b8526660e9d29dee6e6b599a832" + integrity sha512-17Gk/eFsFRAZ80p5eKqv89a57uXjd3NgIf1CaXojATPBuujVc/fQSVhBeAU9JCRB+k7J50WQAyWTxK19T9GgbA== + dependencies: + "@adraffy/ens-normalize" "^1.10.1" + "@noble/curves" "^1.6.0" + "@noble/hashes" "^1.5.0" + "@scure/bip32" "^1.5.0" + "@scure/bip39" "^1.4.0" + abitype "^1.0.6" + eventemitter3 "5.0.1" + +ox@0.6.9: + version "0.6.9" + resolved "https://registry.yarnpkg.com/ox/-/ox-0.6.9.tgz#da1ee04fa10de30c8d04c15bfb80fe58b1f554bd" + integrity sha512-wi5ShvzE4eOcTwQVsIPdFr+8ycyX+5le/96iAJutaZAvCes1J0+RvpEPg5QDPDiaR0XQQAvZVl7AwqQcINuUug== + dependencies: + "@adraffy/ens-normalize" "^1.10.1" + "@noble/curves" "^1.6.0" + "@noble/hashes" "^1.5.0" + "@scure/bip32" "^1.5.0" + "@scure/bip39" "^1.4.0" + abitype "^1.0.6" + eventemitter3 "5.0.1" + p-cancelable@^0.4.0: version "0.4.1" resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.4.1.tgz#35f363d67d52081c8d9585e37bcceb7e0bbcb2a0" @@ -18729,12 +19163,7 @@ parse5-parser-stream@^7.1.2: dependencies: parse5 "^7.0.0" -parse5@6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" - integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== - -parse5@^7.0.0, parse5@^7.1.2: +parse5@^7.0.0, parse5@^7.1.1, parse5@^7.1.2: version "7.2.1" resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.2.1.tgz#8928f55915e6125f430cc44309765bf17556a33a" integrity sha512-BuBYQYlv1ckiPdQi/ohiivi9Sagc9JG+Ozs0r7b/0iK3sKmrb0b9FdWdBbOdx6hBCM/F9Ir82ofnBhtZOjCRPQ== @@ -18957,6 +19386,13 @@ pinkie@^2.0.0: resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" integrity sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg== +pino-abstract-transport@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz#de241578406ac7b8a33ce0d77ae6e8a0b3b68a60" + integrity sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw== + dependencies: + split2 "^4.0.0" + pino-abstract-transport@v0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/pino-abstract-transport/-/pino-abstract-transport-0.5.0.tgz#4b54348d8f73713bfd14e3dc44228739aa13d9c0" @@ -18965,11 +19401,35 @@ pino-abstract-transport@v0.5.0: duplexify "^4.1.2" split2 "^4.0.0" +pino-pretty@^13.0.0: + version "13.0.0" + resolved "https://registry.yarnpkg.com/pino-pretty/-/pino-pretty-13.0.0.tgz#21d57fe940e34f2e279905d7dba2d7e2c4f9bf17" + integrity sha512-cQBBIVG3YajgoUjo1FdKVRX6t9XPxwB9lcNJVD5GCnNM4Y6T12YYx8c6zEejxQsU0wrg9TwmDulcE9LR7qcJqA== + dependencies: + colorette "^2.0.7" + dateformat "^4.6.3" + fast-copy "^3.0.2" + fast-safe-stringify "^2.1.1" + help-me "^5.0.0" + joycon "^3.1.1" + minimist "^1.2.6" + on-exit-leak-free "^2.1.0" + pino-abstract-transport "^2.0.0" + pump "^3.0.0" + secure-json-parse "^2.4.0" + sonic-boom "^4.0.1" + strip-json-comments "^3.1.1" + pino-std-serializers@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-4.0.0.tgz#1791ccd2539c091ae49ce9993205e2cd5dbba1e2" integrity sha512-cK0pekc1Kjy5w9V2/n+8MkZwusa6EyyxfeQCB799CQRhRt/CqYKiWs5adeu8Shve2ZNffvfC/7J64A2PJo1W/Q== +pino-std-serializers@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz#7c625038b13718dbbd84ab446bd673dc52259e3b" + integrity sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA== + pino@7.11.0: version "7.11.0" resolved "https://registry.yarnpkg.com/pino/-/pino-7.11.0.tgz#0f0ea5c4683dc91388081d44bff10c83125066f6" @@ -18987,6 +19447,23 @@ pino@7.11.0: sonic-boom "^2.2.1" thread-stream "^0.15.1" +pino@^9.6.0: + version "9.6.0" + resolved "https://registry.yarnpkg.com/pino/-/pino-9.6.0.tgz#6bc628159ba0cc81806d286718903b7fc6b13169" + integrity sha512-i85pKRCt4qMjZ1+L7sy2Ag4t1atFcdbEt76+7iRJn1g2BvsnRMGu9p8pivl9fs63M2kF/A0OacFZhTub+m/qMg== + dependencies: + atomic-sleep "^1.0.0" + fast-redact "^3.1.1" + on-exit-leak-free "^2.1.0" + pino-abstract-transport "^2.0.0" + pino-std-serializers "^7.0.0" + process-warning "^4.0.0" + quick-format-unescaped "^4.0.3" + real-require "^0.2.0" + safe-stable-stringify "^2.3.1" + sonic-boom "^4.0.1" + thread-stream "^3.0.0" + pirates@^4.0.4: version "4.0.6" resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" @@ -19041,6 +19518,11 @@ pngjs@^3.3.0: resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-3.4.0.tgz#99ca7d725965fb655814eaf65f38f12bbdbf555f" integrity sha512-NCrCHhWmnQklfH4MtJMRjZ2a8c80qXeMlQMv2uVp9ISJMTt562SbGd6n2oq0PaPgKm7Z6pL9E2UlLIhC+SHL3w== +pngjs@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-5.0.0.tgz#e79dd2b215767fd9c04561c01236df960bce7fbb" + integrity sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw== + pocket-js-core@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/pocket-js-core/-/pocket-js-core-0.0.3.tgz#1ab278b9a6a5775e2bdc3c2c2e218057774061e4" @@ -19048,6 +19530,11 @@ pocket-js-core@0.0.3: dependencies: axios "^0.18.0" +pony-cause@^2.1.10: + version "2.1.11" + resolved "https://registry.yarnpkg.com/pony-cause/-/pony-cause-2.1.11.tgz#d69a20aaccdb3bdb8f74dd59e5c68d8e6772e4bd" + integrity sha512-M7LhCsdNbNgiLYiP4WjsfLUuFmCfnjdF6jKe2R9NKl4WFN+HZPGHJZ9lnLP7f9ZnKe3U9nuWD0szirmj+migUg== + portfinder@^1.0.28: version "1.0.32" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.32.tgz#2fe1b9e58389712429dc2bea5beb2146146c7f81" @@ -19072,6 +19559,11 @@ preact@10.4.1: resolved "https://registry.yarnpkg.com/preact/-/preact-10.4.1.tgz#9b3ba020547673a231c6cf16f0fbaef0e8863431" integrity sha512-WKrRpCSwL2t3tpOOGhf2WfTpcmbpxaWtDbdJdKdjd0aEiTkvOmS4NBkG6kzlaAHI9AkQ3iVqbFWM3Ei7mZ4o1Q== +preact@^10.16.0, preact@^10.24.2: + version "10.26.4" + resolved "https://registry.yarnpkg.com/preact/-/preact-10.26.4.tgz#b514f4249453a4247c82ff6d1267d59b7d78f9f9" + integrity sha512-KJhO7LBFTjP71d83trW+Ilnjbo+ySsaAgCfXOXUlmGzJ4ygYPWmysm77yg4emwfmoz3b22yvH5IsVFHbhUaH5w== + preact@^10.3.3: version "10.25.4" resolved "https://registry.yarnpkg.com/preact/-/preact-10.25.4.tgz#c1d00bee9d7b9dcd06a2311d9951973b506ae8ac" @@ -19163,6 +19655,11 @@ process-warning@^1.0.0: resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-1.0.0.tgz#980a0b25dc38cd6034181be4b7726d89066b4616" integrity sha512-du4wfLyj4yCZq1VupnVSZmRsPJsNuxoDQFdCFHLaYiEbFBD7QE0a+I4D7hOxrVnh78QE/YipFAj9lXHiXocV+Q== +process-warning@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-4.0.1.tgz#5c1db66007c67c756e4e09eb170cdece15da32fb" + integrity sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q== + process@^0.11.1, process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" @@ -19288,6 +19785,11 @@ proxy-addr@~2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" +proxy-compare@2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/proxy-compare/-/proxy-compare-2.5.1.tgz#17818e33d1653fbac8c2ec31406bce8a2966f600" + integrity sha512-oyfc0Tx87Cpwva5ZXezSp5V9vht1c7dZBhvuV/y3ctkgMVUmiAGDVeeB0dKhGSyT0v1ZTEQYpe/RXlBVBNuCLA== + proxy-from-env@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.0.0.tgz#33c50398f70ea7eb96d21f7b817630a55791c7ee" @@ -19420,6 +19922,16 @@ qrcode@1.4.4: pngjs "^3.3.0" yargs "^13.2.4" +qrcode@1.5.3: + version "1.5.3" + resolved "https://registry.yarnpkg.com/qrcode/-/qrcode-1.5.3.tgz#03afa80912c0dccf12bc93f615a535aad1066170" + integrity sha512-puyri6ApkEHYiVl4CFzo1tDkAZ+ATcnbJrJ6RiBM1Fhctdn/ix9MTE3hRph33omisEbC/2fcfemsseiKgBPKZg== + dependencies: + dijkstrajs "^1.0.1" + encode-utf8 "^1.0.3" + pngjs "^5.0.0" + yargs "^15.3.1" + qs@6.13.0: version "6.13.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" @@ -19693,7 +20205,7 @@ read@1, read@^1.0.7, read@~1.0.1, read@~1.0.7: dependencies: mute-stream "~0.0.4" -readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.2, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0: +readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.2, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0, readable-stream@^3.6.2: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== @@ -19712,7 +20224,7 @@ readable-stream@^1.0.33: isarray "0.0.1" string_decoder "~0.10.x" -readable-stream@^2.0.0, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.2.2, readable-stream@^2.2.9, readable-stream@^2.3.0, readable-stream@^2.3.5, readable-stream@^2.3.8, readable-stream@~2.3.6: +readable-stream@^2.0.0, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.2.2, readable-stream@^2.2.9, readable-stream@^2.3.0, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.8, readable-stream@~2.3.6: version "2.3.8" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== @@ -19725,6 +20237,17 @@ readable-stream@^2.0.0, readable-stream@^2.0.2, readable-stream@^2.0.6, readable string_decoder "~1.1.1" util-deprecate "~1.0.1" +"readable-stream@^3.6.2 || ^4.4.2": + version "4.7.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-4.7.0.tgz#cedbd8a1146c13dfff8dab14068028d58c15ac91" + integrity sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg== + dependencies: + abort-controller "^3.0.0" + buffer "^6.0.3" + events "^3.3.0" + process "^0.11.10" + string_decoder "^1.3.0" + readable-stream@~1.0.15: version "1.0.34" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" @@ -19771,6 +20294,11 @@ real-require@^0.1.0: resolved "https://registry.yarnpkg.com/real-require/-/real-require-0.1.0.tgz#736ac214caa20632847b7ca8c1056a0767df9381" integrity sha512-r/H9MzAWtrv8aSVjPCMFpDMl5q66GqtmmRkRjpHTsp4zBAa+snZyiQNlMONiUmEJcsnaw0wCauJ2GWODr/aFkg== +real-require@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/real-require/-/real-require-0.2.0.tgz#209632dea1810be2ae063a6ac084fee7e33fba78" + integrity sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg== + redent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" @@ -20090,11 +20618,6 @@ resolve.exports@1.1.0: resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== -resolve.exports@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.1.tgz#05cfd5b3edf641571fd46fa608b610dda9ead999" - integrity sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ== - resolve.exports@^2.0.0: version "2.0.3" resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.3.tgz#41955e6f1b4013b7586f873749a635dea07ebe3f" @@ -20329,7 +20852,7 @@ safe-regex@^1.1.0: dependencies: ret "~0.1.10" -safe-stable-stringify@^2.1.0: +safe-stable-stringify@^2.1.0, safe-stable-stringify@^2.3.1: version "2.5.0" resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz#4ca2f8e385f2831c432a719b108a3bf7af42a1dd" integrity sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA== @@ -20339,10 +20862,10 @@ safe-stable-stringify@^2.1.0: resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -saxes@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" - integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== +saxes@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-6.0.0.tgz#fe5b4a4768df4f14a201b1ba6a65c1f3d9988cc5" + integrity sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA== dependencies: xmlchars "^2.2.0" @@ -20403,6 +20926,11 @@ secure-compare@3.0.1: resolved "https://registry.yarnpkg.com/secure-compare/-/secure-compare-3.0.1.tgz#f1a0329b308b221fae37b9974f3d578d0ca999e3" integrity sha512-AckIIV90rPDcBcglUwXPF3kg0P0qmPsPXAj6BBEENQE1p5yA1xfmDJzfi1Tappj37Pv2mVbKpL3Z1T+Nn7k1Qw== +secure-json-parse@^2.4.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/secure-json-parse/-/secure-json-parse-2.7.0.tgz#5a5f9cd6ae47df23dba3151edd06855d47e09862" + integrity sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw== + seek-bzip@^1.0.5: version "1.0.6" resolved "https://registry.yarnpkg.com/seek-bzip/-/seek-bzip-1.0.6.tgz#35c4171f55a680916b52a07859ecf3b5857f21c4" @@ -20490,6 +21018,11 @@ semver@^7.0.0, semver@^7.1.1, semver@^7.1.2, semver@^7.1.3, semver@^7.2.1, semve resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== +semver@^7.3.8: + version "7.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.1.tgz#abd5098d82b18c6c81f6074ff2647fd3e7220c9f" + integrity sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA== + semver@~5.4.1: version "5.4.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e" @@ -20682,7 +21215,7 @@ setprototypeof@1.2.0: resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== -sha.js@^2.4.0, sha.js@^2.4.8: +sha.js@^2.4.0, sha.js@^2.4.11, sha.js@^2.4.8: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== @@ -20910,6 +21443,24 @@ snapdragon@^0.8.1: source-map-resolve "^0.5.0" use "^3.1.0" +socket.io-client@^4.5.1: + version "4.8.1" + resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-4.8.1.tgz#1941eca135a5490b94281d0323fe2a35f6f291cb" + integrity sha512-hJVXfu3E28NmzGk8o1sHhN3om52tRvwYeidbj7xKy2eIIse5IoKX3USlS6Tqt3BHAtflLIkCQBkzVrEEfWUyYQ== + dependencies: + "@socket.io/component-emitter" "~3.1.0" + debug "~4.3.2" + engine.io-client "~6.6.1" + socket.io-parser "~4.2.4" + +socket.io-parser@~4.2.4: + version "4.2.4" + resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83" + integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew== + dependencies: + "@socket.io/component-emitter" "~3.1.0" + debug "~4.3.1" + socks-proxy-agent@^6.0.0: version "6.2.1" resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz#2687a31f9d7185e38d530bef1944fe1f1496d6ce" @@ -20968,6 +21519,13 @@ sonic-boom@^2.2.1: dependencies: atomic-sleep "^1.0.0" +sonic-boom@^4.0.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-4.2.0.tgz#e59a525f831210fa4ef1896428338641ac1c124d" + integrity sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww== + dependencies: + atomic-sleep "^1.0.0" + sort-keys-length@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/sort-keys-length/-/sort-keys-length-1.0.1.tgz#9cb6f4f4e9e48155a6aa0671edd336ff1479a188" @@ -21035,14 +21593,6 @@ source-map-support@^0.4.15: dependencies: source-map "^0.5.6" -source-map-support@^0.5.6: - version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - source-map-url@^0.4.0: version "0.4.1" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" @@ -21058,11 +21608,6 @@ source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== -source-map@^0.7.3: - version "0.7.4" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" - integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== - space-separated-tokens@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz#1ecd9d2350a3844572c3f4a312bceb018348859f" @@ -21334,7 +21879,7 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -"string-width-cjs@npm:string-width@^4.2.0": +"string-width-cjs@npm:string-width@^4.2.0", "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -21352,15 +21897,6 @@ string-width@^1.0.1: is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" -"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - string-width@^2.0.0, string-width@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" @@ -21487,7 +22023,7 @@ stringify-package@^1.0.1: resolved "https://registry.yarnpkg.com/stringify-package/-/stringify-package-1.0.1.tgz#e5aa3643e7f74d0f28628b72f3dad5cecfc3ba85" integrity sha512-sa4DUQsYciMP1xhKWGuFM04fB0LG/9DlluZoSVywUMRNvzid6XucHK0/90xGxRoHrAaROrcHK1aPKaijCtSrhg== -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": +"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -21515,13 +22051,6 @@ strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: dependencies: ansi-regex "^4.1.0" -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -21608,6 +22137,14 @@ strong-log-transformer@^2.1.0: minimist "^1.2.0" through "^2.3.4" +stytch@^12.4.0: + version "12.4.0" + resolved "https://registry.npmjs.org/stytch/-/stytch-12.4.0.tgz#2a9dbac10e2a45057409ed1bd4f7743e1fc6cda9" + integrity sha512-jyYIfirVnhy3gAtGLEIK5c5tSp5bhi9tUE0JRzItJlwISBW/StMMOvP0hhPUb831EGjV2l1S4YRPg/NqJ+eYNg== + dependencies: + jose "^5.6.3" + undici "^6.19.5" + superstatic@^7.1.0: version "7.1.0" resolved "https://registry.yarnpkg.com/superstatic/-/superstatic-7.1.0.tgz#42cc773a0f500fb691841e0533d0b8c31f25997f" @@ -21676,7 +22213,7 @@ supports-color@^8.0.0, supports-color@^8.1.1: dependencies: has-flag "^4.0.0" -supports-hyperlinks@^2.0.0, supports-hyperlinks@^2.1.0: +supports-hyperlinks@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== @@ -21915,14 +22452,6 @@ term-size@^2.1.0: resolved "https://registry.yarnpkg.com/term-size/-/term-size-2.2.1.tgz#2a6a54840432c2fb6320fea0f415531e90189f54" integrity sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg== -terminal-link@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" - integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== - dependencies: - ansi-escapes "^4.2.1" - supports-hyperlinks "^2.0.0" - test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -21959,10 +22488,12 @@ thread-stream@^0.15.1: dependencies: real-require "^0.1.0" -throat@^6.0.1: - version "6.0.2" - resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.2.tgz#51a3fbb5e11ae72e2cf74861ed5c8020f89f29fe" - integrity sha512-WKexMoJj3vEuK0yFEapj8y64V0A6xcuPuK9Gt1d0R+dzCSJc0lHqQytAbSB4cDAK0dWh4T0E2ETkoLE2WZ41OQ== +thread-stream@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/thread-stream/-/thread-stream-3.1.0.tgz#4b2ef252a7c215064507d4ef70c05a5e2d34c4f1" + integrity sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A== + dependencies: + real-require "^0.2.0" throttleit@^1.0.0: version "1.0.1" @@ -22088,7 +22619,7 @@ toidentifier@1.0.1: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== -tough-cookie@^4.0.0, tough-cookie@^4.1.3: +tough-cookie@^4.1.2, tough-cookie@^4.1.3: version "4.1.4" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.4.tgz#945f1461b45b5a8c76821c33ea49c3ac192c1b36" integrity sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag== @@ -22113,10 +22644,10 @@ toxic@^1.0.0: dependencies: lodash "^4.17.10" -tr46@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" - integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== +tr46@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-3.0.0.tgz#555c4e297a950617e8eeddef633c87d4d9d6cbf9" + integrity sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA== dependencies: punycode "^2.1.1" @@ -22234,7 +22765,12 @@ tslib@1.14.1, tslib@^1.8.1, tslib@^1.9.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.2.0, tslib@^2.3.0, tslib@^2.4.0, tslib@^2.4.1, tslib@^2.7.0, tslib@^2.8.0: +tslib@2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.7.0.tgz#d9b40c5c40ab59e8738f297df3087bf1a2690c01" + integrity sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA== + +tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.2.0, tslib@^2.3.0, tslib@^2.3.1, tslib@^2.4.0, tslib@^2.4.1, tslib@^2.6.0, tslib@^2.7.0, tslib@^2.8.0: version "2.8.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== @@ -22526,10 +23062,10 @@ underscore@1.9.1: resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.1.tgz#06dce34a0e68a7babc29b365b8e74b8925203961" integrity sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg== -undici-types@~5.26.4: - version "5.26.5" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" - integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== +undici-types@~6.19.2: + version "6.19.8" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.19.8.tgz#35111c9d1437ab83a7cdc0abae2f26d88eda0a02" + integrity sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw== undici-types@~6.20.0: version "6.20.0" @@ -22869,6 +23405,16 @@ url@^0.11.0: punycode "^1.4.1" qs "^6.12.3" +use-sync-external-store@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz#7dbefd6ef3fe4e767a0cf5d7287aacfb5846928a" + integrity sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA== + +use-sync-external-store@1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/use-sync-external-store/-/use-sync-external-store-1.4.0.tgz#adbc795d8eeb47029963016cefdf89dc799fcebc" + integrity sha512-9WXSPC5fMv61vaupRkCKCxsPxBocVnwakBEkMIHHpkTTg6icbJtg6jzgtLDm4bl3cSHAca52rYWih0k4K3PfHw== + use@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" @@ -22898,7 +23444,7 @@ util@^0.10.3: dependencies: inherits "2.0.3" -util@^0.12.3, util@^0.12.5: +util@^0.12.3, util@^0.12.4, util@^0.12.5: version "0.12.5" resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== @@ -22934,7 +23480,7 @@ uuid@^8.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== -uuid@^9.0.0: +uuid@^9.0.0, uuid@^9.0.1: version "9.0.1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== @@ -22954,15 +23500,6 @@ v8-compile-cache@^2.0.3: resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.4.0.tgz#cdada8bec61e15865f05d097c5f4fd30e94dc128" integrity sha512-ocyWc3bAHBB/guyqJQVI5o4BZkPhznPYUG2ea80Gond/BgNWpap8TOmLSeeQG7bnh2KMISxskdADG59j7zruhw== -v8-to-istanbul@^8.1.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" - integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" - source-map "^0.7.3" - v8-to-istanbul@^9.0.1: version "9.3.0" resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz#b9572abfa62bd556c16d75fdebc1a411d5ff3175" @@ -23004,6 +23541,14 @@ validate-npm-package-name@^5.0.0: resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz#a316573e9b49f3ccd90dbb6eb52b3f06c6d604e8" integrity sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ== +valtio@1.11.2: + version "1.11.2" + resolved "https://registry.yarnpkg.com/valtio/-/valtio-1.11.2.tgz#b8049c02dfe65620635d23ebae9121a741bb6530" + integrity sha512-1XfIxnUXzyswPAPXo1P3Pdx2mq/pIqZICkWN60Hby0d9Iqb+MEIpqgYVlbflvHdrp2YR/q3jyKWRPJJ100yxaw== + dependencies: + proxy-compare "2.5.1" + use-sync-external-store "1.2.0" + varint@^5.0.0: version "5.0.2" resolved "https://registry.yarnpkg.com/varint/-/varint-5.0.2.tgz#5b47f8a947eb668b848e034dcfa87d0ff8a7f7a4" @@ -23039,24 +23584,54 @@ vfile@^6.0.0: "@types/unist" "^3.0.0" vfile-message "^4.0.0" +viem@2.23.2: + version "2.23.2" + resolved "https://registry.yarnpkg.com/viem/-/viem-2.23.2.tgz#db395c8cf5f4fb5572914b962fb8ce5db09f681c" + integrity sha512-NVmW/E0c5crMOtbEAqMF0e3NmvQykFXhLOc/CkLIXOlzHSA6KXVz3CYVmaKqBF8/xtjsjHAGjdJN3Ru1kFJLaA== + dependencies: + "@noble/curves" "1.8.1" + "@noble/hashes" "1.7.1" + "@scure/bip32" "1.6.2" + "@scure/bip39" "1.5.4" + abitype "1.0.8" + isows "1.0.6" + ox "0.6.7" + ws "8.18.0" + +viem@^2.1.1, viem@^2.23.3: + version "2.23.10" + resolved "https://registry.yarnpkg.com/viem/-/viem-2.23.10.tgz#deebe1a49da3102a650bc55e256e9877128a7c60" + integrity sha512-va6Wde+v96PdfzdPEspCML1MjAqe+88O8BD+R9Kun/4s5KMUNcqfHbXdZP0ZZ2Zms80styvH2pDRAqCho6TqkA== + dependencies: + "@noble/curves" "1.8.1" + "@noble/hashes" "1.7.1" + "@scure/bip32" "1.6.2" + "@scure/bip39" "1.5.4" + abitype "1.0.8" + isows "1.0.6" + ox "0.6.9" + ws "8.18.1" + vm-browserify@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== -w3c-hr-time@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" - integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== +w3c-xmlserializer@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz#aebdc84920d806222936e3cdce408e32488a3073" + integrity sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw== dependencies: - browser-process-hrtime "^1.0.0" + xml-name-validator "^4.0.0" -w3c-xmlserializer@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" - integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== +wagmi@^2.14.13: + version "2.14.13" + resolved "https://registry.yarnpkg.com/wagmi/-/wagmi-2.14.13.tgz#0c3f68bd2f6618754f17ff99355c0ab50df39c46" + integrity sha512-CX+NpyTczVIST5DqLtasKZ3VrhImKQZ9XM9aDUVgOM46MRN/CykgGGAJfuIfpQ80LZ91GCY+JuitGknHUz7MNQ== dependencies: - xml-name-validator "^3.0.0" + "@wagmi/connectors" "5.7.9" + "@wagmi/core" "2.16.5" + use-sync-external-store "1.4.0" wait-on@7.0.1: version "7.0.1" @@ -23406,20 +23981,25 @@ web3@1.10.0: web3-shh "1.10.0" web3-utils "1.10.0" +"webextension-polyfill@>=0.10.0 <1.0": + version "0.12.0" + resolved "https://registry.yarnpkg.com/webextension-polyfill/-/webextension-polyfill-0.12.0.tgz#f62c57d2cd42524e9fbdcee494c034cae34a3d69" + integrity sha512-97TBmpoWJEE+3nFBQ4VocyCdLKfw54rFaJ6EVQYLBCXqCIpLSZkwGgASpv4oPt9gdKCJ80RJlcmNzNn008Ag6Q== + +webextension-polyfill@^0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/webextension-polyfill/-/webextension-polyfill-0.10.0.tgz#ccb28101c910ba8cf955f7e6a263e662d744dbb8" + integrity sha512-c5s35LgVa5tFaHhrZDnr3FpQpjj1BB+RXhLTYUxGqBVN460HkbM8TBtEqdXWbpTKfzwCcjAZVF7zXCYSKtcp9g== + webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== -webidl-conversions@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" - integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== - -webidl-conversions@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" - integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== +webidl-conversions@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-7.0.0.tgz#256b4e1882be7debbf01d05f0aa2039778ea080a" + integrity sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g== webpack-merge@^5.8.0: version "5.10.0" @@ -23456,13 +24036,6 @@ websocket@^1.0.32: utf-8-validate "^5.0.2" yaeti "^0.0.6" -whatwg-encoding@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" - integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== - dependencies: - iconv-lite "0.4.24" - whatwg-encoding@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz#e7635f597fd87020858626805a2729fa7698ac53" @@ -23487,16 +24060,24 @@ whatwg-fetch@^3.0.0, whatwg-fetch@^3.4.1: resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz#580ce6d791facec91d37c72890995a0b48d31c70" integrity sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg== -whatwg-mimetype@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" - integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== +whatwg-mimetype@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz#5fa1a7623867ff1af6ca3dc72ad6b8a4208beba7" + integrity sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q== whatwg-mimetype@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz#bc1bf94a985dc50388d54a9258ac405c3ca2fc0a" integrity sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg== +whatwg-url@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-11.0.0.tgz#0a849eebb5faf2119b901bb76fd795c2848d4018" + integrity sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ== + dependencies: + tr46 "^3.0.0" + webidl-conversions "^7.0.0" + whatwg-url@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" @@ -23505,15 +24086,6 @@ whatwg-url@^5.0.0: tr46 "~0.0.3" webidl-conversions "^3.0.0" -whatwg-url@^8.0.0, whatwg-url@^8.5.0: - version "8.7.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" - integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== - dependencies: - lodash "^4.7.0" - tr46 "^2.1.0" - webidl-conversions "^6.1.0" - which-boxed-primitive@^1.0.2, which-boxed-primitive@^1.1.0, which-boxed-primitive@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz#d76ec27df7fa165f18d5808374a5fe23c29b176e" @@ -23631,7 +24203,7 @@ wordwrap@^1.0.0: resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -23666,15 +24238,6 @@ wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - wrap-ansi@^8.0.1, wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" @@ -23767,6 +24330,21 @@ ws@7.5.3: resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.3.tgz#160835b63c7d97bfab418fc1b8a9fced2ac01a74" integrity sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg== +ws@8.17.1, ws@~8.17.1: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b" + integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ== + +ws@8.18.0, ws@^8.5.0: + version "8.18.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc" + integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw== + +ws@8.18.1, ws@^8.11.0: + version "8.18.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.1.tgz#ea131d3784e1dfdff91adb0a4a116b127515e3cb" + integrity sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w== + ws@^3.0.0: version "3.3.3" resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" @@ -23783,16 +24361,11 @@ ws@^5.1.1: dependencies: async-limiter "~1.0.0" -ws@^7, ws@^7.0.0, ws@^7.2.3, ws@^7.4.6, ws@^7.5.1, ws@^7.5.10: +ws@^7, ws@^7.0.0, ws@^7.2.3, ws@^7.5.1, ws@^7.5.10: version "7.5.10" resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9" integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== -ws@^8.5.0: - version "8.18.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc" - integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw== - xdg-basedir@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13" @@ -23835,16 +24408,21 @@ xhr@^2.0.4, xhr@^2.2.0, xhr@^2.3.3: parse-headers "^2.0.0" xtend "^4.0.0" -xml-name-validator@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" - integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== +xml-name-validator@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-4.0.0.tgz#79a006e2e63149a8600f15430f0a4725d1524835" + integrity sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw== xmlchars@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== +xmlhttprequest-ssl@~2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-2.1.2.tgz#e9e8023b3f29ef34b97a859f584c5e6c61418e23" + integrity sha512-TEU+nJVUUnA4CYJFLvK5X9AOeH4KvDvhIfm0vV1GaQRtchnG0hgK5p8hw/xjv8cunWYCsiPCSDzObPyhEwq3KQ== + xmlhttprequest@1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/xmlhttprequest/-/xmlhttprequest-1.8.0.tgz#67fe075c5c24fef39f9d65f5f7b7fe75171968fc" @@ -23928,6 +24506,14 @@ yargs-parser@^13.1.2: camelcase "^5.0.0" decamelize "^1.2.0" +yargs-parser@^18.1.2: + version "18.1.3" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" + integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + yargs-parser@^2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-2.4.1.tgz#85568de3cf150ff49fa51825f03a8c880ddcc5c4" @@ -23957,6 +24543,23 @@ yargs@^13.2.4: y18n "^4.0.0" yargs-parser "^13.1.2" +yargs@^15.3.1: + version "15.4.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== + dependencies: + cliui "^6.0.0" + decamelize "^1.2.0" + find-up "^4.1.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^4.2.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^18.1.2" + yargs@^16.2.0: version "16.2.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" @@ -23970,7 +24573,7 @@ yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" -yargs@^17.6.2: +yargs@^17.3.1, yargs@^17.6.2: version "17.7.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== @@ -24036,6 +24639,11 @@ zod@^3.24.2: resolved "https://registry.yarnpkg.com/zod/-/zod-3.24.2.tgz#8efa74126287c675e92f46871cfc8d15c34372b3" integrity sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ== +zustand@5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/zustand/-/zustand-5.0.0.tgz#71f8aaecf185592a3ba2743d7516607361899da9" + integrity sha512-LE+VcmbartOPM+auOjCCLQOsQ05zUTp8RkgwRzefUk+2jISdMMFnxvyTjA4YNWr5ZGXYbVsEMZosttuxUBkojQ== + zwitch@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-2.0.4.tgz#c827d4b0acb76fc3e685a4c6ec2902d51070e9d7"