From f230fcdddc27320abf967c91696eb104875d7105 Mon Sep 17 00:00:00 2001
From: brechtpd
Date: Thu, 17 Oct 2024 02:19:15 +0200
Subject: [PATCH 01/15] block building fixes

---
 bin/reth/src/main.rs       |  3 +--
 crates/gwyneth/src/exex.rs | 24 ++++++++++++++++++------
 2 files changed, 19 insertions(+), 8 deletions(-)

diff --git a/bin/reth/src/main.rs b/bin/reth/src/main.rs
index 159dfd041b07..6d475e993067 100644
--- a/bin/reth/src/main.rs
+++ b/bin/reth/src/main.rs
@@ -37,8 +37,7 @@ fn main() -> eyre::Result<()> {
             .with_chain(chain_spec.clone())
             .with_network(network_config.clone())
             .with_unused_ports()
-            .with_rpc(RpcServerArgs::default().with_unused_ports().with_static_l2_rpc_ip_and_port(chain_spec.chain.id()))
-            .set_dev(true);
+            .with_rpc(RpcServerArgs::default().with_unused_ports().with_static_l2_rpc_ip_and_port(chain_spec.chain.id()));
 
         let NodeHandle { node: gwyneth_node, node_exit_future: _ } =
             NodeBuilder::new(node_config.clone())
diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs
index 1bd6bc1a3b18..144f5cc8f25b 100644
--- a/crates/gwyneth/src/exex.rs
+++ b/crates/gwyneth/src/exex.rs
@@ -122,7 +122,7 @@ impl Rollup {
         let attrs = GwynethPayloadAttributes {
             inner: EthPayloadAttributes {
-                timestamp: INITIAL_TIMESTAMP,
+                timestamp: block.timestamp,
                 prev_randao: B256::ZERO,
                 suggested_fee_recipient: Address::ZERO,
                 withdrawals: Some(vec![]),
@@ -154,10 +154,22 @@ impl Rollup {
             let mut payload =
                 EthBuiltPayload::new(payload_id, SealedBlock::default(), U256::ZERO);
             loop {
-                payload =
-                    self.node.payload_builder.best_payload(payload_id).await.unwrap().unwrap();
-                if payload.block().body.is_empty() {
-                    tokio::time::sleep(std::time::Duration::from_millis(20)).await;
+                let result = self.node.payload_builder.best_payload(payload_id).await;
+
+                // TODO: There seems to be no result when there's an empty tx list
+                if let Some(result) = result {
+                    if let Ok(new_payload) = result {
+                        payload = new_payload;
+                        if payload.block().body.is_empty() {
+                            tokio::time::sleep(std::time::Duration::from_millis(20)).await;
+                            continue;
+                        }
+                    } else {
+                        println!("Gwyneth: No payload?");
+                        continue;
+                    }
+                } else {
+                    println!("Gwyneth: No block?");
                     continue;
                 }
                 break;
@@ -223,7 +235,7 @@ fn decode_chain_into_rollup_events(
         .collect()
 }
 
-pub fn decode_transactions(tx_list: &[u8]) -> Vec {
+fn decode_transactions(tx_list: &[u8]) -> Vec {
     #[allow(clippy::useless_asref)]
     Vec::::decode(&mut tx_list.as_ref()).unwrap_or_else(|e| {
         // If decoding fails we need to make an empty block

From 61f18264d9b1be39a1b2ce24cb0fa66fa4cfc0be Mon Sep 17 00:00:00 2001
From: Brecht Devos
Date: Thu, 17 Oct 2024 02:55:35 +0200
Subject: [PATCH 02/15] add default env file for easy setup

---
 packages/protocol/.env | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 packages/protocol/.env

diff --git a/packages/protocol/.env b/packages/protocol/.env
new file mode 100644
index 000000000000..3289806d9c1a
--- /dev/null
+++ b/packages/protocol/.env
@@ -0,0 +1,3 @@
+L2_GENESIS_HASH=0xdf90a9c4daa571aa308e967c9a6b4bf21ba8842d95d73d28be112b6fe0618e8c
+PRIVATE_KEY=0xbcdf20249abf0ed6d944c0288fad489e33f66b3960d9e6229c1cd214ed3bbe31
+MAINNET_CONTRACT_OWNER=0x8943545177806ED17B9F23F0a21ee5948eCaa776
\ No newline at end of file

From 5e2729cdc1bc273cbe2b64e1867df543a1bc0449 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Keszey=20D=C3=A1niel?=
Date: Fri, 18 Oct 2024 15:15:49 +0200
Subject: [PATCH 03/15] first trial

---
 bin/reth/src/main.rs             | 67 ++++++++++++++++++++------------
 crates/gwyneth/src/engine_api.rs | 31 +++++++++++----
 crates/gwyneth/src/exex.rs       | 60 +++++++++++++++++-----------
 3 files changed, 104 insertions(+), 54 deletions(-)

diff --git a/bin/reth/src/main.rs b/bin/reth/src/main.rs
index 6d475e993067..141e90a97a47 100644
--- a/bin/reth/src/main.rs
+++ b/bin/reth/src/main.rs
@@ -1,5 +1,4 @@
 #![allow(missing_docs)]
-
 // We use jemalloc for performance reasons.
 #[cfg(all(feature = "jemalloc", unix))]
 #[global_allocator]
@@ -12,6 +11,9 @@ use reth_node_builder::{NodeBuilder, NodeConfig, NodeHandle};
 use reth_node_ethereum::EthereumNode;
 use reth_tasks::TaskManager;
+// Define a list of suffixes for chain IDs and RPC ports
+const SUFFIXES: [char; 2] = ['A', 'B'/* , 'C'*/]; // Add more suffixes as needed, like C
+
 fn main() -> eyre::Result<()> {
     reth::cli::Cli::parse_args().run(|builder, _| async move {
         let tasks = TaskManager::current();
@@ -22,34 +24,51 @@ fn main() -> eyre::Result<()> {
             ..NetworkArgs::default()
         };
 
-        let chain_spec = ChainSpecBuilder::default()
-            .chain(gwyneth::exex::CHAIN_ID.into())
-            .genesis(
-                serde_json::from_str(include_str!(
-                    "../../../crates/ethereum/node/tests/assets/genesis.json"
-                ))
-                .unwrap(),
-            )
-            .cancun_activated()
-            .build();
+        let mut gwyneth_nodes = Vec::new();
+
+        for suffix in SUFFIXES.iter() {
+            let chain_id = match suffix {
+                'A' => gwyneth::exex::CHAIN_ID_A,
+                'B' => gwyneth::exex::CHAIN_ID_B,
+                // 'C' => gwyneth::exex::CHAIN_ID_C, // Add this constant in your exex.rs
+                _ => panic!("Unsupported chain ID suffix"),
+            };
 
-        let node_config = NodeConfig::test()
-            .with_chain(chain_spec.clone())
-            .with_network(network_config.clone())
-            .with_unused_ports()
-            .with_rpc(RpcServerArgs::default().with_unused_ports().with_static_l2_rpc_ip_and_port(chain_spec.chain.id()));
+            let chain_spec = ChainSpecBuilder::default()
+                .chain(chain_id.into())
+                .genesis(
+                    serde_json::from_str(include_str!(
+                        "../../../crates/ethereum/node/tests/assets/genesis.json"
+                    ))
+                    .unwrap(),
+                )
+                .cancun_activated()
+                .build();
 
-        let NodeHandle { node: gwyneth_node, node_exit_future: _ } =
-            NodeBuilder::new(node_config.clone())
-                .gwyneth_node(exec.clone(), chain_spec.chain.id())
-                .node(GwynethNode::default())
-                .launch()
-                .await?;
+            let node_config = NodeConfig::test()
+                .with_chain(chain_spec.clone())
+                .with_network(network_config.clone())
+                .with_unused_ports()
+                .with_rpc(
+                    RpcServerArgs::default()
+                        .with_unused_ports()
+                        .with_static_l2_rpc_ip_and_port(chain_spec.chain.id(), *suffix)
+                );
+
+            let NodeHandle { node: gwyneth_node, node_exit_future: _ } =
+                NodeBuilder::new(node_config.clone())
+                    .gwyneth_node(exec.clone(), chain_spec.chain.id())
+                    .node(GwynethNode::default())
+                    .launch()
+                    .await?;
+
+            gwyneth_nodes.push(gwyneth_node);
+        }
 
         let handle = builder
             .node(EthereumNode::default())
             .install_exex("Rollup", move |ctx| async {
-                Ok(gwyneth::exex::Rollup::new(ctx, gwyneth_node).await?.start())
+                Ok(gwyneth::exex::Rollup::new(ctx, gwyneth_nodes).await?.start())
             })
             .launch()
             .await?;
@@ -69,4 +88,4 @@ mod tests {
     #[command(flatten)]
     args: T,
 }
-}
+}
\ No newline at end of file
diff --git a/crates/gwyneth/src/engine_api.rs b/crates/gwyneth/src/engine_api.rs
index 16e8287a82e1..3d585f79718e 100644
--- a/crates/gwyneth/src/engine_api.rs
+++ b/crates/gwyneth/src/engine_api.rs
@@ -113,18 +113,35 @@ impl PayloadEnvelopeExt for ExecutionPayloadEnvelopeV3 {
     }
 }
 
 pub trait RpcServerArgsExEx {
-    fn with_static_l2_rpc_ip_and_port(self, chain_id: u64) -> Self;
+    fn with_static_l2_rpc_ip_and_port(self, chain_id: u64, suffix: char) -> Self;
 }
 
 impl RpcServerArgsExEx for RpcServerArgs {
-    fn with_static_l2_rpc_ip_and_port(mut self, chain_id: u64) -> Self {
+    fn with_static_l2_rpc_ip_and_port(mut self, chain_id: u64, suffix: char) -> Self {
         self.http = true;
-        // On the instance the program is running, we wanna have 10111 exposed as the (exex) L2's
-        // RPC port.
         self.http_addr = Ipv4Addr::new(0, 0, 0, 0).into();
-        self.http_port = 10110u16;
-        self.ws_port = 10111u16;
-        self.ipcpath = format!("{}-{}", constants::DEFAULT_IPC_ENDPOINT, chain_id);
+
+        // Set HTTP and WS ports based on suffix
+        match suffix {
+            'A' => {
+                self.http_port = 10110u16;
+                self.ws_port = 10111u16;
+            },
+            'B' => {
+                self.http_port = 20110u16;
+                self.ws_port = 20111u16;
+            },
+            'C' => {
+                self.http_port = 30110u16;
+                self.ws_port = 30111u16;
+            },
+            // Obviously add more if needed more chain
+            _ => panic!("Unsupported suffix: {}", suffix),
+        }
+
+        // Set IPC path
+        self.ipcpath = format!("{}-{}-{}", constants::DEFAULT_IPC_ENDPOINT, chain_id, suffix);
+
         self
     }
 }
diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs
index 144f5cc8f25b..9160b307bf6a 100644
--- a/crates/gwyneth/src/exex.rs
+++ b/crates/gwyneth/src/exex.rs
@@ -31,7 +31,8 @@ use reth_transaction_pool::{
 use RollupContract::{BlockProposed, RollupContractEvents};
 
 const ROLLUP_CONTRACT_ADDRESS: Address = address!("9fCF7D13d10dEdF17d0f24C62f0cf4ED462f65b7");
-pub const CHAIN_ID: u64 = 167010;
+pub const CHAIN_ID_A: u64 = 167010;
+pub const CHAIN_ID_B: u64 = 267010;
 const INITIAL_TIMESTAMP: u64 = 1710338135;
 
 pub type GwynethFullNode = FullNode<
@@ -69,18 +70,22 @@ sol!(RollupContract, "TaikoL1.json");
 pub struct Rollup {
     ctx: ExExContext,
-    node: GwynethFullNode,
-    engine_api: EngineApiContext,
+    nodes: Vec,
+    engine_apis: Vec>,
 }
 
 impl Rollup {
-    pub async fn new(ctx: ExExContext, node: GwynethFullNode) -> eyre::Result {
-        let engine_api = EngineApiContext {
-            engine_api_client: node.auth_server_handle().http_client(),
-            canonical_stream: node.provider.canonical_state_stream(),
-            _marker: PhantomData::,
-        };
-        Ok(Self { ctx, node, /* payload_event_stream, */ engine_api })
+    pub async fn new(ctx: ExExContext, nodes: Vec) -> eyre::Result {
+        let mut engine_apis = Vec::new();
+        for node in &nodes {
+            let engine_api = EngineApiContext {
+                engine_api_client: node.auth_server_handle().http_client(),
+                canonical_stream: node.provider.canonical_state_stream(),
+                _marker: PhantomData::,
+            };
+            engine_apis.push(engine_api);
+        }
+        Ok(Self { ctx, nodes, /* payload_event_stream, */ engine_apis })
     }
 
     pub async fn start(mut self) -> eyre::Result<()> {
@@ -91,7 +96,13 @@ impl Rollup {
             }
 
             if let Some(committed_chain) = notification.committed_chain() {
-                self.commit(&committed_chain).await?;
+                let nodes = &self.nodes;
+                let engine_apis = &self.engine_apis;
+                for i in 0..nodes.len() {
+                    let node = &nodes[i];
+                    let engine_api = &engine_apis[i];
+                    self.commit(&committed_chain, node, engine_api).await?;
+                }
                 self.ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().number))?;
             }
         }
@@ -103,13 +114,14 @@ impl Rollup {
     ///
     /// This function decodes all transactions to the rollup contract into events, executes the
     /// corresponding actions and inserts the results into the database.
-    pub async fn commit(&mut self, chain: &Chain) -> eyre::Result<()> {
+    pub async fn commit(
+        &mut self,
+        chain: &Chain,
+        node: &GwynethFullNode,
+        engine_api: &EngineApiContext,
+    ) -> eyre::Result<()> {
         let events = decode_chain_into_rollup_events(chain);
         for (block, _, event) in events {
-            // TODO: Don't emit ProposeBlock event but directely
-            // read the function call RollupContractCalls to extract Txs
-            // let _call = RollupContractCalls::abi_decode(tx.input(), true)?;
-
             if let RollupContractEvents::BlockProposed(BlockProposed {
                 blockId: block_number,
                 meta,
@@ -148,15 +160,16 @@ impl Rollup {
                 let payload_id = builder_attrs.inner.payload_id();
                 let parrent_beacon_block_root = builder_attrs.inner.parent_beacon_block_root.unwrap();
+
                 // trigger new payload building draining the pool
-                self.node.payload_builder.new_payload(builder_attrs).await.unwrap();
+                node.payload_builder.new_payload(builder_attrs).await.unwrap();
+
                 // wait for the payload builder to have finished building
                 let mut payload =
                     EthBuiltPayload::new(payload_id, SealedBlock::default(), U256::ZERO);
                 loop {
-                    let result = self.node.payload_builder.best_payload(payload_id).await;
+                    let result = node.payload_builder.best_payload(payload_id).await;
 
-                    // TODO: There seems to be no result when there's an empty tx list
                     if let Some(result) = result {
                         if let Ok(new_payload) = result {
                             payload = new_payload;
@@ -174,11 +187,12 @@ impl Rollup {
                     }
                     break;
                 }
+
                 // trigger resolve payload via engine api
-                self.engine_api.get_payload_v3_value(payload_id).await?;
+                engine_api.get_payload_v3_value(payload_id).await?;
+
                 // submit payload to engine api
-                let block_hash = self
-                    .engine_api
+                let block_hash = engine_api
                     .submit_payload(
                         payload.clone(),
                         parrent_beacon_block_root,
@@ -188,7 +202,7 @@ impl Rollup {
                     .await?;
 
                 // trigger forkchoice update via engine api to commit the block to the blockchain
-                self.engine_api.update_forkchoice(block_hash, block_hash).await?;
+                engine_api.update_forkchoice(block_hash, block_hash).await?;
             }
         }
 

From ca53bd32b1ed04d26cd09c3471380ba15244dc12 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Keszey=20D=C3=A1niel?=
Date: Fri, 18 Oct 2024 21:45:46 +0200
Subject: [PATCH 04/15] some req changes

---
 bin/reth/src/main.rs             | 16 +++++-----------
 crates/gwyneth/src/engine_api.rs | 27 +++++++--------------------
 crates/gwyneth/src/exex.rs       | 26 +++++++------------------
 3 files changed, 19 insertions(+), 50 deletions(-)

diff --git a/bin/reth/src/main.rs b/bin/reth/src/main.rs
index 141e90a97a47..5656019e0536 100644
--- a/bin/reth/src/main.rs
+++ b/bin/reth/src/main.rs
@@ -11,14 +11,13 @@ use reth_node_builder::{NodeBuilder, NodeConfig, NodeHandle};
 use reth_node_ethereum::EthereumNode;
 use reth_tasks::TaskManager;
-// Define a list of suffixes for chain IDs and RPC ports
-const SUFFIXES: [char; 2] = ['A', 'B'/* , 'C'*/]; // Add more suffixes as needed, like C
+const BASE_CHAIN_ID: u64 = gwyneth::exex::BASE_CHAIN_ID; // Base chain ID for L2s
+const NUM_L2_CHAINS: u64 = 2; // Number of L2 chains to create
 
 fn main() -> eyre::Result<()> {
     reth::cli::Cli::parse_args().run(|builder, _| async move {
         let tasks = TaskManager::current();
         let exec = tasks.executor();
-
         let network_config = NetworkArgs {
             discovery: DiscoveryArgs { disable_discovery: true, ..DiscoveryArgs::default() },
             ..NetworkArgs::default()
         };
 
         let mut gwyneth_nodes = Vec::new();
 
-        for suffix in SUFFIXES.iter() {
-            let chain_id = match suffix {
-                'A' => gwyneth::exex::CHAIN_ID_A,
-                'B' => gwyneth::exex::CHAIN_ID_B,
-                // 'C'
=> gwyneth::exex::CHAIN_ID_C, // Add this constant in your exex.rs - _ => panic!("Unsupported chain ID suffix"), - }; + for i in 0..NUM_L2_CHAINS { + let chain_id = BASE_CHAIN_ID + (i * 100000); // Increment by 100000 for each L2 let chain_spec = ChainSpecBuilder::default() .chain(chain_id.into()) @@ -52,7 +46,7 @@ fn main() -> eyre::Result<()> { .with_rpc( RpcServerArgs::default() .with_unused_ports() - .with_static_l2_rpc_ip_and_port(chain_spec.chain.id(), *suffix) + .with_static_l2_rpc_ip_and_port(chain_id) ); let NodeHandle { node: gwyneth_node, node_exit_future: _ } = diff --git a/crates/gwyneth/src/engine_api.rs b/crates/gwyneth/src/engine_api.rs index 3d585f79718e..4e63207e3266 100644 --- a/crates/gwyneth/src/engine_api.rs +++ b/crates/gwyneth/src/engine_api.rs @@ -113,34 +113,21 @@ impl PayloadEnvelopeExt for ExecutionPayloadEnvelopeV3 { } } pub trait RpcServerArgsExEx { - fn with_static_l2_rpc_ip_and_port(self, chain_id: u64, suffix: char) -> Self; + fn with_static_l2_rpc_ip_and_port(self, chain_id: u64) -> Self; } impl RpcServerArgsExEx for RpcServerArgs { - fn with_static_l2_rpc_ip_and_port(mut self, chain_id: u64, suffix: char) -> Self { + fn with_static_l2_rpc_ip_and_port(mut self, chain_id: u64) -> Self { self.http = true; self.http_addr = Ipv4Addr::new(0, 0, 0, 0).into(); - // Set HTTP and WS ports based on suffix - match suffix { - 'A' => { - self.http_port = 10110u16; - self.ws_port = 10111u16; - }, - 'B' => { - self.http_port = 20110u16; - self.ws_port = 20111u16; - }, - 'C' => { - self.http_port = 30110u16; - self.ws_port = 30111u16; - }, - // Obviously add more if needed more chain - _ => panic!("Unsupported suffix: {}", suffix), - } + // Calculate HTTP and WS ports based on chain_id + let port_offset = ((chain_id - 167010) / 100000) as u16; + self.http_port = 10110 + (port_offset * 10000); + self.ws_port = 10111 + (port_offset * 10000); // Set IPC path - self.ipcpath = format!("{}-{}-{}", constants::DEFAULT_IPC_ENDPOINT, chain_id, suffix); + self.ipcpath = format!("{}-{}", constants::DEFAULT_IPC_ENDPOINT, chain_id); self } diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs index 9160b307bf6a..cff105d27b99 100644 --- a/crates/gwyneth/src/exex.rs +++ b/crates/gwyneth/src/exex.rs @@ -31,8 +31,7 @@ use reth_transaction_pool::{ use RollupContract::{BlockProposed, RollupContractEvents}; const ROLLUP_CONTRACT_ADDRESS: Address = address!("9fCF7D13d10dEdF17d0f24C62f0cf4ED462f65b7"); -pub const CHAIN_ID_A: u64 = 167010; -pub const CHAIN_ID_B: u64 = 267010; +pub const BASE_CHAIN_ID: u64 = 167010; const INITIAL_TIMESTAMP: u64 = 1710338135; pub type GwynethFullNode = FullNode< @@ -89,19 +88,14 @@ impl Rollup { } pub async fn start(mut self) -> eyre::Result<()> { - // Process all new chain state notifications while let Some(notification) = self.ctx.notifications.recv().await { if let Some(reverted_chain) = notification.reverted_chain() { self.revert(&reverted_chain)?; } if let Some(committed_chain) = notification.committed_chain() { - let nodes = &self.nodes; - let engine_apis = &self.engine_apis; - for i in 0..nodes.len() { - let node = &nodes[i]; - let engine_api = &engine_apis[i]; - self.commit(&committed_chain, node, engine_api).await?; + for i in 0..self.nodes.len() { + self.commit(&committed_chain, i).await?; } self.ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().number))?; } @@ -110,16 +104,10 @@ impl Rollup { Ok(()) } - /// Process a new chain commit. 
- /// - /// This function decodes all transactions to the rollup contract into events, executes the - /// corresponding actions and inserts the results into the database. - pub async fn commit( - &mut self, - chain: &Chain, - node: &GwynethFullNode, - engine_api: &EngineApiContext, - ) -> eyre::Result<()> { + pub async fn commit(&mut self, chain: &Chain, node_idx: usize) -> eyre::Result<()> { + let node = &self.nodes[node_idx]; + let engine_api = &self.engine_apis[node_idx]; + let events = decode_chain_into_rollup_events(chain); for (block, _, event) in events { if let RollupContractEvents::BlockProposed(BlockProposed { From 5d20f22a10276c6e2c96d5fabfb968615ed72ecf Mon Sep 17 00:00:00 2001 From: CeciliaZ030 Date: Sun, 20 Oct 2024 14:47:20 -0300 Subject: [PATCH 05/15] build + run --- Cargo.lock | 1492 +++++++++++++++++++++++++++++----------------------- Cargo.toml | 8 +- 2 files changed, 848 insertions(+), 652 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 617993b8c497..83f0118b2521 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,19 +4,13 @@ version = 3 [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - [[package]] name = "adler2" version = "2.0.0" @@ -80,6 +74,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "aligned-vec" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e0966165eaf052580bd70eb1b32cb3d6245774c0104d1b2793e9650bf83b52a" +dependencies = [ + "equator", +] + [[package]] name = "alloc-no-stdlib" version = "2.0.4" @@ -103,10 +106,11 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-chains" -version = "0.1.29" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb07629a5d0645d29f68d2fb6f4d0cf15c89ec0965be915f303967180929743f" +checksum = "d4932d790c723181807738cf1ac68198ab581cd699545b155601332541ee47bd" dependencies = [ + "alloy-primitives 0.8.8", "alloy-rlp", "arbitrary", "num_enum", @@ -117,12 +121,12 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7198a527b4c4762cb88d54bcaeb0428f4298b72552c9c8ec4af614b4a4990c59" +checksum = "629b62e38d471cc15fea534eb7283d2f8a4e8bdb1811bcc5d66dda6cfce6fae1" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "alloy-serde", "arbitrary", @@ -132,12 +136,12 @@ dependencies = [ [[package]] name = "alloy-dyn-abi" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f95d76a38cae906fd394a5afb0736aaceee5432efe76addfd71048e623e208af" +checksum = "e6228abfc751a29cde117b0879b805a3e0b3b641358f063272c83ca459a56886" dependencies = [ "alloy-json-abi", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-sol-type-parser", "alloy-sol-types", "const-hex", @@ -154,7 +158,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0069cf0642457f87a01a014f6dc29d5d893cd4fd8fddf0c3cdfad1bb3ebafc41" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "arbitrary", "rand 0.8.5", @@ -163,11 +167,11 @@ dependencies = [ [[package]] name = "alloy-eip7702" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d319bb544ca6caeab58c39cea8921c55d924d4f68f2c60f24f914673f9a74a" +checksum = "ea59dc42102bc9a1905dc57901edc6dd48b9f38115df86c7d252acba70d71d04" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "arbitrary", "k256", @@ -177,13 +181,13 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "159eab0e4e15b88571f55673af37314f4b8f17630dc1b393c3d70f2128a1d494" +checksum = "f923dd5fca5f67a43d81ed3ebad0880bd41f6dd0ada930030353ac356c54cd0f" dependencies = [ "alloy-eip2930", "alloy-eip7702", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "alloy-serde", "arbitrary", @@ -196,22 +200,22 @@ dependencies = [ [[package]] name = "alloy-genesis" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "210f4b358d724f85df8adaec753c583defb58169ad3cad3d48c80d1a25a6ff0e" +checksum = "3a7a18afb0b318616b6b2b0e2e7ac5529d32a966c673b48091c9919e284e6aca" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-serde", "serde", ] [[package]] name = "alloy-json-abi" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03c66eec1acdd96b39b995b8f5ee5239bc0c871d62c527ae1ac9fd1d7fecd455" +checksum = "d46eb5871592c216d39192499c95a99f7175cb94104f88c307e6dc960676d9f1" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-sol-type-parser", "serde", "serde_json", @@ -219,11 +223,11 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7733446dd531f8eb877331fea02f6c40bdbb47444a17dc3464bf75319cc073a" +checksum = "d3c717b5298fad078cd3a418335b266eba91b511383ca9bd497f742d5975d5ab" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-sol-types", "serde", "serde_json", @@ -233,15 +237,15 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b80851d1697fc4fa2827998e3ee010a3d1fc59c7d25e87070840169fcf465832" +checksum = "fb3705ce7d8602132bcf5ac7a1dd293a42adc2f183abf5907c30ac535ceca049" dependencies = [ "alloy-consensus", "alloy-eips", "alloy-json-rpc", "alloy-network-primitives", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rpc-types-eth", "alloy-serde", "alloy-signer", @@ -254,24 +258,26 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76a2336889f3d0624b18213239d27f4f34eb476eb35bef22f6a8cc24e0c0078" +checksum = "94ad40869867ed2d9cd3842b1e800889e5b49e6b92da346e93862b4a741bedf3" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-eips", + "alloy-primitives 0.8.8", "alloy-serde", "serde", ] [[package]] name = "alloy-node-bindings" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "2657dae91ae61ed6cdd4c58b7e09330de934eea4e14d2f54f72f2a6720b23437" +checksum = "5988a227293f949525f0a1b3e1ef728d2ef24afa96bad2b7788c6c9617fa3eec" dependencies = [ "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "k256", + "rand 0.8.5", "serde_json", "tempfile", "thiserror", @@ -303,9 +309,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb848c43f6b06ae3de2e4a67496cbbabd78ae87db0f1248934f15d76192c6a" +checksum = "38f35429a652765189c1c5092870d8360ee7b7769b09b06d89ebaefd34676446" dependencies = [ "alloy-rlp", "arbitrary", @@ -335,9 +341,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2d2a195caa6707f5ce13905794865765afc6d9ea92c3a56e3a973c168d703bc" +checksum = "927f708dd457ed63420400ee5f06945df9632d5d101851952056840426a10dc5" dependencies = [ "alloy-chains", "alloy-consensus", @@ -345,7 +351,7 @@ dependencies = [ "alloy-json-rpc", "alloy-network", "alloy-network-primitives", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-pubsub", "alloy-rpc-client", "alloy-rpc-types-admin", @@ -357,7 +363,7 @@ dependencies = [ "async-stream", "async-trait", "auto_impl", - "dashmap 6.0.1", + "dashmap 6.1.0", "futures", "futures-utils-wasm", "lru", @@ -373,12 +379,12 @@ dependencies = [ [[package]] name = "alloy-pubsub" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c59e13200322138fe4279b4676b0d78c4f55502de127f5a448495d3ddfaa43" +checksum = "2d05f63677e210d758cd5d6d1ce10f20c980c3560ccfbe79ba1997791862a04f" dependencies = [ "alloy-json-rpc", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-transport", "bimap", "futures", @@ -386,7 +392,7 @@ dependencies = [ "serde_json", "tokio", "tokio-stream", - "tower", + "tower 0.5.1", "tracing", ] @@ -409,17 +415,17 @@ checksum = "4d0f2d905ebd295e7effec65e5f6868d153936130ae718352771de3e7d03c75c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "alloy-rpc-client" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed31cdba2b23d71c555505b06674f8e7459496abfd7f4875d268434ef5a99ee6" +checksum = "7d82952dca71173813d4e5733e2c986d8b04aea9e0f3b0a576664c232ad050a5" dependencies = [ "alloy-json-rpc", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-pubsub", "alloy-transport", "alloy-transport-http", @@ -431,16 +437,16 @@ dependencies = [ "serde_json", "tokio", "tokio-stream", - "tower", + "tower 0.5.1", "tracing", "url", ] [[package]] name = "alloy-rpc-types" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d758f65aa648491c6358335c578de45cd7de6fdf2877c3cef61f2c9bebea21" +checksum = "64333d639f2a0cf73491813c629a405744e16343a4bc5640931be707c345ecc5" dependencies = [ "alloy-rpc-types-engine", "alloy-rpc-types-eth", @@ -450,35 +456,35 @@ dependencies = [ [[package]] name = "alloy-rpc-types-admin" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e41c33bbddaec71ca1bd7a4df38f95f408ef4fa3b3c29a7e9cc8d0e43be5fbe" +checksum = "fefd12e99dd6b7de387ed13ad047ce2c90d8950ca62fc48b8a457ebb8f936c61" dependencies = [ "alloy-genesis", - "alloy-primitives 0.8.7", + 
"alloy-primitives 0.8.8", "serde", "serde_json", ] [[package]] name = "alloy-rpc-types-anvil" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa5ee4ffe3e687a6372dd02e998f4f65e512ffdfe0d2c248db822649814c36cd" +checksum = "d25cb45ad7c0930dd62eecf164d2afe4c3d2dd2c82af85680ad1f118e1e5cb83" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-serde", "serde", ] [[package]] name = "alloy-rpc-types-beacon" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3173bf0239a59d3616f4f4ab1682de25dd30b13fb8f52bf7ee7503729354f3c4" +checksum = "2e7081d2206dca51ce23a06338d78d9b536931cc3f15134fc1c6535eb2b77f18" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rpc-types-engine", "serde", "serde_with", @@ -487,52 +493,53 @@ dependencies = [ [[package]] name = "alloy-rpc-types-engine" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24e800d959606fa19b36b31d7c24d68ef75b970c654b7aa581dce23de82db0a5" +checksum = "1464c4dd646e1bdfde86ae65ce5ba168dbb29180b478011fe87117ae46b1629b" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", - "alloy-rpc-types-eth", "alloy-serde", + "derive_more 1.0.0", "jsonrpsee-types", "jsonwebtoken", "rand 0.8.5", "serde", - "thiserror", ] [[package]] name = "alloy-rpc-types-eth" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ba05d6ee4db0d89113294a614137940f79abfc2c40a9a3bee2995660358776" +checksum = "83aa984386deda02482660aa31cb8ca1e63d533f1c31a52d7d181ac5ec68e9b8" dependencies = [ "alloy-consensus", "alloy-eips", "alloy-network-primitives", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "alloy-serde", "alloy-sol-types", "arbitrary", + "cfg-if", + "derive_more 1.0.0", + "hashbrown 0.14.5", "itertools 0.13.0", "jsonrpsee-types", "serde", "serde_json", - "thiserror", ] [[package]] name = "alloy-rpc-types-mev" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a0b28949d1077826684b5912fe9ab1c752a863af0419b1ba9abff19006d61b1" +checksum = "922d92389e5022650c4c60ffd2f9b2467c3f853764f0f74ff16a23106f9017d5" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-serde", "serde", "serde_json", @@ -540,11 +547,11 @@ dependencies = [ [[package]] name = "alloy-rpc-types-trace" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd2af822ed58f2b6dd7cfccf88bf69f42c9a8cbf4663316227646a8a3e5a591f" +checksum = "98db35cd42c90b484377e6bc44d95377a7a38a5ebee996e67754ac0446d542ab" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -554,11 +561,11 @@ dependencies = [ [[package]] name = "alloy-rpc-types-txpool" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a8fbdf39e93a9b213df39541be51671e93e6e8b142c3602ddb4ff6219a1bc85" +checksum = "6bac37082c3b21283b3faf5cc0e08974272aee2f756ce1adeb26db56a5fce0d5" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -566,11 +573,11 @@ dependencies = [ [[package]] name = "alloy-serde" 
-version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd260ede54f0b53761fdd04133acc10ae70427f66a69aa9590529bbd066cd58" +checksum = "731f75ec5d383107fd745d781619bd9cedf145836c51ecb991623d41278e71fa" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "arbitrary", "serde", "serde_json", @@ -578,11 +585,11 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5193ee6b370b89db154d7dc40c6a8e6ce11213865baaf2b418a9f2006be762" +checksum = "307324cca94354cd654d6713629f0383ec037e1ff9e3e3d547212471209860c0" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "async-trait", "auto_impl", "elliptic-curve", @@ -592,13 +599,13 @@ dependencies = [ [[package]] name = "alloy-signer-local" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf6b19bbb231c7f941af07f363d4c74d356dfcdfcd7dfa85a41a504ae856a6d5" +checksum = "9fabe917ab1778e760b4701628d1cae8e028ee9d52ac6307de4e1e9286ab6b5f" dependencies = [ "alloy-consensus", "alloy-network", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-signer", "async-trait", "coins-bip32", @@ -610,59 +617,59 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "661c516eb1fa3294cc7f2fb8955b3b609d639c282ac81a4eedb14d3046db503a" +checksum = "3b2395336745358cc47207442127c47c63801a7065ecc0aa928da844f8bb5576" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecbabb8fc3d75a0c2cea5215be22e7a267e3efde835b0f2a8922f5e3f5d47683" +checksum = "9ed5047c9a241df94327879c2b0729155b58b941eae7805a7ada2e19436e6b39" dependencies = [ "alloy-json-abi", "alloy-sol-macro-input", "const-hex", - "heck 0.5.0", + "heck", "indexmap 2.6.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16517f2af03064485150d89746b8ffdcdbc9b6eeb3d536fb66efd7c2846fbc75" +checksum = "5dee02a81f529c415082235129f0df8b8e60aa1601b9c9298ffe54d75f57210b" dependencies = [ "alloy-json-abi", "const-hex", "dunce", - "heck 0.5.0", + "heck", "proc-macro2", "quote", "serde_json", - "syn 2.0.76", + "syn 2.0.81", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c07ebb0c1674ff8cbb08378d7c2e0e27919d2a2dae07ad3bca26174deda8d389" +checksum = "f631f0bd9a9d79619b27c91b6b1ab2c4ef4e606a65192369a1ee05d40dcf81cc" dependencies = [ "serde", "winnow", @@ -670,12 +677,12 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e448d879903624863f608c552d10efb0e0905ddbee98b0049412799911eb062" +checksum = "c2841af22d99e2c0f82a78fe107b6481be3dd20b89bfb067290092794734343a" dependencies = [ "alloy-json-abi", - "alloy-primitives 0.8.7", + 
"alloy-primitives 0.8.8", "alloy-sol-macro", "const-hex", "serde", @@ -683,9 +690,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "454220c714857cf68af87d788d1f0638ad8766268b94f6a49fed96cbc2ab382c" +checksum = "33616b2edf7454302a1d48084db185e52c309f73f6c10be99b0fe39354b3f1e9" dependencies = [ "alloy-json-rpc", "base64 0.22.1", @@ -695,31 +702,31 @@ dependencies = [ "serde_json", "thiserror", "tokio", - "tower", + "tower 0.5.1", "tracing", "url", ] [[package]] name = "alloy-transport-http" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "377f2353d7fea03a2dba6b9ffbb7d610402c040dd5700d1fae8b9ec2673eed9b" +checksum = "a944f5310c690b62bbb3e7e5ce34527cbd36b2d18532a797af123271ce595a49" dependencies = [ "alloy-json-rpc", "alloy-transport", "reqwest", "serde_json", - "tower", + "tower 0.5.1", "tracing", "url", ] [[package]] name = "alloy-transport-ipc" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8653c47dcc30326fb09a34140e8800fa21987fc52453de6cfcdd5c7b8b6e9886" +checksum = "09fd8491249f74d16ec979b1f5672377b12ebb818e6056478ffa386954dbd350" dependencies = [ "alloy-json-rpc", "alloy-pubsub", @@ -736,9 +743,9 @@ dependencies = [ [[package]] name = "alloy-transport-ws" -version = "0.3.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d43ba8e9a3a7fef626d5fd93cc87ff2d6d2c81acfb866f068b3dce31dda060" +checksum = "a9704761f6297fe482276bee7f77a93cb42bd541c2bd6c1c560b6f3a9ece672e" dependencies = [ "alloy-pubsub", "alloy-transport", @@ -754,11 +761,11 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.5.0" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd491aade72a82d51db430379f48a44a1d388ff03711a2023f1faa302c5b675d" +checksum = "0a46c9c4fdccda7982e7928904bd85fe235a0404ee3d7e197fff13d61eac8b4f" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "arbitrary", "derive_arbitrary", @@ -844,9 +851,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95" [[package]] name = "aquamarine" @@ -859,7 +866,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -905,7 +912,7 @@ dependencies = [ "num-bigint", "num-traits", "paste", - "rustc_version 0.4.0", + "rustc_version 0.4.1", "zeroize", ] @@ -997,9 +1004,9 @@ dependencies = [ [[package]] name = "arrayref" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" [[package]] name = "arrayvec" @@ -1032,9 +1039,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.12" +version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa" +checksum = "103db485efc3e41214fe4fda9f3dbeae2eb9082f48fd236e6095627a9422066e" dependencies = [ "brotli", 
"flate2", @@ -1062,9 +1069,9 @@ dependencies = [ [[package]] name = "async-stream" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" dependencies = [ "async-stream-impl", "futures-core", @@ -1073,24 +1080,24 @@ dependencies = [ [[package]] name = "async-stream-impl" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "async-trait" -version = "0.1.81" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -1101,7 +1108,7 @@ checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" dependencies = [ "futures", "pharos", - "rustc_version 0.4.0", + "rustc_version 0.4.1", ] [[package]] @@ -1128,14 +1135,14 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backon" @@ -1151,17 +1158,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.4", + "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -1223,9 +1230,9 @@ dependencies = [ [[package]] name = "bindgen" -version = "0.69.4" +version = "0.69.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" dependencies = [ "bitflags 2.6.0", "cexpr", @@ -1238,14 +1245,32 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.76", + "syn 2.0.81", +] + +[[package]] +name = "bindgen" +version = "0.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f" +dependencies = [ + "bitflags 2.6.0", + "cexpr", + "clang-sys", + "itertools 0.13.0", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn 2.0.81", ] [[package]] name = "binout" -version = "0.2.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b60b1af88a588fca5fe424ae7d735bc52814f80ff57614f57043cc4e2024f2ea" +checksum = "581d67184175e0c94926cb5e82df97bb6e0d8261d27a88a6ead80994ee73a4ac" 
[[package]] name = "bit-set" @@ -1280,9 +1305,9 @@ dependencies = [ [[package]] name = "bitm" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b06e8e5bec3490b9f6f3adbb78aa4f53e8396fd9994e8a62a346b44ea7c15f35" +checksum = "e7edec3daafc233e78a219c85a77bcf535ee267b0fae7a1aad96bd1a67add5d3" dependencies = [ "dyn_size_of", ] @@ -1341,9 +1366,9 @@ dependencies = [ [[package]] name = "boa_ast" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b49637e7ecb7c541c46c3e885d4c49326ad8076dbfb88bef2cf3165d8ea7df2b" +checksum = "3a69ee3a749ea36d4e56d92941e7b25076b493d4917c3d155b6cf369e23547d9" dependencies = [ "bitflags 2.6.0", "boa_interner", @@ -1355,9 +1380,9 @@ dependencies = [ [[package]] name = "boa_engine" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "411558b4cbc7d0303012e26721815e612fed78179313888fd5dd8d6c50d70099" +checksum = "06e4559b35b80ceb2e6328481c0eca9a24506663ea33ee1e279be6b5b618b25c" dependencies = [ "arrayvec", "bitflags 2.6.0", @@ -1400,9 +1425,9 @@ dependencies = [ [[package]] name = "boa_gc" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eff345a85a39cf9b8ed863198947d61e6df2b1d774002b57341158b0ce2c525" +checksum = "716406f57d67bc3ac7fd227d5513b42df401dff14a3be22cbd8ee29817225363" dependencies = [ "boa_macros", "boa_profiler", @@ -1413,9 +1438,9 @@ dependencies = [ [[package]] name = "boa_interner" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b779280420804c70da9043d152c84eb96e2f7c9e7d1ec3262decf59f9349df" +checksum = "4e18df2272616e1ba0322a69333d37dbb78797f1aa0595aad9dc41e8ecd06ad9" dependencies = [ "boa_gc", "boa_macros", @@ -1429,21 +1454,21 @@ dependencies = [ [[package]] name = "boa_macros" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25e0097fa69cde4c95f9869654004340fbbe2bcf3ce9189ba2a31a65ac40e0a1" +checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", "synstructure", ] [[package]] name = "boa_parser" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd63fe8faf62561fc8c50f9402687e8cfde720b57d292fb3b4ac17c821878ac1" +checksum = "62b59dc05bf1dc019b11478a92986f590cff43fced4d20e866eefb913493e91c" dependencies = [ "bitflags 2.6.0", "boa_ast", @@ -1460,15 +1485,15 @@ dependencies = [ [[package]] name = "boa_profiler" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd9da895f0df9e2a97b36c1f98e0c5d2ab963abc8679d80f2a66f7bcb211ce90" +checksum = "00ee0645509b3b91abd724f25072649d9e8e65653a78ff0b6e592788a58dd838" [[package]] name = "boa_string" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9ca6668df83fcd3c2903f6f296b7180421908c5b478ebe0d1c468be9fd60e1c" +checksum = "ae85205289bab1f2c7c8a30ddf0541cf89ba2ff7dbd144feef50bbfa664288d4" dependencies = [ "fast-float", "paste", @@ -1488,9 +1513,9 @@ dependencies = [ [[package]] name = "brotli" -version = "6.0.0" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" +checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -1524,7 +1549,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" dependencies = [ "memchr", - "regex-automata 0.4.7", + "regex-automata 0.4.8", "serde", ] @@ -1542,22 +1567,22 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytemuck" -version = "1.17.1" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773d90827bc3feecfb67fab12e24de0749aad83c74b9504ecde46237b5cd24e2" +checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc8b54b395f2fcfbb3d90c47b01c7f444d94d05bdeb775811dec868ac3bbc26" +checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -1568,9 +1593,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.1" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" dependencies = [ "serde", ] @@ -1645,9 +1670,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.15" +version = "1.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6" +checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" dependencies = [ "jobserver", "libc", @@ -1740,9 +1765,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.16" +version = "4.5.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" dependencies = [ "clap_builder", "clap_derive", @@ -1750,9 +1775,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.15" +version = "4.5.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" dependencies = [ "anstream", "anstyle", @@ -1762,14 +1787,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.13" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -1902,9 +1927,9 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.12.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8a24a26d37e1ffd45343323dc9fe6654ceea44c12f2fcb3d7ac29e610bc6" +checksum = 
"0121754e84117e65f9d90648ee6aa4882a6e63110307ab73967a4c5e7e69e586" dependencies = [ "cfg-if", "cpufeatures", @@ -1921,9 +1946,9 @@ checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const_format" -version = "0.2.32" +version = "0.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" +checksum = "50c655d81ff1114fb0dcdea9225ea9f0cc712a6f8d189378e82bdf62a473a64b" dependencies = [ "const_format_proc_macros", "konst", @@ -1931,9 +1956,9 @@ dependencies = [ [[package]] name = "const_format_proc_macros" -version = "0.2.32" +version = "0.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" +checksum = "eff1a44b93f47b1bac19a27932f5c591e43d1ba357ee4f61526c8a25603f0eb1" dependencies = [ "proc-macro2", "quote", @@ -1982,18 +2007,18 @@ dependencies = [ [[package]] name = "cpp_demangle" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8227005286ec39567949b33df9896bcadfa6051bccca2488129f108ca23119" +checksum = "96e58d342ad113c2b878f16d5d034c03be492ae460cdbc02b7f0f2284d310c7d" dependencies = [ "cfg-if", ] [[package]] name = "cpufeatures" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51e852e6dc9a5bed1fae92dd2375037bf2b768725bf3be87811edee3249d09ad" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" dependencies = [ "libc", ] @@ -2213,7 +2238,7 @@ dependencies = [ "curve25519-dalek-derive", "digest 0.10.7", "fiat-crypto", - "rustc_version 0.4.0", + "rustc_version 0.4.1", "subtle", "zeroize", ] @@ -2226,7 +2251,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -2250,7 +2275,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -2261,7 +2286,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -2279,9 +2304,9 @@ dependencies = [ [[package]] name = "dashmap" -version = "6.0.1" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "804c8821570c3f8b70230c2ba75ffa5c0f9a4189b9a432b6656c536712acae28" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" dependencies = [ "cfg-if", "crossbeam-utils", @@ -2381,7 +2406,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -2393,8 +2418,8 @@ dependencies = [ "convert_case 0.4.0", "proc-macro2", "quote", - "rustc_version 0.4.0", - "syn 2.0.76", + "rustc_version 0.4.1", + "syn 2.0.81", ] [[package]] @@ -2415,7 +2440,7 @@ dependencies = [ "convert_case 0.6.0", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", "unicode-xid", ] @@ -2523,7 +2548,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -2552,9 +2577,9 @@ checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" [[package]] name = "dyn_size_of" -version = "0.4.2" +version = "0.4.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d4f78a40b1ec35bf8cafdaaf607ba2f773c366b0b3bda48937cacd7a8d5134" +checksum = "fdbac012a81cc46ca554aceae23c52f4f55adb343f2f32ca99bb4e5ef868cee2" [[package]] name = "ecdsa" @@ -2673,14 +2698,14 @@ dependencies = [ [[package]] name = "enum-as-inner" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" dependencies = [ - "heck 0.4.1", + "heck", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -2691,7 +2716,27 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", +] + +[[package]] +name = "equator" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c35da53b5a021d2484a7cc49b2ac7f2d840f8236a286f84202369bd338d761ea" +dependencies = [ + "equator-macro", +] + +[[package]] +name = "equator-macro" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.81", ] [[package]] @@ -3101,12 +3146,12 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.33" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" +checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" dependencies = [ "crc32fast", - "miniz_oxide 0.8.0", + "miniz_oxide", ] [[package]] @@ -3144,9 +3189,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -3159,9 +3204,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -3169,15 +3214,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -3186,9 +3231,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" @@ -3207,26 +3252,26 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-timer" @@ -3240,9 +3285,9 @@ dependencies = [ [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", @@ -3309,9 +3354,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" @@ -3421,7 +3466,7 @@ dependencies = [ "reth-tracing", "reth-transaction-pool", "reth-trie", - "revm", + "revm 14.0.1", "serde", "serde_json", "thiserror", @@ -3494,6 +3539,8 @@ version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" dependencies = [ + "allocator-api2", + "equivalent", "foldhash", "serde", ] @@ -3517,12 +3564,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - [[package]] name = "heck" version = "0.5.0" @@ -3668,9 +3709,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.4" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "httpdate" @@ -3702,9 +3743,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" dependencies = [ "bytes", "futures-channel", @@ -3723,9 +3764,9 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.2" +version = "0.27.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", "http", @@ -3733,7 +3774,7 @@ dependencies = [ "hyper-util", "log", "rustls", - "rustls-native-certs", + "rustls-native-certs 0.8.0", "rustls-pki-types", "tokio", "tokio-rustls", @@ -3743,9 +3784,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.7" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9" +checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" dependencies = [ "bytes", "futures-channel", @@ -3756,7 +3797,6 @@ dependencies = [ "pin-project-lite", "socket2 0.5.7", "tokio", - "tower", "tower-service", "tracing", ] @@ -3781,7 +3821,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -3795,9 +3835,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.60" +version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -3931,7 +3971,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -4088,9 +4128,9 @@ dependencies = [ [[package]] name = "intrusive-collections" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b694dc9f70c3bda874626d2aed13b780f137aab435f4e9814121955cf706122e" +checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86" dependencies = [ "memoffset", ] @@ -4109,15 +4149,15 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.9.0" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" [[package]] name = "iri-string" -version = "0.7.2" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f5f6c2df22c009ac44f6f1499308e7a3ac7ba42cd2378475cc691510e1eef1b" +checksum = "dc0f0a572e8ffe56e2ff4f769f32ffe919282c3916799f8b68688b6030063bea" dependencies = [ "memchr", "serde", @@ -4204,18 +4244,18 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.70" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" dependencies = [ "wasm-bindgen", ] [[package]] name = "jsonrpsee" -version = "0.24.3" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ec465b607a36dc5dd45d48b7689bc83f679f66a3ac6b6b21cc787a11e0f8685" +checksum = "c5c71d8c1a731cc4227c2f698d377e7848ca12c8a48866fc5e6951c43a4db843" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -4231,9 +4271,9 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.24.3" +version = "0.24.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "90f0977f9c15694371b8024c35ab58ca043dbbf4b51ccb03db8858a021241df1" +checksum = "548125b159ba1314104f5bb5f38519e03a41862786aa3925cf349aae9cdd546e" dependencies = [ "base64 0.22.1", "futures-channel", @@ -4256,9 +4296,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.3" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e942c55635fbf5dc421938b8558a8141c7e773720640f4f1dbe1f4164ca4e221" +checksum = "f2882f6f8acb9fdaec7cefc4fd607119a9bd709831df7d7672a1d3b644628280" dependencies = [ "async-trait", "bytes", @@ -4283,9 +4323,9 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.24.3" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33774602df12b68a2310b38a535733c477ca4a498751739f89fe8dbbb62ec4c" +checksum = "b3638bc4617f96675973253b3a45006933bde93c2fd8a6170b33c777cc389e5b" dependencies = [ "async-trait", "base64 0.22.1", @@ -4301,29 +4341,29 @@ dependencies = [ "serde_json", "thiserror", "tokio", - "tower", + "tower 0.4.13", "tracing", "url", ] [[package]] name = "jsonrpsee-proc-macros" -version = "0.24.3" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b07a2daf52077ab1b197aea69a5c990c060143835bf04c77070e98903791715" +checksum = "c06c01ae0007548e73412c08e2285ffe5d723195bf268bce67b1b77c3bb2a14d" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "jsonrpsee-server" -version = "0.24.3" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038fb697a709bec7134e9ccbdbecfea0e2d15183f7140254afef7c5610a3f488" +checksum = "82ad8ddc14be1d4290cd68046e7d1d37acd408efed6d3ca08aefcc3ad6da069c" dependencies = [ "futures-util", "http", @@ -4342,15 +4382,15 @@ dependencies = [ "tokio", "tokio-stream", "tokio-util", - "tower", + "tower 0.4.13", "tracing", ] [[package]] name = "jsonrpsee-types" -version = "0.24.3" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b67d6e008164f027afbc2e7bb79662650158d26df200040282d2aa1cbb093b" +checksum = "a178c60086f24cc35bb82f57c651d0d25d99c4742b4d335de04e97fa1f08a8a1" dependencies = [ "http", "serde", @@ -4360,9 +4400,9 @@ dependencies = [ [[package]] name = "jsonrpsee-wasm-client" -version = "0.24.3" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0470d0ae043ffcb0cd323797a631e637fb4b55fe3eaa6002934819458bba62a7" +checksum = "1a01cd500915d24ab28ca17527e23901ef1be6d659a2322451e1045532516c25" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -4371,9 +4411,9 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.24.3" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "992bf67d1132f88edf4a4f8cff474cf01abb2be203004a2b8e11c2b20795b99e" +checksum = "0fe322e0896d0955a3ebdd5bf813571c53fea29edd713bc315b76620b327e86d" dependencies = [ "http", "jsonrpsee-client-transport", @@ -4399,9 +4439,9 @@ dependencies = [ [[package]] name = "k256" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" +checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" dependencies = [ "cfg-if", 
"ecdsa", @@ -4422,9 +4462,9 @@ dependencies = [ [[package]] name = "keccak-asm" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "422fbc7ff2f2f5bdffeb07718e5a5324dca72b0c9293d50df4026652385e3314" +checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6" dependencies = [ "digest 0.10.7", "sha3-asm", @@ -4462,9 +4502,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.161" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" [[package]] name = "libloading" @@ -4503,11 +4543,11 @@ dependencies = [ [[package]] name = "libproc" -version = "0.14.8" +version = "0.14.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae9ea4b75e1a81675429dafe43441df1caea70081e82246a8cccf514884a88bb" +checksum = "e78a09b56be5adbcad5aa1197371688dc6bb249a26da3bca2011ee2fb987ebfb" dependencies = [ - "bindgen", + "bindgen 0.70.1", "errno", "libc", ] @@ -4615,11 +4655,11 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "lru" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.14.5", + "hashbrown 0.15.0", ] [[package]] @@ -4669,9 +4709,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" +checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f" dependencies = [ "libc", ] @@ -4711,17 +4751,18 @@ dependencies = [ [[package]] name = "metrics-process" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb524e5438255eaa8aa74214d5a62713b77b2c3c6e3c0bbeee65cfd9a58948ba" +checksum = "e69e6ced169644e186e060ddc15f3923fdf06862c811a867bb1e5e7c7824f4d0" dependencies = [ + "libc", "libproc", "mach2", "metrics", "once_cell", - "procfs", + "procfs 0.17.0", "rlimit", - "windows 0.57.0", + "windows 0.58.0", ] [[package]] @@ -4785,15 +4826,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" -[[package]] -name = "miniz_oxide" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" -dependencies = [ - "adler", -] - [[package]] name = "miniz_oxide" version = "0.8.0" @@ -4851,7 +4883,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -4883,9 +4915,9 @@ checksum = "1fafa6961cabd9c63bcd77a45d7e3b7f3b552b70417831fb0f56db717e72407e" [[package]] name = "multiaddr" -version = "0.18.1" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b852bc02a2da5feed68cd14fa50d0774b92790a5bdbfa932a813926c8472070" +checksum = 
"fe6351f60b488e04c1d21bc69e56b89cb3f5e8f5d22557d6e8031bdfd79b6961" dependencies = [ "arrayref", "byteorder", @@ -4896,7 +4928,7 @@ dependencies = [ "percent-encoding", "serde", "static_assertions", - "unsigned-varint", + "unsigned-varint 0.8.0", "url", ] @@ -4918,7 +4950,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "076d548d76a0e2a0d4ab471d0b1c36c577786dfc4471242035d97a12a735c492" dependencies = [ "core2", - "unsigned-varint", + "unsigned-varint 0.7.2", ] [[package]] @@ -5089,7 +5121,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -5117,18 +5149,18 @@ dependencies = [ [[package]] name = "object" -version = "0.36.3" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "oorandom" @@ -5138,44 +5170,78 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.2.2" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0db6e3a9bbbcef7cef19d77aa2cc76d61377376e3bb86f89167e7e3f30ea023" +checksum = "21aad1fbf80d2bcd7406880efc7ba109365f44bbb72896758ddcbfa46bf1592c" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "alloy-serde", "derive_more 1.0.0", "serde", + "spin", ] [[package]] -name = "op-alloy-network" -version = "0.2.2" +name = "op-alloy-genesis" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66184e6c92269ba4ef1f80e8566ce11d41b584884ce7476d4b1b5e0e38503ecb" +checksum = "6e1b8a9b70da0e027242ec1762f0f3a386278b6291d00d12ff5a64929dc19f68" dependencies = [ "alloy-consensus", "alloy-eips", + "alloy-primitives 0.8.8", + "alloy-sol-types", + "serde", + "serde_repr", +] + +[[package]] +name = "op-alloy-network" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "783ce4ebc0a994eee2188431511b16692b704e1e8fff0c77d8c0354d3c2b1fc8" +dependencies = [ + "alloy-consensus", "alloy-network", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rpc-types-eth", "op-alloy-consensus", "op-alloy-rpc-types", ] +[[package]] +name = "op-alloy-protocol" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf300a82ae2d30e2255bfea87a2259da49f63a25a44db561ae64cc9e3084139f" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 0.8.8", + "alloy-rlp", + "alloy-serde", + "hashbrown 0.14.5", + "op-alloy-consensus", + "op-alloy-genesis", + "serde", +] + [[package]] name = "op-alloy-rpc-types" -version = "0.2.2" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9c604cd3b9680d0edd0b7127f3550bcff634c2d2efe27b2b4853e72320186a8" +checksum = "e281fbfc2198b7c0c16457d6524f83d192662bc9f3df70f24c3038d4521616df" dependencies = [ - "alloy-network", - "alloy-primitives 0.8.7", + "alloy-eips", + 
"alloy-network-primitives", + "alloy-primitives 0.8.8", "alloy-rpc-types-eth", "alloy-serde", + "cfg-if", + "hashbrown 0.14.5", "op-alloy-consensus", "serde", "serde_json", @@ -5183,13 +5249,18 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.2.2" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "620e645c36cc66220909bf97e6632e7a154a2309356221cbf33ae78bf5294478" +checksum = "2947272a81ebf988f4804b6f0f6a7c0b2f6f89a908cb410e36f8f3828f81c778" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-eips", + "alloy-primitives 0.8.8", "alloy-rpc-types-engine", "alloy-serde", + "derive_more 1.0.0", + "op-alloy-consensus", + "op-alloy-genesis", + "op-alloy-protocol", "serde", ] @@ -5213,9 +5284,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "ordered-float" -version = "4.2.2" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a91171844676f8c7990ce64959210cd2eaef32c2612c50f9fae9f8aaa6065a6" +checksum = "83e7ccb95e240b7c9506a3d544f10d935e142cc90b0a1d56954fb44d89ad6b97" dependencies = [ "num-traits", ] @@ -5277,9 +5348,9 @@ dependencies = [ [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" @@ -5324,7 +5395,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.3", + "redox_syscall 0.5.7", "smallvec", "windows-targets 0.52.6", ] @@ -5363,9 +5434,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.11" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" dependencies = [ "memchr", "thiserror", @@ -5374,10 +5445,11 @@ dependencies = [ [[package]] name = "ph" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b7b74d575d7c11fb653fae69688be5206cafc1ead33c01ce61ac7f36eae45b" +checksum = "2662713b3e8e02977b289a7ada32d672ae5477b5c23f290e5999122d53658847" dependencies = [ + "aligned-vec", "binout", "bitm", "dyn_size_of", @@ -5392,7 +5464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" dependencies = [ "futures", - "rustc_version 0.4.0", + "rustc_version 0.4.1", ] [[package]] @@ -5425,7 +5497,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -5439,22 +5511,22 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +checksum = "baf123a161dde1e524adf36f90bc5d8d3462824a9c43553ad07a8183161189ec" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +checksum = "a4502d8515ca9f32f1fb543d987f63d95a14934883db45bdb48060b6b69257f8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -5481,9 +5553,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "plain_hasher" @@ -5496,9 +5568,9 @@ dependencies = [ [[package]] name = "plotters" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15b6eccb8484002195a3e44fe65a4ce8e93a625797a063735536fd59cb01cf3" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" dependencies = [ "num-traits", "plotters-backend", @@ -5509,15 +5581,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "414cec62c6634ae900ea1c56128dfe87cf63e7caece0852ec76aba307cebadb7" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" [[package]] name = "plotters-svg" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81b30686a7d9c3e010b84284bdd26a29f2138574f52f5eb6f794fc0ad924e705" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" dependencies = [ "plotters-backend", ] @@ -5542,9 +5614,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.7.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" +checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" [[package]] name = "powerfmt" @@ -5611,12 +5683,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.22" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" +checksum = "910d41a655dac3b764f1ade94821093d3610248694320cd072303a8eedcf221d" dependencies = [ "proc-macro2", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -5691,14 +5763,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9" dependencies = [ "unicode-ident", ] @@ -5714,7 +5786,19 @@ dependencies = [ "flate2", "hex", "lazy_static", - "procfs-core", + "procfs-core 0.16.0", + "rustix", +] + +[[package]] +name = "procfs" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc5b72d8145275d844d4b5f6d4e1eef00c8cd889edb6035c21675d1bb1f45c9f" +dependencies = [ + "bitflags 2.6.0", + "hex", + "procfs-core 0.17.0", "rustix", ] @@ -5729,6 +5813,16 @@ dependencies = [ "hex", ] +[[package]] +name = "procfs-core" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" 
+dependencies = [ + "bitflags 2.6.0", + "hex", +] + [[package]] name = "proptest" version = "1.5.0" @@ -5743,7 +5837,7 @@ dependencies = [ "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.8.4", + "regex-syntax 0.8.5", "rusty-fork", "tempfile", "unarray", @@ -5767,7 +5861,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -5811,9 +5905,9 @@ dependencies = [ [[package]] name = "quinn" -version = "0.11.3" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b22d8e7369034b9a7132bc2008cac12f2013c8132b45e0554e6e20e2617f2156" +checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684" dependencies = [ "bytes", "pin-project-lite", @@ -5829,9 +5923,9 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba92fb39ec7ad06ca2582c0ca834dfeadcaf06ddfc8e635c80aa7e1c05315fdd" +checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" dependencies = [ "bytes", "rand 0.8.5", @@ -5846,15 +5940,15 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bffec3605b73c6f1754535084a85229fa8a30f86014e6c81aeec4abb68b0285" +checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" dependencies = [ "libc", "once_cell", "socket2 0.5.7", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -5986,9 +6080,9 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "11.1.0" +version = "11.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb9ee317cfe3fbd54b36a511efc1edd42e216903c9cd575e686dd68a2ba90d8d" +checksum = "1ab240315c661615f2ee9f0f2cd32d5a7343a84d5ebcccb99d46e6637565e7b0" dependencies = [ "bitflags 2.6.0", ] @@ -6030,9 +6124,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.3" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ "bitflags 2.6.0", ] @@ -6050,14 +6144,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.6" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", + "regex-automata 0.4.8", + "regex-syntax 0.8.5", ] [[package]] @@ -6071,13 +6165,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.4", + "regex-syntax 0.8.5", ] [[package]] @@ -6088,15 +6182,15 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "regress" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16fe0a24af5daaae947294213d2fd2646fbf5e1fbacc1d4ba3e84b2393854842" +checksum = "1541daf4e4ed43a0922b7969bdc2170178bcacc5dabf7e39bc508a9fa3953a7a" dependencies = [ "hashbrown 0.14.5", "memchr", @@ -6104,9 +6198,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.7" +version = "0.12.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" +checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" dependencies = [ "base64 0.22.1", "bytes", @@ -6127,13 +6221,13 @@ dependencies = [ "pin-project-lite", "quinn", "rustls", - "rustls-native-certs", + "rustls-native-certs 0.8.0", "rustls-pemfile", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 1.0.1", "tokio", "tokio-rustls", "tokio-util", @@ -6286,7 +6380,7 @@ dependencies = [ "reth-revm", "reth-tasks", "reth-transaction-pool", - "revm", + "revm 14.0.1", "tokio", "tracing", ] @@ -6377,7 +6471,7 @@ dependencies = [ "tikv-jemallocator", "tokio", "tokio-util", - "tower", + "tower 0.4.13", "tracing", ] @@ -6446,7 +6540,7 @@ dependencies = [ "reth-primitives", "reth-storage-api", "reth-trie", - "revm", + "revm 14.0.1", "tokio", "tokio-stream", "tracing", @@ -6459,7 +6553,7 @@ dependencies = [ "alloy-chains", "alloy-eips", "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "alloy-trie", "auto_impl", @@ -6554,7 +6648,7 @@ name = "reth-cli-util" version = "1.0.6" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "eyre", "libc", "rand 0.8.5", @@ -6570,7 +6664,7 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-trie", "arbitrary", "bytes", @@ -6591,7 +6685,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -6765,7 +6859,7 @@ dependencies = [ name = "reth-discv4" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "assert_matches", "discv5", @@ -6791,7 +6885,7 @@ dependencies = [ name = "reth-discv5" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "derive_more 1.0.0", "discv5", @@ -6816,7 +6910,7 @@ name = "reth-dns-discovery" version = "1.0.6" dependencies = [ "alloy-chains", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "data-encoding", "enr", @@ -6914,7 +7008,7 @@ name = "reth-ecies" version = "1.0.6" dependencies = [ "aes", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "block-padding", "byteorder", @@ -7045,7 +7139,7 @@ dependencies = [ "reth-rpc-types", "reth-rpc-types-compat", "reth-trie", - "revm-primitives", + "revm-primitives 9.0.1", "serde", "serde_json", "tokio", @@ -7154,7 +7248,7 @@ dependencies = [ "reth-primitives", "reth-rpc-types", "reth-rpc-types-compat", - "revm-primitives", + "revm-primitives 9.0.1", "serde", "serde_json", "sha2 0.10.8", @@ -7165,7 +7259,7 @@ name = "reth-ethereum-forks" version = "1.0.6" dependencies = [ "alloy-chains", - 
"alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "arbitrary", "auto_impl", @@ -7194,7 +7288,7 @@ dependencies = [ "reth-revm", "reth-transaction-pool", "reth-trie", - "revm", + "revm 14.0.1", "tracing", ] @@ -7202,7 +7296,7 @@ dependencies = [ name = "reth-etl" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "rayon", "reth-db-api", "tempfile", @@ -7222,8 +7316,8 @@ dependencies = [ "reth-primitives", "reth-prune-types", "reth-storage-errors", - "revm", - "revm-primitives", + "revm 14.0.1", + "revm-primitives 9.0.1", ] [[package]] @@ -7242,7 +7336,7 @@ dependencies = [ "reth-revm", "reth-storage-api", "reth-testing-utils", - "revm-primitives", + "revm-primitives 9.0.1", "secp256k1", "serde_json", ] @@ -7261,8 +7355,8 @@ dependencies = [ "reth-primitives", "reth-prune-types", "reth-revm", - "revm", - "revm-primitives", + "revm 14.0.1", + "revm-primitives 9.0.1", "thiserror", "tracing", ] @@ -7272,14 +7366,14 @@ name = "reth-execution-errors" version = "1.0.6" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "derive_more 1.0.0", "nybbles", "reth-consensus", "reth-prune-types", "reth-storage-errors", - "revm-primitives", + "revm-primitives 9.0.1", ] [[package]] @@ -7287,12 +7381,12 @@ name = "reth-execution-types" version = "1.0.6" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "reth-chainspec", "reth-execution-errors", "reth-primitives", "reth-trie", - "revm", + "revm 14.0.1", "serde", ] @@ -7363,7 +7457,7 @@ dependencies = [ name = "reth-exex-types" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "reth-provider", "serde", ] @@ -7395,7 +7489,7 @@ dependencies = [ "tokio", "tokio-stream", "tokio-util", - "tower", + "tower 0.4.13", "tracing", ] @@ -7406,7 +7500,7 @@ dependencies = [ "bitflags 2.6.0", "byteorder", "criterion", - "dashmap 6.0.1", + "dashmap 6.1.0", "derive_more 1.0.0", "indexmap 2.6.0", "parking_lot 0.12.3", @@ -7423,7 +7517,7 @@ dependencies = [ name = "reth-mdbx-sys" version = "1.0.6" dependencies = [ - "bindgen", + "bindgen 0.69.5", "cc", ] @@ -7447,7 +7541,7 @@ dependencies = [ "quote", "regex", "serial_test", - "syn 2.0.76", + "syn 2.0.81", "trybuild", ] @@ -7455,7 +7549,7 @@ dependencies = [ name = "reth-net-banlist" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", ] [[package]] @@ -7531,7 +7625,7 @@ dependencies = [ name = "reth-network-api" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rpc-types-admin", "auto_impl", "derive_more 1.0.0", @@ -7571,7 +7665,7 @@ dependencies = [ name = "reth-network-peers" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "enr", "rand 0.8.5", @@ -7750,7 +7844,7 @@ name = "reth-node-ethereum" version = "1.0.6" dependencies = [ "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "eyre", "futures", "futures-util", @@ -7812,7 +7906,7 @@ dependencies = [ "metrics-exporter-prometheus", "metrics-process", "metrics-util", - "procfs", + "procfs 0.16.0", "reqwest", "reth-chainspec", "reth-db", @@ -7823,7 +7917,7 @@ dependencies = [ "socket2 0.4.10", "tikv-jemalloc-ctl", "tokio", - "tower", + "tower 0.4.13", "tracing", "vergen", ] @@ -7833,7 +7927,7 @@ name = "reth-node-optimism" version = "1.0.6" dependencies = [ "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", 
"async-trait", "clap", "eyre", @@ -7883,7 +7977,7 @@ version = "1.0.6" dependencies = [ "alloy-chains", "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "derive_more 1.0.0", "once_cell", "op-alloy-rpc-types", @@ -7897,7 +7991,7 @@ dependencies = [ name = "reth-optimism-cli" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "clap", "eyre", @@ -7963,7 +8057,7 @@ dependencies = [ "reth-rpc-types-compat", "reth-transaction-pool", "reth-trie", - "revm", + "revm 14.0.1", "sha2 0.10.8", "thiserror", "tracing", @@ -7977,7 +8071,7 @@ version = "1.0.6" name = "reth-optimism-rpc" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "jsonrpsee-types", "op-alloy-network", "parking_lot 0.12.3", @@ -7997,7 +8091,7 @@ dependencies = [ "reth-rpc-types", "reth-tasks", "reth-transaction-pool", - "revm", + "revm 14.0.1", "serde_json", "thiserror", "tokio", @@ -8020,7 +8114,7 @@ dependencies = [ "reth-revm", "reth-rpc-types", "reth-transaction-pool", - "revm", + "revm 14.0.1", "thiserror", "tokio", "tokio-stream", @@ -8059,7 +8153,7 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "alloy-rpc-types", "alloy-serde", @@ -8085,7 +8179,7 @@ dependencies = [ "reth-primitives-traits", "reth-static-file-types", "reth-trie-common", - "revm-primitives", + "revm-primitives 9.0.1", "secp256k1", "serde", "serde_json", @@ -8102,7 +8196,7 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "alloy-rpc-types-eth", "arbitrary", @@ -8114,7 +8208,7 @@ dependencies = [ "proptest-arbitrary-interop", "rand 0.8.5", "reth-codecs", - "revm-primitives", + "revm-primitives 9.0.1", "roaring", "serde", "serde_json", @@ -8128,7 +8222,7 @@ dependencies = [ "alloy-rpc-types-engine", "assert_matches", "auto_impl", - "dashmap 6.0.1", + "dashmap 6.1.0", "eyre", "itertools 0.13.0", "metrics", @@ -8157,7 +8251,7 @@ dependencies = [ "reth-testing-utils", "reth-trie", "reth-trie-db", - "revm", + "revm 14.0.1", "strum", "tempfile", "tokio", @@ -8168,7 +8262,7 @@ dependencies = [ name = "reth-prune" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "assert_matches", "itertools 0.13.0", "metrics", @@ -8197,7 +8291,7 @@ dependencies = [ name = "reth-prune-types" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "arbitrary", "assert_matches", "bytes", @@ -8226,7 +8320,7 @@ dependencies = [ "reth-storage-api", "reth-storage-errors", "reth-trie", - "revm", + "revm 14.0.1", ] [[package]] @@ -8236,7 +8330,7 @@ dependencies = [ "alloy-dyn-abi", "alloy-genesis", "alloy-network", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "async-trait", "derive_more 1.0.0", @@ -8273,16 +8367,16 @@ dependencies = [ "reth-testing-utils", "reth-transaction-pool", "reth-trie", - "revm", + "revm 14.0.1", "revm-inspectors", - "revm-primitives", + "revm-primitives 9.0.1", "secp256k1", "serde", "serde_json", "thiserror", "tokio", "tokio-stream", - "tower", + "tower 0.4.13", "tracing", "tracing-futures", ] @@ -8358,7 +8452,7 @@ dependencies = [ "serde_json", "thiserror", "tokio", - "tower", + "tower 0.4.13", "tower-http", "tracing", ] @@ -8425,9 +8519,9 @@ dependencies = [ "reth-tasks", "reth-transaction-pool", "reth-trie", - "revm", + "revm 14.0.1", "revm-inspectors", - "revm-primitives", + 
"revm-primitives 9.0.1", "tokio", "tracing", ] @@ -8458,9 +8552,9 @@ dependencies = [ "reth-tasks", "reth-transaction-pool", "reth-trie", - "revm", + "revm 14.0.1", "revm-inspectors", - "revm-primitives", + "revm-primitives 9.0.1", "schnellru", "serde", "serde_json", @@ -8481,7 +8575,7 @@ dependencies = [ "pin-project", "reqwest", "tokio", - "tower", + "tower 0.4.13", "tracing", ] @@ -8489,7 +8583,7 @@ dependencies = [ name = "reth-rpc-server-types" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "jsonrpsee-core", "jsonrpsee-types", "reth-errors", @@ -8504,7 +8598,7 @@ dependencies = [ name = "reth-rpc-types" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rpc-types", "alloy-rpc-types-admin", "alloy-rpc-types-anvil", @@ -8585,7 +8679,7 @@ dependencies = [ name = "reth-stages-api" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "aquamarine", "assert_matches", "auto_impl", @@ -8614,7 +8708,7 @@ dependencies = [ name = "reth-stages-types" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "arbitrary", "bytes", "modular-bitfield", @@ -8631,7 +8725,7 @@ dependencies = [ name = "reth-static-file" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "assert_matches", "parking_lot 0.12.3", "rayon", @@ -8654,7 +8748,7 @@ dependencies = [ name = "reth-static-file-types" version = "1.0.6" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "clap", "derive_more 1.0.0", "serde", @@ -8766,7 +8860,7 @@ dependencies = [ "reth-storage-api", "reth-tasks", "reth-tracing", - "revm", + "revm 14.0.1", "rustc-hash 2.0.0", "schnellru", "serde", @@ -8800,7 +8894,7 @@ dependencies = [ "reth-stages-types", "reth-storage-errors", "reth-trie-common", - "revm", + "revm 14.0.1", "serde", "serde_json", "similar-asserts", @@ -8816,7 +8910,7 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-genesis", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rlp", "alloy-trie", "arbitrary", @@ -8830,7 +8924,7 @@ dependencies = [ "proptest-arbitrary-interop", "reth-codecs", "reth-primitives-traits", - "revm-primitives", + "revm-primitives 9.0.1", "serde", "test-fuzz", "toml", @@ -8860,7 +8954,7 @@ dependencies = [ "reth-storage-errors", "reth-trie", "reth-trie-common", - "revm", + "revm 14.0.1", "serde", "serde_json", "similar-asserts", @@ -8897,6 +8991,19 @@ dependencies = [ "tracing", ] +[[package]] +name = "revm" +version = "14.0.1" +dependencies = [ + "auto_impl", + "cfg-if", + "dyn-clone", + "revm-interpreter 10.0.1", + "revm-precompile 11.0.1", + "serde", + "serde_json", +] + [[package]] name = "revm" version = "14.0.1" @@ -8905,8 +9012,8 @@ dependencies = [ "auto_impl", "cfg-if", "dyn-clone", - "revm-interpreter", - "revm-precompile", + "revm-interpreter 10.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", + "revm-precompile 11.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", "serde", "serde_json", ] @@ -8916,7 +9023,7 @@ name = "revm-inspectors" version = "0.6.0" source = "git+https://github.com/taikoxyz/revm-inspectors.git?branch=main-rbuilder#a7db16ce222d58eac84cfeeed4b5a9541dadc1d0" dependencies = [ - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "alloy-rpc-types-eth", "alloy-rpc-types-trace", "alloy-sol-types", @@ -8924,24 +9031,31 @@ dependencies = [ "boa_engine", "boa_gc", "colorchoice", - "revm", + "revm 14.0.1 
(git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", "serde_json", "thiserror", ] +[[package]] +name = "revm-interpreter" +version = "10.0.1" +dependencies = [ + "revm-primitives 9.0.1", + "serde", +] + [[package]] name = "revm-interpreter" version = "10.0.1" source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" dependencies = [ - "revm-primitives", + "revm-primitives 9.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", "serde", ] [[package]] name = "revm-precompile" version = "11.0.1" -source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" dependencies = [ "aurora-engine-modexp", "blst", @@ -8950,7 +9064,24 @@ dependencies = [ "k256", "once_cell", "p256", - "revm-primitives", + "revm-primitives 9.0.1", + "ripemd", + "secp256k1", + "sha2 0.10.8", + "substrate-bn", +] + +[[package]] +name = "revm-precompile" +version = "11.0.1" +source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" +dependencies = [ + "aurora-engine-modexp", + "c-kzg", + "cfg-if", + "k256", + "once_cell", + "revm-primitives 9.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", "ripemd", "secp256k1", "sha2 0.10.8", @@ -8960,10 +9091,9 @@ dependencies = [ [[package]] name = "revm-primitives" version = "9.0.1" -source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.7", + "alloy-primitives 0.8.8", "auto_impl", "bitflags 2.6.0", "bitvec", @@ -8976,6 +9106,24 @@ dependencies = [ "serde", ] +[[package]] +name = "revm-primitives" +version = "9.0.1" +source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" +dependencies = [ + "alloy-eips", + "alloy-primitives 0.8.8", + "auto_impl", + "bitflags 2.6.0", + "bitvec", + "cfg-if", + "dyn-clone", + "enumn", + "hashbrown 0.14.5", + "hex", + "serde", +] + [[package]] name = "rfc6979" version = "0.4.0" @@ -8988,9 +9136,9 @@ dependencies = [ [[package]] name = "rgb" -version = "0.8.48" +version = "0.8.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f86ae463694029097b846d8f99fd5536740602ae00022c0c50c5600720b2f71" +checksum = "57397d16646700483b67d2dd6511d79318f9d057fdbd21a4066aeac8b41d310a" dependencies = [ "bytemuck", ] @@ -9027,9 +9175,9 @@ dependencies = [ [[package]] name = "rlimit" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3560f70f30a0f16d11d01ed078a07740fe6b489667abc7c7b029155d9f21c3d8" +checksum = "7043b63bd0cd1aaa628e476b80e6d4023a3b50eb32789f2728908107bd0c793a" dependencies = [ "libc", ] @@ -9138,18 +9286,18 @@ dependencies = [ [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver 1.0.23", ] [[package]] name = "rustix" -version = "0.38.35" +version = "0.38.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a85d50532239da68e9addb745ba38ff4612a242c1c7ceea689c4bc7c2f43c36f" +checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" dependencies = [ "bitflags 2.6.0", "errno", @@ -9160,9 
+9308,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.12" +version = "0.23.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" +checksum = "5fbb44d7acc4e873d613422379f69f237a1b141928c02f6bc6ccfddddc2d7993" dependencies = [ "log", "once_cell", @@ -9175,9 +9323,22 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.7.2" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04182dffc9091a404e0fc069ea5cd60e5b866c3adf881eff99a32d048242dffa" +checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" dependencies = [ "openssl-probe", "rustls-pemfile", @@ -9188,19 +9349,18 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.3" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ - "base64 0.22.1", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" [[package]] name = "rustls-platform-verifier" @@ -9214,7 +9374,7 @@ dependencies = [ "log", "once_cell", "rustls", - "rustls-native-certs", + "rustls-native-certs 0.7.3", "rustls-platform-verifier-android", "rustls-webpki", "security-framework", @@ -9231,9 +9391,9 @@ checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" [[package]] name = "rustls-webpki" -version = "0.102.7" +version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84678086bd54edf2b415183ed7a94d0efb049f1b646a33e22a36f3794be6ae56" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ "ring", "rustls-pki-types", @@ -9242,9 +9402,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "rusty-fork" @@ -9281,20 +9441,20 @@ dependencies = [ [[package]] name = "scc" -version = "2.1.16" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aeb7ac86243095b70a7920639507b71d51a63390d1ba26c4f60a552fbb914a37" +checksum = "f2c1f7fc6deb21665a9060dfc7d271be784669295a31babdcd4dd2c79ae8cbfb" dependencies = [ "sdd", ] [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] 
[[package]] @@ -9316,9 +9476,9 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sdd" -version = "3.0.2" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0495e4577c672de8254beb68d01a9b62d0e8a13c099edecdbedccce3223cd29f" +checksum = "49c1eeaf4b6a87c7479688c6d52b9f1153cedd3c489300564f932b065c6eab95" [[package]] name = "sec1" @@ -9336,9 +9496,9 @@ dependencies = [ [[package]] name = "secp256k1" -version = "0.29.0" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0cc0f1cf93f4969faf3ea1c7d8a9faed25918d96affa959720823dfe86d4f3" +checksum = "9465315bc9d4566e1724f0fffcbcc446268cb522e60f9a27bcded6b19c108113" dependencies = [ "rand 0.8.5", "secp256k1-sys", @@ -9347,9 +9507,9 @@ dependencies = [ [[package]] name = "secp256k1-sys" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1433bd67156263443f14d603720b082dd3121779323fce20cba2aa07b874bc1b" +checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9" dependencies = [ "cc", ] @@ -9370,9 +9530,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.11.1" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" +checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" dependencies = [ "core-foundation-sys", "libc", @@ -9419,9 +9579,9 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.209" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" dependencies = [ "serde_derive", ] @@ -9437,20 +9597,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.209" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "serde_json" -version = "1.0.127" +version = "1.0.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" +checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" dependencies = [ "indexmap 2.6.0", "itoa", @@ -9470,11 +9630,22 @@ dependencies = [ "thiserror", ] +[[package]] +name = "serde_repr" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.81", +] + [[package]] name = "serde_spanned" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] @@ -9493,9 +9664,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.9.0" +version = "3.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cecfa94848272156ea67b2b1a53f20fc7bc638c4a46d2f8abde08f05f4b857" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" dependencies = [ "base64 0.22.1", "chrono", @@ -9511,14 +9682,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.9.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8fee4991ef4f274617a51ad4af30519438dacb2f56ac773b08a1922ff743350" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -9543,7 +9714,7 @@ checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -9593,9 +9764,9 @@ dependencies = [ [[package]] name = "sha3-asm" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57d79b758b7cb2085612b11a235055e485605a5103faccdd633f35bd7aee69dd" +checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46" dependencies = [ "cc", "cfg-if", @@ -9677,9 +9848,9 @@ dependencies = [ [[package]] name = "similar-asserts" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e041bb827d1bfca18f213411d51b665309f1afb37a04a5d1464530e13779fc0f" +checksum = "cfe85670573cd6f0fa97940f26e7e6601213c3b0555246c24234131f88c5709e" dependencies = [ "console", "similar", @@ -9775,6 +9946,9 @@ name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] [[package]] name = "spki" @@ -9799,7 +9973,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d904e7009df136af5297832a3ace3370cd14ff1546a232f4f185036c2736fcac" dependencies = [ "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -9841,11 +10015,11 @@ version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "rustversion", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -9879,9 +10053,9 @@ dependencies = [ [[package]] name = "symbolic-common" -version = "12.10.0" +version = "12.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16629323a4ec5268ad23a575110a724ad4544aae623451de600c747bf87b36cf" +checksum = "366f1b4c6baf6cfefc234bbd4899535fca0b06c74443039a73f6dfb2fad88d77" dependencies = [ "debugid", "memmap2", @@ -9891,9 +10065,9 @@ dependencies = [ [[package]] name = "symbolic-demangle" -version = "12.10.0" +version = "12.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c043a45f08f41187414592b3ceb53fb0687da57209cc77401767fb69d5b596" +checksum = "aba05ba5b9962ea5617baf556293720a8b2d0a282aa14ee4bf10e22efc7da8c8" dependencies = [ "cpp_demangle", "rustc-demangle", @@ -9913,9 +10087,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.76" +version = "2.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525" +checksum = "198514704ca887dd5a1e408c6c6cdcba43672f9b4062e1b24aa34e74e6d7faae" dependencies = [ 
"proc-macro2", "quote", @@ -9924,16 +10098,22 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20e7b52ad118b2153644eea95c6fc740b6c1555b2344fdab763fc9de4075f665" +checksum = "ebfc1bfd06acc78f16d8fd3ef846bc222ee7002468d10a7dce8d703d6eab89a3" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + [[package]] name = "sync_wrapper" version = "1.0.1" @@ -9951,7 +10131,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -9974,11 +10154,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "target-triple" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" + [[package]] name = "tempfile" -version = "3.12.0" +version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" dependencies = [ "cfg-if", "fastrand 2.1.1", @@ -10037,7 +10223,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -10061,22 +10247,22 @@ checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b" [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -10229,9 +10415,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.39.3" +version = "1.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9babc99b9923bfa4804bd74722ff02c0381021eafa4db9949217e3be8e84fff5" +checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" dependencies = [ "backtrace", "bytes", @@ -10253,7 +10439,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -10269,9 +10455,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" dependencies = [ "futures-core", 
"pin-project-lite", @@ -10297,9 +10483,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes", "futures-core", @@ -10333,9 +10519,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.20" +version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ "indexmap 2.6.0", "serde", @@ -10365,6 +10551,20 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 0.1.2", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-http" version = "0.5.2" @@ -10389,7 +10589,7 @@ dependencies = [ "pin-project-lite", "tokio", "tokio-util", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", "tracing", @@ -10440,7 +10640,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -10592,14 +10792,15 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "trybuild" -version = "1.0.99" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "207aa50d36c4be8d8c6ea829478be44a372c6a77669937bb39c698e52f1491e8" +checksum = "8dcd332a5496c026f1e14b7f3d2b7bd98e509660c04239c58b0ba38a12daded4" dependencies = [ "glob", "serde", "serde_derive", "serde_json", + "target-triple", "termcolor", "toml", ] @@ -10632,9 +10833,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "uint" @@ -10656,39 +10857,36 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = 
"unicode-normalization" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-truncate" @@ -10703,15 +10901,15 @@ dependencies = [ [[package]] name = "unicode-width" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-xid" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229730647fbc343e3a80e463c1db7f78f3855d3f3739bee0dda773c9a037c90a" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "universal-hash" @@ -10729,6 +10927,12 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" +[[package]] +name = "unsigned-varint" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" + [[package]] name = "untrusted" version = "0.9.0" @@ -10773,9 +10977,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "getrandom 0.2.15", ] @@ -10854,9 +11058,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" dependencies = [ "cfg-if", "once_cell", @@ -10865,24 +11069,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.43" +version = "0.4.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" +checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" dependencies = [ "cfg-if", "js-sys", @@ -10892,9 +11096,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = 
"0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -10902,28 +11106,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" +checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "wasm-streams" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +checksum = "4e072d4e72f700fb3443d8fe94a39315df013eef1104903cdb0a2abd322bbecd" dependencies = [ "futures-util", "js-sys", @@ -10934,9 +11138,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.70" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" +checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" dependencies = [ "js-sys", "wasm-bindgen", @@ -10944,9 +11148,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.3" +version = "0.26.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd" +checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" dependencies = [ "rustls-pki-types", ] @@ -11000,11 +11204,11 @@ dependencies = [ [[package]] name = "windows" -version = "0.57.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" dependencies = [ - "windows-core 0.57.0", + "windows-core 0.58.0", "windows-targets 0.52.6", ] @@ -11019,36 +11223,37 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.57.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" dependencies = [ "windows-implement", "windows-interface", - "windows-result 0.1.2", + "windows-result", + "windows-strings", "windows-targets 0.52.6", ] [[package]] name = "windows-implement" -version = "0.57.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" +checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] name = "windows-interface" -version = "0.57.0" +version = 
"0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -11057,20 +11262,11 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" dependencies = [ - "windows-result 0.2.0", + "windows-result", "windows-strings", "windows-targets 0.52.6", ] -[[package]] -name = "windows-result" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-result" version = "0.2.0" @@ -11086,7 +11282,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" dependencies = [ - "windows-result 0.2.0", + "windows-result", "windows-targets 0.52.6", ] @@ -11240,9 +11436,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.18" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ "memchr", ] @@ -11280,7 +11476,7 @@ dependencies = [ "js-sys", "log", "pharos", - "rustc_version 0.4.0", + "rustc_version 0.4.1", "send_wrapper 0.6.0", "thiserror", "wasm-bindgen", @@ -11326,7 +11522,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", "synstructure", ] @@ -11348,7 +11544,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -11368,7 +11564,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", "synstructure", ] @@ -11389,7 +11585,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] @@ -11411,7 +11607,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.81", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index d2ad596afb16..6b32e33d5464 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -566,8 +566,8 @@ tempfile = "3.8" test-fuzz = "5" [patch.crates-io] -revm = { git = "https://github.com/taikoxyz/revm.git", branch = "v43-gwyneth" } -revm-primitives = { git = "https://github.com/taikoxyz/revm.git", branch = "v43-gwyneth" } -revm-interpreter = { git = "https://github.com/taikoxyz/revm.git", branch = "v43-gwyneth" } -revm-precompile = { git = "https://github.com/taikoxyz/revm.git", branch = "v43-gwyneth" } +revm = { path = "../revm/crates/revm" } +revm-primitives = { path = "../revm/crates/primitives" } +revm-interpreter = { path = "../revm/crates/interpreter" } +revm-precompile = { path = "../revm/crates/precompile" } revm-inspectors = { git = 
"https://github.com/taikoxyz/revm-inspectors.git", branch = "main-rbuilder" } \ No newline at end of file From ef21fd0c9ed6754400356032f719c0a7ee6f8ba1 Mon Sep 17 00:00:00 2001 From: CeciliaZ030 Date: Sun, 20 Oct 2024 16:06:40 -0300 Subject: [PATCH 06/15] fix docker file --- Cargo.lock | 129 ++++++++++-------------------------- Cargo.toml | 2 +- Dockerfile | 51 +++++--------- crates/rpc/rpc/src/debug.rs | 3 + 4 files changed, 56 insertions(+), 129 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 83f0118b2521..42bbebb0b427 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3466,7 +3466,7 @@ dependencies = [ "reth-tracing", "reth-transaction-pool", "reth-trie", - "revm 14.0.1", + "revm", "serde", "serde_json", "thiserror", @@ -6380,7 +6380,7 @@ dependencies = [ "reth-revm", "reth-tasks", "reth-transaction-pool", - "revm 14.0.1", + "revm", "tokio", "tracing", ] @@ -6540,7 +6540,7 @@ dependencies = [ "reth-primitives", "reth-storage-api", "reth-trie", - "revm 14.0.1", + "revm", "tokio", "tokio-stream", "tracing", @@ -7139,7 +7139,7 @@ dependencies = [ "reth-rpc-types", "reth-rpc-types-compat", "reth-trie", - "revm-primitives 9.0.1", + "revm-primitives", "serde", "serde_json", "tokio", @@ -7248,7 +7248,7 @@ dependencies = [ "reth-primitives", "reth-rpc-types", "reth-rpc-types-compat", - "revm-primitives 9.0.1", + "revm-primitives", "serde", "serde_json", "sha2 0.10.8", @@ -7288,7 +7288,7 @@ dependencies = [ "reth-revm", "reth-transaction-pool", "reth-trie", - "revm 14.0.1", + "revm", "tracing", ] @@ -7316,8 +7316,8 @@ dependencies = [ "reth-primitives", "reth-prune-types", "reth-storage-errors", - "revm 14.0.1", - "revm-primitives 9.0.1", + "revm", + "revm-primitives", ] [[package]] @@ -7336,7 +7336,7 @@ dependencies = [ "reth-revm", "reth-storage-api", "reth-testing-utils", - "revm-primitives 9.0.1", + "revm-primitives", "secp256k1", "serde_json", ] @@ -7355,8 +7355,8 @@ dependencies = [ "reth-primitives", "reth-prune-types", "reth-revm", - "revm 14.0.1", - "revm-primitives 9.0.1", + "revm", + "revm-primitives", "thiserror", "tracing", ] @@ -7373,7 +7373,7 @@ dependencies = [ "reth-consensus", "reth-prune-types", "reth-storage-errors", - "revm-primitives 9.0.1", + "revm-primitives", ] [[package]] @@ -7386,7 +7386,7 @@ dependencies = [ "reth-execution-errors", "reth-primitives", "reth-trie", - "revm 14.0.1", + "revm", "serde", ] @@ -8057,7 +8057,7 @@ dependencies = [ "reth-rpc-types-compat", "reth-transaction-pool", "reth-trie", - "revm 14.0.1", + "revm", "sha2 0.10.8", "thiserror", "tracing", @@ -8091,7 +8091,7 @@ dependencies = [ "reth-rpc-types", "reth-tasks", "reth-transaction-pool", - "revm 14.0.1", + "revm", "serde_json", "thiserror", "tokio", @@ -8114,7 +8114,7 @@ dependencies = [ "reth-revm", "reth-rpc-types", "reth-transaction-pool", - "revm 14.0.1", + "revm", "thiserror", "tokio", "tokio-stream", @@ -8179,7 +8179,7 @@ dependencies = [ "reth-primitives-traits", "reth-static-file-types", "reth-trie-common", - "revm-primitives 9.0.1", + "revm-primitives", "secp256k1", "serde", "serde_json", @@ -8208,7 +8208,7 @@ dependencies = [ "proptest-arbitrary-interop", "rand 0.8.5", "reth-codecs", - "revm-primitives 9.0.1", + "revm-primitives", "roaring", "serde", "serde_json", @@ -8251,7 +8251,7 @@ dependencies = [ "reth-testing-utils", "reth-trie", "reth-trie-db", - "revm 14.0.1", + "revm", "strum", "tempfile", "tokio", @@ -8320,7 +8320,7 @@ dependencies = [ "reth-storage-api", "reth-storage-errors", "reth-trie", - "revm 14.0.1", + "revm", ] [[package]] @@ -8367,9 +8367,9 @@ dependencies = [ 
"reth-testing-utils", "reth-transaction-pool", "reth-trie", - "revm 14.0.1", + "revm", "revm-inspectors", - "revm-primitives 9.0.1", + "revm-primitives", "secp256k1", "serde", "serde_json", @@ -8519,9 +8519,9 @@ dependencies = [ "reth-tasks", "reth-transaction-pool", "reth-trie", - "revm 14.0.1", + "revm", "revm-inspectors", - "revm-primitives 9.0.1", + "revm-primitives", "tokio", "tracing", ] @@ -8552,9 +8552,9 @@ dependencies = [ "reth-tasks", "reth-transaction-pool", "reth-trie", - "revm 14.0.1", + "revm", "revm-inspectors", - "revm-primitives 9.0.1", + "revm-primitives", "schnellru", "serde", "serde_json", @@ -8860,7 +8860,7 @@ dependencies = [ "reth-storage-api", "reth-tasks", "reth-tracing", - "revm 14.0.1", + "revm", "rustc-hash 2.0.0", "schnellru", "serde", @@ -8894,7 +8894,7 @@ dependencies = [ "reth-stages-types", "reth-storage-errors", "reth-trie-common", - "revm 14.0.1", + "revm", "serde", "serde_json", "similar-asserts", @@ -8924,7 +8924,7 @@ dependencies = [ "proptest-arbitrary-interop", "reth-codecs", "reth-primitives-traits", - "revm-primitives 9.0.1", + "revm-primitives", "serde", "test-fuzz", "toml", @@ -8954,7 +8954,7 @@ dependencies = [ "reth-storage-errors", "reth-trie", "reth-trie-common", - "revm 14.0.1", + "revm", "serde", "serde_json", "similar-asserts", @@ -8998,22 +8998,8 @@ dependencies = [ "auto_impl", "cfg-if", "dyn-clone", - "revm-interpreter 10.0.1", - "revm-precompile 11.0.1", - "serde", - "serde_json", -] - -[[package]] -name = "revm" -version = "14.0.1" -source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" -dependencies = [ - "auto_impl", - "cfg-if", - "dyn-clone", - "revm-interpreter 10.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", - "revm-precompile 11.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", + "revm-interpreter", + "revm-precompile", "serde", "serde_json", ] @@ -9021,7 +9007,6 @@ dependencies = [ [[package]] name = "revm-inspectors" version = "0.6.0" -source = "git+https://github.com/taikoxyz/revm-inspectors.git?branch=main-rbuilder#a7db16ce222d58eac84cfeeed4b5a9541dadc1d0" dependencies = [ "alloy-primitives 0.8.8", "alloy-rpc-types-eth", @@ -9031,7 +9016,7 @@ dependencies = [ "boa_engine", "boa_gc", "colorchoice", - "revm 14.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", + "revm", "serde_json", "thiserror", ] @@ -9040,16 +9025,7 @@ dependencies = [ name = "revm-interpreter" version = "10.0.1" dependencies = [ - "revm-primitives 9.0.1", - "serde", -] - -[[package]] -name = "revm-interpreter" -version = "10.0.1" -source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" -dependencies = [ - "revm-primitives 9.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", + "revm-primitives", "serde", ] @@ -9064,24 +9040,7 @@ dependencies = [ "k256", "once_cell", "p256", - "revm-primitives 9.0.1", - "ripemd", - "secp256k1", - "sha2 0.10.8", - "substrate-bn", -] - -[[package]] -name = "revm-precompile" -version = "11.0.1" -source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" -dependencies = [ - "aurora-engine-modexp", - "c-kzg", - "cfg-if", - "k256", - "once_cell", - "revm-primitives 9.0.1 (git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth)", + "revm-primitives", "ripemd", "secp256k1", "sha2 0.10.8", @@ -9106,24 +9065,6 @@ dependencies = [ "serde", ] -[[package]] -name = "revm-primitives" -version = 
"9.0.1" -source = "git+https://github.com/taikoxyz/revm.git?branch=v43-gwyneth#959611e7a6ceeb693acffe00ba8e4d976591479f" -dependencies = [ - "alloy-eips", - "alloy-primitives 0.8.8", - "auto_impl", - "bitflags 2.6.0", - "bitvec", - "cfg-if", - "dyn-clone", - "enumn", - "hashbrown 0.14.5", - "hex", - "serde", -] - [[package]] name = "rfc6979" version = "0.4.0" diff --git a/Cargo.toml b/Cargo.toml index 6b32e33d5464..4b949490d5c1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -570,4 +570,4 @@ revm = { path = "../revm/crates/revm" } revm-primitives = { path = "../revm/crates/primitives" } revm-interpreter = { path = "../revm/crates/interpreter" } revm-precompile = { path = "../revm/crates/precompile" } -revm-inspectors = { git = "https://github.com/taikoxyz/revm-inspectors.git", branch = "main-rbuilder" } \ No newline at end of file +revm-inspectors = { path = "../revm-inspectors" } \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 3a6514d0ef04..d340803fe0ec 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,11 +8,25 @@ RUN apt-get update && apt-get -y upgrade && apt-get install -y libclang-dev pkg- # Builds a cargo-chef plan FROM chef AS planner -COPY . . +COPY ./reth/Cargo.lock ./Cargo.lock +COPY ./reth/Cargo.toml ./Cargo.toml +COPY ./reth/crates ./crates +COPY ./reth/bin ./bin +COPY ./reth/examples ./examples +COPY ./reth/testing ./testing RUN cargo chef prepare --recipe-path recipe.json FROM chef AS builder -COPY --from=planner /app/recipe.json recipe.json +COPY --from=planner /app/recipe.json /app/reth/recipe.json +COPY ./reth/Cargo.lock ./reth/Cargo.lock +COPY ./reth/Cargo.toml ./reth/Cargo.toml +COPY ./reth/crates ./reth/crates +COPY ./reth/bin ./reth/bin +COPY ./reth/examples ./reth/examples +COPY ./reth/testing ./reth/testing +COPY ./revm ./revm +COPY ./revm-inspectors ./revm-inspectors +WORKDIR /app/reth # Build profile, release by default ARG BUILD_PROFILE=release @@ -29,51 +43,20 @@ ENV FEATURES $FEATURES # Builds dependencies RUN cargo chef cook --profile $BUILD_PROFILE --features "$FEATURES" --recipe-path recipe.json # Build application -COPY . . RUN cargo build --profile $BUILD_PROFILE --features "$FEATURES" --locked --bin reth -# Hack: Add a cache busting step (above steps are the more -# time consuming ones but we need to make sure the rbuilder is -# always freshly cloned and not cached !) -# Since the content of this file will change -# with each build, Docker will consider this -# layer (and all subsequent layers) as modified, -# forcing a re-execution of the following steps. 
-# ADD https://worldtimeapi.org/api/ip /tmp/bustcache - -# Clone and build rbuilder (gwyneth branch) -RUN git clone -b gwyneth https://github.com/taikoxyz/rbuilder.git /app/rbuilder -WORKDIR /app/rbuilder -RUN cargo build --release - # Copy binaries to a temporary location -RUN cp /app/target/$BUILD_PROFILE/reth /app/reth -RUN cp /app/rbuilder/target/release/rbuilder /app/rbuilder +RUN cp /app/reth/target/$BUILD_PROFILE/reth /app/reth # Use Ubuntu as the release image FROM ubuntu:22.04 AS runtime WORKDIR /app -# Install necessary runtime dependencies and Rust/Cargo -RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/* - -# Install Rust and Cargo -RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y -ENV PATH="/root/.cargo/bin:${PATH}" - # Copy reth and rbuilder binaries over from the build stage COPY --from=builder /app/reth /usr/local/bin -COPY --from=builder /app/rbuilder /usr/local/bin - -# Copy the entire rbuilder repository -COPY --from=builder /app/rbuilder /app/rbuilder # Copy licenses COPY LICENSE-* ./ -# Create start script -RUN echo '#!/bin/bash\nrbuilder run /app/rbuilder/config-gwyneth-reth.toml' > /app/start_rbuilder.sh && \ - chmod +x /app/start_rbuilder.sh - EXPOSE 30303 30303/udp 9001 8545 8546 ENTRYPOINT ["/usr/local/bin/reth"] diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 810596c27391..66a9630c73c8 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -395,6 +395,9 @@ where .await?; return Ok(frame) } + GethDebugBuiltInTracerType::FlatCallTracer => { + return Err(EthApiError::Unsupported("FlatCallTracer is not supported").into()) + } }, #[cfg(not(feature = "js-tracer"))] GethDebugTracerType::JsTracer(_) => { From d7f4eced5192fc699a837a511ac491c0bdabfce0 Mon Sep 17 00:00:00 2001 From: Brecht Devos Date: Tue, 22 Oct 2024 00:17:29 +0200 Subject: [PATCH 07/15] fix compile error --- crates/gwyneth/src/exex.rs | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs index cff105d27b99..7d7d8dc177cc 100644 --- a/crates/gwyneth/src/exex.rs +++ b/crates/gwyneth/src/exex.rs @@ -105,9 +105,6 @@ impl Rollup { } pub async fn commit(&mut self, chain: &Chain, node_idx: usize) -> eyre::Result<()> { - let node = &self.nodes[node_idx]; - let engine_api = &self.engine_apis[node_idx]; - let events = decode_chain_into_rollup_events(chain); for (block, _, event) in events { if let RollupContractEvents::BlockProposed(BlockProposed { @@ -148,15 +145,15 @@ impl Rollup { let payload_id = builder_attrs.inner.payload_id(); let parrent_beacon_block_root = builder_attrs.inner.parent_beacon_block_root.unwrap(); - + // trigger new payload building draining the pool - node.payload_builder.new_payload(builder_attrs).await.unwrap(); - + self.nodes[node_idx].payload_builder.new_payload(builder_attrs).await.unwrap(); + // wait for the payload builder to have finished building let mut payload = EthBuiltPayload::new(payload_id, SealedBlock::default(), U256::ZERO); loop { - let result = node.payload_builder.best_payload(payload_id).await; + let result = self.nodes[node_idx].payload_builder.best_payload(payload_id).await; if let Some(result) = result { if let Ok(new_payload) = result { @@ -175,12 +172,12 @@ impl Rollup { } break; } - + // trigger resolve payload via engine api - engine_api.get_payload_v3_value(payload_id).await?; - + self.engine_apis[node_idx].get_payload_v3_value(payload_id).await?; + // submit 
payload to engine api - let block_hash = engine_api + let block_hash = self.engine_apis[node_idx] .submit_payload( payload.clone(), parrent_beacon_block_root, @@ -190,7 +187,7 @@ impl Rollup { .await?; // trigger forkchoice update via engine api to commit the block to the blockchain - engine_api.update_forkchoice(block_hash, block_hash).await?; + self.engine_apis[node_idx].update_forkchoice(block_hash, block_hash).await?; } } From 34e62a33eb754bfd15eb13fac5e46768076b6485 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Keszey=20D=C3=A1niel?= Date: Tue, 22 Oct 2024 15:09:36 +0200 Subject: [PATCH 08/15] necessary deployemnts for multiple rollups --- packages/protocol/scripts/confs/network_params.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/protocol/scripts/confs/network_params.yaml b/packages/protocol/scripts/confs/network_params.yaml index 9a8eec16992a..e97f16aa52fc 100644 --- a/packages/protocol/scripts/confs/network_params.yaml +++ b/packages/protocol/scripts/confs/network_params.yaml @@ -3,6 +3,7 @@ participants: el_image: taiko_reth cl_type: lighthouse cl_image: sigp/lighthouse:latest + el_extra_params: ["--num_of_l2s", "2"] cl_extra_params: [--always-prepare-payload, --prepare-payload-lookahead, "12000"] - el_type: reth el_image: taiko_reth @@ -12,7 +13,7 @@ network_params: network_id: '160010' additional_services: - blockscout - - blockscout_l2_1 + - blockscout_l2_2 port_publisher: nat_exit_ip: KURTOSIS_IP_ADDR_PLACEHOLDER el: From da36de612cbd877e72e258490f17fe4ca0e44e45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Keszey=20D=C3=A1niel?= Date: Tue, 22 Oct 2024 16:02:55 +0200 Subject: [PATCH 09/15] filter for propoer chainId --- crates/gwyneth/src/exex.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs index 7d7d8dc177cc..f11dd2ff3ec7 100644 --- a/crates/gwyneth/src/exex.rs +++ b/crates/gwyneth/src/exex.rs @@ -117,6 +117,14 @@ impl Rollup { let transactions: Vec = decode_transactions(&meta.txList); println!("transactions: {:?}", transactions); + let all_transactions: Vec = decode_transactions(&meta.txList); + let node_chain_id = BASE_CHAIN_ID + node_idx as u64; + + let filtered_transactions: Vec = all_transactions + .into_iter() + .filter(|tx| tx.chain_id() == Some(node_chain_id)) + .collect(); + let attrs = GwynethPayloadAttributes { inner: EthPayloadAttributes { timestamp: block.timestamp, @@ -125,7 +133,7 @@ impl Rollup { withdrawals: Some(vec![]), parent_beacon_block_root: Some(B256::ZERO), }, - transactions: Some(transactions.clone()), + transactions: Some(filtered_transactions.clone()), gas_limit: None, }; From e31c08a076b00d11082871614f5d33fb87588b46 Mon Sep 17 00:00:00 2001 From: Brecht Devos Date: Thu, 24 Oct 2024 02:37:39 +0200 Subject: [PATCH 10/15] fix transactions on different chains --- crates/gwyneth/src/exex.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs index f11dd2ff3ec7..d3e6df503aba 100644 --- a/crates/gwyneth/src/exex.rs +++ b/crates/gwyneth/src/exex.rs @@ -118,13 +118,18 @@ impl Rollup { println!("transactions: {:?}", transactions); let all_transactions: Vec = decode_transactions(&meta.txList); - let node_chain_id = BASE_CHAIN_ID + node_idx as u64; - + let node_chain_id = BASE_CHAIN_ID + (node_idx as u64) * 100000; + let filtered_transactions: Vec = all_transactions .into_iter() .filter(|tx| tx.chain_id() == Some(node_chain_id)) .collect(); + if 
filtered_transactions.len() == 0 { + println!("no transactions for chain: {}", node_chain_id); + continue; + } + let attrs = GwynethPayloadAttributes { inner: EthPayloadAttributes { timestamp: block.timestamp, From 5f6d3a6ffcf0ea359e4c52bbd94a251aef28b54c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Keszey=20D=C3=A1niel?= Date: Thu, 24 Oct 2024 09:58:07 +0200 Subject: [PATCH 11/15] change ports --- bin/reth/src/main.rs | 4 ++-- crates/gwyneth/src/engine_api.rs | 8 +++++--- crates/gwyneth/src/exex.rs | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/bin/reth/src/main.rs b/bin/reth/src/main.rs index 5656019e0536..6d975a178e64 100644 --- a/bin/reth/src/main.rs +++ b/bin/reth/src/main.rs @@ -12,7 +12,7 @@ use reth_node_ethereum::EthereumNode; use reth_tasks::TaskManager; const BASE_CHAIN_ID: u64 = gwyneth::exex::BASE_CHAIN_ID; // Base chain ID for L2s -const NUM_L2_CHAINS: u64 = 2; // Number of L2 chains to create +const NUM_L2_CHAINS: u64 = 2; // Number of L2 chains to create. Todo: Shall come from config */ fn main() -> eyre::Result<()> { reth::cli::Cli::parse_args().run(|builder, _| async move { @@ -26,7 +26,7 @@ fn main() -> eyre::Result<()> { let mut gwyneth_nodes = Vec::new(); for i in 0..NUM_L2_CHAINS { - let chain_id = BASE_CHAIN_ID + (i * 100000); // Increment by 100000 for each L2 + let chain_id = BASE_CHAIN_ID + i; // Increment by 1 for each L2 let chain_spec = ChainSpecBuilder::default() .chain(chain_id.into()) diff --git a/crates/gwyneth/src/engine_api.rs b/crates/gwyneth/src/engine_api.rs index 4e63207e3266..f27b61c827bd 100644 --- a/crates/gwyneth/src/engine_api.rs +++ b/crates/gwyneth/src/engine_api.rs @@ -17,6 +17,8 @@ use reth_rpc_types::{ use std::{marker::PhantomData, net::Ipv4Addr}; use reth_rpc_builder::constants; +use crate::exex::BASE_CHAIN_ID; + /// Helper for engine api operations pub struct EngineApiContext { pub canonical_stream: CanonStateNotificationStream, @@ -122,9 +124,9 @@ impl RpcServerArgsExEx for RpcServerArgs { self.http_addr = Ipv4Addr::new(0, 0, 0, 0).into(); // Calculate HTTP and WS ports based on chain_id - let port_offset = ((chain_id - 167010) / 100000) as u16; - self.http_port = 10110 + (port_offset * 10000); - self.ws_port = 10111 + (port_offset * 10000); + let port_offset = (chain_id - BASE_CHAIN_ID) as u16; + self.http_port = 10110 + (port_offset * 100); + self.ws_port = 10111 + (port_offset * 100); // Set IPC path self.ipcpath = format!("{}-{}", constants::DEFAULT_IPC_ENDPOINT, chain_id); diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs index d3e6df503aba..6ae2206a7be4 100644 --- a/crates/gwyneth/src/exex.rs +++ b/crates/gwyneth/src/exex.rs @@ -118,7 +118,7 @@ impl Rollup { println!("transactions: {:?}", transactions); let all_transactions: Vec = decode_transactions(&meta.txList); - let node_chain_id = BASE_CHAIN_ID + (node_idx as u64) * 100000; + let node_chain_id = BASE_CHAIN_ID + (node_idx as u64); let filtered_transactions: Vec = all_transactions .into_iter() From 6476cb1e021ee05e814a0049b7f0e23cd4208f70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Keszey=20D=C3=A1niel?= Date: Thu, 24 Oct 2024 10:17:12 +0200 Subject: [PATCH 12/15] add readme and network_params --- README.md | 18 ++++++++++++++++++ .../protocol/scripts/confs/network_params.yaml | 1 + 2 files changed, 19 insertions(+) diff --git a/README.md b/README.md index d42fb8e95bfe..04b28744f33f 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,14 @@ Currency: ETH Block explorer: http://127.0.0.1:64003 ``` +``` +chain_id: 167011 +name: Gwyneth-2 +rpc: 
http://127.0.0.1:32006
+Currency: ETH
+Block explorer: http://127.0.0.1:64005
+```
+
 ```
 chain_id: 160010
 name: Gwyneth L1
@@ -35,6 +43,16 @@ Add test accounts that have some ETH to play with:
 
 Rabby/Brave wallet works, but some issues with nonces so you may have to manually input the correct nonce.
 
+# How to add extra layer 2s?
+
+In order to add extra layer 2 networks, you need to increase the `NUM_L2_CHAINS` constant in the main function [here](https://github.com/taikoxyz/gwyneth/blob/5f6d3a6ffcf0ea359e4c52bbd94a251aef28b54c/bin/reth/src/main.rs#L15). (Later on this will be a configuration setting - no code changes required!)
+
+If you also want infrastructure support, namely:
+
+1. Exposing the JSON-RPC port to the host machine (since everything runs in Docker with Kurtosis): you need to specify it as a config param like [here](https://github.com/taikoxyz/gwyneth/blob/5f6d3a6ffcf0ea359e4c52bbd94a251aef28b54c/packages/protocol/scripts/confs/network_params.yaml#L6). (By default, if you don't specify this param, the first layer 2 port - which is 10110 - is exposed to the host anyway. You only need to add this param if you expose more than one port to the outside world.)
+2. Blockscout support: [here](https://github.com/taikoxyz/gwyneth/blob/5f6d3a6ffcf0ea359e4c52bbd94a251aef28b54c/packages/protocol/scripts/confs/network_params.yaml#L16) you can see the pattern for spinning up a Blockscout service as well. If you want a third layer 2 explorer, just use the service name `blockscout_l2_3`.
+
+
 # reth
 
 [![CI status](https://github.com/paradigmxyz/reth/workflows/unit/badge.svg)][gh-ci]
diff --git a/packages/protocol/scripts/confs/network_params.yaml b/packages/protocol/scripts/confs/network_params.yaml
index e97f16aa52fc..e0c2a2bb1d76 100644
--- a/packages/protocol/scripts/confs/network_params.yaml
+++ b/packages/protocol/scripts/confs/network_params.yaml
@@ -9,6 +9,7 @@ participants:
     el_image: taiko_reth
     cl_type: teku
     cl_image: consensys/teku:latest
+    el_extra_params: ["--num_of_l2s", "2"]
 network_params:
   network_id: '160010'
 additional_services:

From 72795669bee8e4f1396bb58c3b66c76f57610f29 Mon Sep 17 00:00:00 2001
From: CeciliaZ030
Date: Sun, 27 Oct 2024 04:26:37 +0800
Subject: [PATCH 13/15] --l2.chain_ids --l2.datadir ,...
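
These flags turn the number of L2s into a runtime setting: the hard-coded `NUM_L2_CHAINS` constant referenced in the README section added in the previous patch goes away, and every chain id passed on the command line is paired with its own datadir. A minimal sketch of an invocation, using the `Cli::<L2Args>` parsing helper added in this patch (all concrete values - chain ids, datadirs, ports and the IPC path - are placeholders, not taken from any real deployment):

```rust
use reth::cli::Cli;
use reth_cli_commands::node::L2Args;

fn main() {
    // Hypothetical invocation: two L2 chains, one datadir per chain,
    // explicit HTTP ports, and a directory prefix for the per-chain IPC files.
    let _cli = Cli::<L2Args>::try_parse_args_from_l2([
        "reth", "node",
        "--l2.chain_ids", "167010", "167011",
        "--l2.datadirs", "/data/gwyneth-167010", "/data/gwyneth-167011",
        "--l2.ports", "10110", "10210",
        "--l2.ipc_path", "/tmp/gwyneth",
    ])
    .expect("valid L2 arguments");
    // chain_ids and datadirs must have the same length; main() asserts this
    // before launching one Gwyneth node per (chain_id, datadir) pair.
}
```

`--l2.ports` is optional: when it is omitted, `with_ports_and_ipc` derives the HTTP and WS ports from the chain id's offset from `BASE_CHAIN_ID`, and in either case the chain id is appended to the IPC path so every L2 exposes its own endpoint.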
--- Dockerfile | 51 +++---------------------- bin/reth/src/cli/mod.rs | 44 +++++++++++++++++++--- bin/reth/src/main.rs | 26 +++++++++---- crates/cli/commands/src/node.rs | 52 +++++++++++++++++++++++++- crates/gwyneth/src/engine_api.rs | 17 +++++++++ crates/gwyneth/src/exex.rs | 12 +++--- crates/node/builder/src/builder/mod.rs | 38 +------------------ crates/rpc/ipc/src/server/mod.rs | 3 ++ 8 files changed, 142 insertions(+), 101 deletions(-) diff --git a/Dockerfile b/Dockerfile index d340803fe0ec..51531b6d89a9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,23 +1,6 @@ -FROM lukemathwalker/cargo-chef:latest-rust-1 AS chef -WORKDIR /app -LABEL org.opencontainers.image.source=https://github.com/paradigmxyz/reth -LABEL org.opencontainers.image.licenses="MIT OR Apache-2.0" - -# Install system dependencies +FROM lukemathwalker/cargo-chef:latest-rust-1 AS builder RUN apt-get update && apt-get -y upgrade && apt-get install -y libclang-dev pkg-config git -# Builds a cargo-chef plan -FROM chef AS planner -COPY ./reth/Cargo.lock ./Cargo.lock -COPY ./reth/Cargo.toml ./Cargo.toml -COPY ./reth/crates ./crates -COPY ./reth/bin ./bin -COPY ./reth/examples ./examples -COPY ./reth/testing ./testing -RUN cargo chef prepare --recipe-path recipe.json - -FROM chef AS builder -COPY --from=planner /app/recipe.json /app/reth/recipe.json COPY ./reth/Cargo.lock ./reth/Cargo.lock COPY ./reth/Cargo.toml ./reth/Cargo.toml COPY ./reth/crates ./reth/crates @@ -26,37 +9,15 @@ COPY ./reth/examples ./reth/examples COPY ./reth/testing ./reth/testing COPY ./revm ./revm COPY ./revm-inspectors ./revm-inspectors -WORKDIR /app/reth - -# Build profile, release by default -ARG BUILD_PROFILE=release -ENV BUILD_PROFILE $BUILD_PROFILE - -# Extra Cargo flags -ARG RUSTFLAGS="" -ENV RUSTFLAGS "$RUSTFLAGS" -# Extra Cargo features -ARG FEATURES="" -ENV FEATURES $FEATURES +WORKDIR /reth +RUN cargo build --release --bin reth -# Builds dependencies -RUN cargo chef cook --profile $BUILD_PROFILE --features "$FEATURES" --recipe-path recipe.json -# Build application -RUN cargo build --profile $BUILD_PROFILE --features "$FEATURES" --locked --bin reth - -# Copy binaries to a temporary location -RUN cp /app/reth/target/$BUILD_PROFILE/reth /app/reth - -# Use Ubuntu as the release image FROM ubuntu:22.04 AS runtime -WORKDIR /app +COPY --from=builder /reth/target/release/reth /usr/local/bin -# Copy reth and rbuilder binaries over from the build stage -COPY --from=builder /app/reth /usr/local/bin - -# Copy licenses -COPY LICENSE-* ./ +WORKDIR /app +# RUN reth EXPOSE 30303 30303/udp 9001 8545 8546 ENTRYPOINT ["/usr/local/bin/reth"] diff --git a/bin/reth/src/cli/mod.rs b/bin/reth/src/cli/mod.rs index d6fc7d3c4df4..3b716dc00ab6 100644 --- a/bin/reth/src/cli/mod.rs +++ b/bin/reth/src/cli/mod.rs @@ -89,6 +89,23 @@ impl Cli { } } + +impl Cli { + /// Parsers only the default CLI arguments + pub fn parse_args_l2() -> Self { + Self::parse() + } + + /// Parsers only the default CLI arguments from the given iterator + pub fn try_parse_args_from_l2(itr: I) -> Result + where + I: IntoIterator, + T: Into + Clone, + { + Self::try_parse_from(itr) + } +} + impl Cli { /// Execute the configured cli command. 
/// @@ -242,10 +259,11 @@ mod tests { use super::*; use crate::args::ColorMode; use clap::CommandFactory; + use node::L2Args; #[test] fn parse_color_mode() { - let reth = Cli::try_parse_args_from(["reth", "node", "--color", "always"]).unwrap(); + let reth = Cli::::try_parse_args_from(["reth", "node", "--color", "always"]).unwrap(); assert_eq!(reth.logs.color, ColorMode::Always); } @@ -256,7 +274,7 @@ mod tests { fn test_parse_help_all_subcommands() { let reth = Cli::::command(); for sub_command in reth.get_subcommands() { - let err = Cli::try_parse_args_from(["reth", sub_command.get_name(), "--help"]) + let err = Cli::::try_parse_args_from(["reth", sub_command.get_name(), "--help"]) .err() .unwrap_or_else(|| { panic!("Failed to parse help message {}", sub_command.get_name()) @@ -272,7 +290,7 @@ mod tests { /// name #[test] fn parse_logs_path() { - let mut reth = Cli::try_parse_args_from(["reth", "node"]).unwrap(); + let mut reth = Cli::::try_parse_args_from(["reth", "node"]).unwrap(); reth.logs.log_file_directory = reth.logs.log_file_directory.join(reth.chain.chain.to_string()); let log_dir = reth.logs.log_file_directory; @@ -282,7 +300,7 @@ mod tests { let mut iter = SUPPORTED_CHAINS.iter(); iter.next(); for chain in iter { - let mut reth = Cli::try_parse_args_from(["reth", "node", "--chain", chain]).unwrap(); + let mut reth = Cli::::try_parse_args_from(["reth", "node", "--chain", chain]).unwrap(); reth.logs.log_file_directory = reth.logs.log_file_directory.join(reth.chain.chain.to_string()); let log_dir = reth.logs.log_file_directory; @@ -296,7 +314,7 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); std::env::set_var("RUST_LOG", "info,evm=debug"); - let reth = Cli::try_parse_args_from([ + let reth = Cli::::try_parse_args_from([ "reth", "init", "--datadir", @@ -307,4 +325,20 @@ mod tests { .unwrap(); assert!(reth.run(|_, _| async move { Ok(()) }).is_ok()); } + + #[test] + fn parse_l2_chains() { + let reth = Cli::::try_parse_args_from_l2([ + "reth", + "node", + "--l2.chain_ids", + "160010", + "160011", + "--l2.datadirs", + "path/one", + "path/two" + ]) + .unwrap(); + assert!(reth.run(|_, _| async move { Ok(()) }).is_ok()); + } } diff --git a/bin/reth/src/main.rs b/bin/reth/src/main.rs index 6d975a178e64..c52b30f0c635 100644 --- a/bin/reth/src/main.rs +++ b/bin/reth/src/main.rs @@ -4,18 +4,21 @@ #[global_allocator] static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; +use std::sync::Arc; + use gwyneth::{engine_api::RpcServerArgsExEx, GwynethNode}; use reth::args::{DiscoveryArgs, NetworkArgs, RpcServerArgs}; use reth_chainspec::ChainSpecBuilder; +use reth_cli_commands::node::L2Args; +use reth_db::init_db; use reth_node_builder::{NodeBuilder, NodeConfig, NodeHandle}; use reth_node_ethereum::EthereumNode; use reth_tasks::TaskManager; -const BASE_CHAIN_ID: u64 = gwyneth::exex::BASE_CHAIN_ID; // Base chain ID for L2s -const NUM_L2_CHAINS: u64 = 2; // Number of L2 chains to create. 
Todo: Shall come from config */ - fn main() -> eyre::Result<()> { - reth::cli::Cli::parse_args().run(|builder, _| async move { + println!("WTF"); + reth::cli::Cli::::parse_args_l2().run(|builder, ext| async move { + println!("Starting reth node with custom exex \n {:?}", ext); let tasks = TaskManager::current(); let exec = tasks.executor(); let network_config = NetworkArgs { @@ -25,9 +28,13 @@ fn main() -> eyre::Result<()> { let mut gwyneth_nodes = Vec::new(); - for i in 0..NUM_L2_CHAINS { - let chain_id = BASE_CHAIN_ID + i; // Increment by 1 for each L2 + // Assuming chain_ids & datadirs are mandetory + // If ports and ipc are not supported we used the default ways to derive + assert_eq!(ext.chain_ids.len(), ext.datadirs.len()); + assert!(ext.chain_ids.len() > 0); + + for (idx, (chain_id, datadir)) in ext.chain_ids.into_iter().zip(ext.datadirs).enumerate() { let chain_spec = ChainSpecBuilder::default() .chain(chain_id.into()) .genesis( @@ -46,12 +53,15 @@ fn main() -> eyre::Result<()> { .with_rpc( RpcServerArgs::default() .with_unused_ports() - .with_static_l2_rpc_ip_and_port(chain_id) + .with_ports_and_ipc(ext.ports.get(idx), ext.ipc_path.clone(), chain_id) ); + + let db = Arc::new(init_db(datadir, reth_db::mdbx::DatabaseArguments::default())?); let NodeHandle { node: gwyneth_node, node_exit_future: _ } = NodeBuilder::new(node_config.clone()) - .gwyneth_node(exec.clone(), chain_spec.chain.id()) + .with_database(db) + .with_launch_context(exec.clone()) .node(GwynethNode::default()) .launch() .await?; diff --git a/crates/cli/commands/src/node.rs b/crates/cli/commands/src/node.rs index 2cf1689fdd8f..720a04ec8e07 100644 --- a/crates/cli/commands/src/node.rs +++ b/crates/cli/commands/src/node.rs @@ -1,6 +1,6 @@ //! Main node command for launching a node -use clap::{value_parser, Args, Parser}; +use clap::{builder::Str, value_parser, Args, Parser}; use reth_chainspec::ChainSpec; use reth_cli_runner::CliContext; use reth_cli_util::parse_socket_address; @@ -199,6 +199,22 @@ impl NodeCommand { #[non_exhaustive] pub struct NoArgs; +#[derive(Debug, Clone, Default, Args, PartialEq, Eq)] +pub struct L2Args { + #[arg(long = "l2.chain_ids", required = true, num_args = 1..,)] + pub chain_ids: Vec, + + #[arg(long = "l2.datadirs", required = true, num_args = 1..,)] + pub datadirs: Vec, + + #[arg(long = "l2.ports", num_args = 1..,)] + pub ports: Vec, + + #[arg(long = "l2.ipc_path")] + pub ipc_path: String, +} + + #[cfg(test)] mod tests { use super::*; @@ -222,6 +238,40 @@ mod tests { } } + #[test] + fn parse_common_node_command_l2_args() { + let args = NodeCommand::::parse_from([ + "reth", + "--l2.chain_ids", + "160010", + "160011", + "--l2.datadirs", + "path/one", + "path/two", + "--l2.ports", + "1234", + "2345", + "--l2.ipc_path", + "/tmp/ipc", + ]); + assert_eq!( + args.ext, + L2Args { + chain_ids: vec![160010, 160011], + datadirs: vec!["path/one".into(), "path/two".into()], + ports: vec![1234, 2345], + ipc_path: "/tmp/ipc".into(), + }) + } + + #[test] + #[should_panic] + fn parse_l2_args() { + let args = NodeCommand::::try_parse_from([ + "reth", + ]).unwrap(); + } + #[test] fn parse_discovery_addr() { let cmd = diff --git a/crates/gwyneth/src/engine_api.rs b/crates/gwyneth/src/engine_api.rs index f27b61c827bd..d5e0514bbef7 100644 --- a/crates/gwyneth/src/engine_api.rs +++ b/crates/gwyneth/src/engine_api.rs @@ -116,6 +116,7 @@ impl PayloadEnvelopeExt for ExecutionPayloadEnvelopeV3 { } pub trait RpcServerArgsExEx { fn with_static_l2_rpc_ip_and_port(self, chain_id: u64) -> Self; + fn 
with_ports_and_ipc(self, port: Option<&u16>, ipc: String, chain_id: u64) -> Self; } impl RpcServerArgsExEx for RpcServerArgs { @@ -133,4 +134,20 @@ impl RpcServerArgsExEx for RpcServerArgs { self } + + fn with_ports_and_ipc(mut self, port: Option<&u16>, ipc: String, chain_id: u64) -> Self { + self.http = true; + self.http_addr = Ipv4Addr::new(0, 0, 0, 0).into(); + if let Some(port) = port { + self.http_port = *port; + self.ws_port = port + 100; + } else { + let port_offset = (chain_id - BASE_CHAIN_ID) as u16; + self.http_port = 10110 + (port_offset * 100); + self.ws_port = 10111 + (port_offset * 100); + } + self.ipcpath = format!("{}/l2.ipc-{}", ipc.clone(), chain_id); + println!("IPC path: {}", self.ipcpath); + self + } } diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs index 6ae2206a7be4..f4f08fd71c92 100644 --- a/crates/gwyneth/src/exex.rs +++ b/crates/gwyneth/src/exex.rs @@ -8,7 +8,7 @@ use crate::{ GwynethPayloadBuilderAttributes, }; use reth_consensus::Consensus; -use reth_db::{test_utils::TempDatabase, DatabaseEnv}; +use reth_db::DatabaseEnv; use reth_ethereum_engine_primitives::EthPayloadAttributes; use reth_evm_ethereum::EthEvmConfig; use reth_execution_types::Chain; @@ -38,19 +38,19 @@ pub type GwynethFullNode = FullNode< NodeAdapter< FullNodeTypesAdapter< GwynethNode, - Arc>, - BlockchainProvider>>, + Arc, + BlockchainProvider>, >, Components< FullNodeTypesAdapter< GwynethNode, - Arc>, - BlockchainProvider>>, + Arc, + BlockchainProvider>, >, Pool< TransactionValidationTaskExecutor< EthTransactionValidator< - BlockchainProvider>>, + BlockchainProvider>, EthPooledTransaction, >, >, diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 1d993352e7ad..4e913a7ecbfc 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -5,6 +5,7 @@ pub mod add_ons; mod states; +use reth_db::init_db; use reth_rpc_types::WithOtherFields; pub use states::*; @@ -23,11 +24,7 @@ use reth_network::{ }; use reth_node_api::{FullNodeTypes, FullNodeTypesAdapter, NodeAddOns, NodeTypes}; use reth_node_core::{ - cli::config::{PayloadBuilderConfig, RethTransactionPoolConfig}, - dirs::{ChainPath, DataDirPath}, - node_config::NodeConfig, - primitives::Head, - rpc::eth::{helpers::AddDevSigners, FullEthApiServer}, + cli::config::{PayloadBuilderConfig, RethTransactionPoolConfig}, dirs::{ChainPath, DataDirPath}, node_config::NodeConfig, primitives::Head, rpc::eth::{helpers::AddDevSigners, FullEthApiServer} }; use reth_primitives::revm_primitives::EnvKzgSettings; use reth_provider::{providers::BlockchainProvider, ChainSpecProvider, FullProvider}; @@ -176,37 +173,6 @@ impl NodeBuilder { WithLaunchContext { builder: self, task_executor } } - /// Creates a Gwyneth node - pub fn gwyneth_node( - mut self, - task_executor: TaskExecutor, - chain_id: u64, - ) -> WithLaunchContext>>> - { - let folder_name = format!("/data/reth/gwyneth-{}/", chain_id); - let path = reth_node_core::dirs::MaybePlatformPath::::from( - PathBuf::from(folder_name.clone()), - ); - - println!("path: {:?}", folder_name); - - fs::create_dir_all(folder_name).expect("gwyneth db dir creation failed"); - - self.config = self.config.with_datadir_args(reth_node_core::args::DatadirArgs { - datadir: path.clone(), - ..Default::default() - }); - - let data_dir = - path.unwrap_or_chain_default(self.config.chain.chain, self.config.datadir.clone()); - - println!("data_dir: {:?}", data_dir); - - let db = 
reth_db::test_utils::create_test_rw_db_with_path(data_dir.db()); - - WithLaunchContext { builder: self.with_database(db), task_executor } - } - /// Creates an _ephemeral_ preconfigured node for testing purposes. diff --git a/crates/rpc/ipc/src/server/mod.rs b/crates/rpc/ipc/src/server/mod.rs index 28c0f6e8cb4f..eee5bcc112b8 100644 --- a/crates/rpc/ipc/src/server/mod.rs +++ b/crates/rpc/ipc/src/server/mod.rs @@ -127,9 +127,12 @@ where ) { trace!(endpoint = ?self.endpoint, "starting ipc server"); + println!("****************"); if cfg!(unix) { + println!("Unix"); // ensure the file does not exist if std::fs::remove_file(&self.endpoint).is_ok() { + println!("Removed existing IPC endpoint file"); debug!(endpoint = ?self.endpoint, "removed existing IPC endpoint file"); } } From 4cc80223eaa29b8432cf4be407bb165cf98d5200 Mon Sep 17 00:00:00 2001 From: CeciliaZ030 Date: Sun, 27 Oct 2024 15:13:12 +0800 Subject: [PATCH 14/15] rbuilder runs --- bin/reth/src/main.rs | 9 ++++----- crates/gwyneth/src/exex.rs | 12 ++++++------ crates/node/builder/src/builder/mod.rs | 25 +++++++++++++++++++++++++ 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/bin/reth/src/main.rs b/bin/reth/src/main.rs index c52b30f0c635..401a41aecadb 100644 --- a/bin/reth/src/main.rs +++ b/bin/reth/src/main.rs @@ -7,7 +7,7 @@ static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; use std::sync::Arc; use gwyneth::{engine_api::RpcServerArgsExEx, GwynethNode}; -use reth::args::{DiscoveryArgs, NetworkArgs, RpcServerArgs}; +use reth::{args::{DiscoveryArgs, NetworkArgs, RpcServerArgs}, dirs::ChainPath}; use reth_chainspec::ChainSpecBuilder; use reth_cli_commands::node::L2Args; use reth_db::init_db; @@ -45,7 +45,7 @@ fn main() -> eyre::Result<()> { ) .cancun_activated() .build(); - + let node_config = NodeConfig::test() .with_chain(chain_spec.clone()) .with_network(network_config.clone()) @@ -56,12 +56,11 @@ fn main() -> eyre::Result<()> { .with_ports_and_ipc(ext.ports.get(idx), ext.ipc_path.clone(), chain_id) ); - let db = Arc::new(init_db(datadir, reth_db::mdbx::DatabaseArguments::default())?); + // let db = Arc::new(init_db(node_config.datadir().db(), reth_db::mdbx::DatabaseArguments::default())?); let NodeHandle { node: gwyneth_node, node_exit_future: _ } = NodeBuilder::new(node_config.clone()) - .with_database(db) - .with_launch_context(exec.clone()) + .gwyneth_node(exec.clone(), datadir) .node(GwynethNode::default()) .launch() .await?; diff --git a/crates/gwyneth/src/exex.rs b/crates/gwyneth/src/exex.rs index f4f08fd71c92..6ae2206a7be4 100644 --- a/crates/gwyneth/src/exex.rs +++ b/crates/gwyneth/src/exex.rs @@ -8,7 +8,7 @@ use crate::{ GwynethPayloadBuilderAttributes, }; use reth_consensus::Consensus; -use reth_db::DatabaseEnv; +use reth_db::{test_utils::TempDatabase, DatabaseEnv}; use reth_ethereum_engine_primitives::EthPayloadAttributes; use reth_evm_ethereum::EthEvmConfig; use reth_execution_types::Chain; @@ -38,19 +38,19 @@ pub type GwynethFullNode = FullNode< NodeAdapter< FullNodeTypesAdapter< GwynethNode, - Arc, - BlockchainProvider>, + Arc>, + BlockchainProvider>>, >, Components< FullNodeTypesAdapter< GwynethNode, - Arc, - BlockchainProvider>, + Arc>, + BlockchainProvider>>, >, Pool< TransactionValidationTaskExecutor< EthTransactionValidator< - BlockchainProvider>, + BlockchainProvider>>, EthPooledTransaction, >, >, diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 4e913a7ecbfc..ba320a40b0a2 100644 --- a/crates/node/builder/src/builder/mod.rs +++ 
b/crates/node/builder/src/builder/mod.rs @@ -173,6 +173,31 @@ impl NodeBuilder { WithLaunchContext { builder: self, task_executor } } + /// Creates a Gwyneth node + pub fn gwyneth_node( + mut self, + task_executor: TaskExecutor, + datadir: PathBuf, + ) -> WithLaunchContext>>> + { + let path = reth_node_core::dirs::MaybePlatformPath::::from(datadir); + self.config = self.config.with_datadir_args(reth_node_core::args::DatadirArgs { + datadir: path.clone(), + ..Default::default() + }); + + let data_dir = + path.unwrap_or_chain_default(self.config.chain.chain, self.config.datadir.clone()); + + println!("data_dir: {:?}", data_dir); + + let db = reth_db::test_utils::create_test_rw_db_with_path(data_dir.db()); + + WithLaunchContext { builder: self.with_database(db), task_executor } + } + + + /// Creates an _ephemeral_ preconfigured node for testing purposes. From a1d0a960f6aee3e42c7f686fb1ffff8ea4075aed Mon Sep 17 00:00:00 2001 From: CeciliaZ030 Date: Tue, 29 Oct 2024 16:14:59 +0800 Subject: [PATCH 15/15] removed packages --- packages/package.json | 10 - packages/protocol/.env | 3 - packages/protocol/.env_sample | 3 - packages/protocol/.github/workflows/test.yml | 34 - packages/protocol/.gitignore | 15 - packages/protocol/Makefile | 9 - packages/protocol/README.md | 66 - .../contracts/4844/BlobHashReader.yulp | 27 - .../contracts/4844/IBlobHashReader.sol | 17 - packages/protocol/contracts/4844/Lib4844.sol | 46 - .../protocol/contracts/L1/ChainProver.sol | 83 - packages/protocol/contracts/L1/ITaikoL1.sol | 53 - packages/protocol/contracts/L1/TaikoData.sol | 81 - .../protocol/contracts/L1/TaikoErrors.sol | 55 - .../protocol/contracts/L1/TaikoEvents.sol | 30 - packages/protocol/contracts/L1/TaikoL1.sol | 200 - .../contracts/L1/VerifierBattleRoyale.sol | 177 - .../contracts/L1/VerifierRegistry.sol | 49 - .../protocol/contracts/L1/actors/PBSActor.sol | 38 - .../contracts/L1/actors/ProverPayment.sol | 101 - .../L1/preconfs/ISequencerRegistry.sol | 10 - .../L1/preconfs/SequencerRegistry.sol | 50 - .../contracts/L1/provers/GuardianProver.sol | 39 - .../L1/provers/GuardianProver_tm.sol | 286 -- .../contracts/L1/provers/Guardians.sol | 108 - .../L1/verifiers/GuardianVerifier.sol | 38 - .../contracts/L1/verifiers/IVerifier.sol | 21 - .../L1/verifiers/MockSgxVerifier.sol | 194 - .../contracts/L1/verifiers/SgxVerifier.sol | 215 - .../L1/verifiers/libs/LibPublicInput.sol | 36 - .../protocol/contracts/L2/Lib1559Math.sol | 76 - .../protocol/contracts/L2/LibL2Config.sol | 20 - packages/protocol/contracts/L2/TaikoL2.sol | 263 -- .../protocol/contracts/L2/eip1559_util.py | 135 - .../AutomataDcapV3Attestation.sol | 508 --- .../contracts/automata-attestation/README.md | 5 - .../interfaces/IAttestation.sol | 13 - .../interfaces/ISigVerifyLib.sol | 15 - .../lib/EnclaveIdStruct.sol | 30 - .../lib/PEMCertChainLib.sol | 375 -- .../lib/QuoteV3Auth/V3Parser.sol | 306 -- .../lib/QuoteV3Auth/V3Struct.sol | 61 - .../lib/TCBInfoStruct.sol | 29 - .../lib/interfaces/IPEMCertChainLib.sol | 51 - .../automata-attestation/utils/Asn1Decode.sol | 134 - .../automata-attestation/utils/BytesUtils.sol | 140 - .../automata-attestation/utils/SHA1.sol | 195 - .../utils/SigVerifyLib.sol | 48 - .../utils/X509DateUtils.sol | 77 - packages/protocol/contracts/bridge/Bridge.sol | 738 ---- .../protocol/contracts/bridge/IBridge.sol | 161 - .../contracts/bridge/IQuotaManager.sol | 19 - packages/protocol/contracts/bridge/README.md | 105 - .../contracts/common/AddressManager.sol | 68 - .../contracts/common/AddressResolver.sol | 106 - 
.../contracts/common/AuthorizableContract.sol | 34 - .../contracts/common/EssentialContract.sol | 175 - .../contracts/common/IAddressManager.sol | 15 - .../contracts/common/IAddressResolver.sol | 42 - .../contracts/common/ICrossChainSync.sol | 39 - .../protocol/contracts/common/LibStrings.sol | 33 - .../contracts/examples/xErc20Example.sol | 8 - packages/protocol/contracts/gwyneth/Bus.sol | 44 - packages/protocol/contracts/gwyneth/EVM.sol | 68 - .../protocol/contracts/gwyneth/XChain.sol | 150 - .../contracts/gwyneth/XChainERC20Token.sol | 167 - .../protocol/contracts/libs/LibAddress.sol | 102 - packages/protocol/contracts/libs/LibBytes.sol | 45 - .../protocol/contracts/libs/LibDeploy.sol | 32 - packages/protocol/contracts/libs/LibMath.sol | 27 - .../protocol/contracts/libs/LibNetwork.sol | 57 - .../protocol/contracts/libs/LibTrieProof.sol | 66 - .../contracts/signal/ISignalService.sol | 181 - .../contracts/signal/SignalService.sol | 372 -- .../contracts/test/erc20/FreeMintERC20.sol | 28 - .../test/erc20/MayFailFreeMintERC20.sol | 55 - .../contracts/test/erc20/RegularERC20.sol | 11 - .../contracts/thirdparty/LibBytesUtils.sol | 141 - .../thirdparty/LibFixedPointMath.sol | 83 - .../protocol/contracts/thirdparty/README.md | 9 - .../contracts/thirdparty/optimism/Bytes.sol | 152 - .../thirdparty/optimism/rlp/RLPReader.sol | 303 -- .../thirdparty/optimism/rlp/RLPWriter.sol | 70 - .../thirdparty/optimism/trie/MerkleTrie.sol | 247 -- .../optimism/trie/MerkleTrieProofVerifier.sol | 58 - .../optimism/trie/SecureMerkleTrie.sol | 57 - .../risczero/IRiscZeroReceiptVerifier.sol | 27 - .../thirdparty/solmate/LibFixedPointMath.sol | 82 - .../contracts/tko/BridgedTaikoToken.sol | 55 - .../protocol/contracts/tko/TaikoToken.sol | 30 - .../protocol/contracts/tko/TaikoTokenBase.sol | 36 - .../contracts/tokenvault/BaseNFTVault.sol | 134 - .../contracts/tokenvault/BaseVault.sol | 87 - .../contracts/tokenvault/BridgedERC1155.sol | 116 - .../contracts/tokenvault/BridgedERC20.sol | 178 - .../contracts/tokenvault/BridgedERC721.sol | 113 - .../contracts/tokenvault/ERC1155Vault.sol | 306 -- .../contracts/tokenvault/ERC20Vault.sol | 474 --- .../contracts/tokenvault/ERC721Vault.sol | 261 -- .../contracts/tokenvault/IBridgedERC1155.sol | 49 - .../contracts/tokenvault/IBridgedERC20.sol | 71 - .../contracts/tokenvault/IBridgedERC721.sol | 42 - .../contracts/tokenvault/LibBridgedToken.sol | 44 - packages/protocol/deployments/deploy_l1.json | 11 - .../protocol/deployments/local_deployment.md | 67 - packages/protocol/foundry.toml | 52 - packages/protocol/package.json | 63 - packages/protocol/pnpm-lock.yaml | 3735 ----------------- packages/protocol/remappings.txt | 2 - .../AuthorizeRemoteTaikoProtocols.s.sol | 38 - .../protocol/scripts/DeployL1Locally.s.sol | 380 -- packages/protocol/scripts/DeployOnL1.s.sol | 400 -- .../L2_txn_simulation/CreateXChainTxn.s.sol | 35 - .../L2_txn_simulation/ProposeBlock.s.sol | 78 - .../scripts/L2_txn_simulation/createL2Txn.py | 50 - .../scripts/L2_txn_simulation/readme.md | 54 - .../scripts/L2_txn_simulation/sendTx.py | 69 - packages/protocol/scripts/SetAddress.s.sol | 42 - .../scripts/SetRemoteBridgeSuites.s.sol | 92 - .../scripts/confs/network_params.yaml | 25 - packages/protocol/scripts/download_solc.sh | 27 - .../protocol/scripts/launch_second_node.sh | 181 - packages/protocol/scripts/merge_contracts.py | 27 - packages/protocol/scripts/propose_block.sh | 29 - packages/protocol/scripts/setup_deps.sh | 228 - .../protocol/scripts/test_deploy_on_l1.sh | 25 - 
packages/protocol/scripts/upgrade_to.sh | 10 - packages/protocol/scripts/verify_contracts.sh | 90 - packages/protocol/src/Counter.sol | 14 - packages/protocol/test/DeployCapability.sol | 88 - packages/protocol/test/HelperContracts.sol | 51 - packages/protocol/test/L1/Guardians.t.sol | 88 - packages/protocol/test/L1/SgxVerifier.t.sol | 55 - packages/protocol/test/L1/TaikoL1.t.sol | 114 - .../test/L1/TaikoL1LibProvingWithTiers.t.sol | 820 ---- packages/protocol/test/L1/TaikoL1TestBase.sol | 544 --- packages/protocol/test/L2/Lib1559Math.t.sol | 39 - packages/protocol/test/TaikoTest.sol | 91 - packages/protocol/test/bridge/Bridge.t.sol | 623 --- .../test/common/EssentialContract.t.sol | 66 - .../test/libs/LibFixedPointMath.t.sol | 47 - .../protocol/test/signal/SignalService.t.sol | 649 --- .../test/tokenvault/BridgedERC20.t.sol | 139 - .../test/tokenvault/ERC1155Vault.t.sol | 988 ----- .../protocol/test/tokenvault/ERC20Vault.t.sol | 681 --- .../test/tokenvault/ERC721Vault.t.sol | 918 ---- 146 files changed, 22573 deletions(-) delete mode 100644 packages/package.json delete mode 100644 packages/protocol/.env delete mode 100644 packages/protocol/.env_sample delete mode 100644 packages/protocol/.github/workflows/test.yml delete mode 100644 packages/protocol/.gitignore delete mode 100644 packages/protocol/Makefile delete mode 100644 packages/protocol/README.md delete mode 100644 packages/protocol/contracts/4844/BlobHashReader.yulp delete mode 100644 packages/protocol/contracts/4844/IBlobHashReader.sol delete mode 100644 packages/protocol/contracts/4844/Lib4844.sol delete mode 100644 packages/protocol/contracts/L1/ChainProver.sol delete mode 100644 packages/protocol/contracts/L1/ITaikoL1.sol delete mode 100644 packages/protocol/contracts/L1/TaikoData.sol delete mode 100644 packages/protocol/contracts/L1/TaikoErrors.sol delete mode 100644 packages/protocol/contracts/L1/TaikoEvents.sol delete mode 100644 packages/protocol/contracts/L1/TaikoL1.sol delete mode 100644 packages/protocol/contracts/L1/VerifierBattleRoyale.sol delete mode 100644 packages/protocol/contracts/L1/VerifierRegistry.sol delete mode 100644 packages/protocol/contracts/L1/actors/PBSActor.sol delete mode 100644 packages/protocol/contracts/L1/actors/ProverPayment.sol delete mode 100644 packages/protocol/contracts/L1/preconfs/ISequencerRegistry.sol delete mode 100644 packages/protocol/contracts/L1/preconfs/SequencerRegistry.sol delete mode 100644 packages/protocol/contracts/L1/provers/GuardianProver.sol delete mode 100644 packages/protocol/contracts/L1/provers/GuardianProver_tm.sol delete mode 100644 packages/protocol/contracts/L1/provers/Guardians.sol delete mode 100644 packages/protocol/contracts/L1/verifiers/GuardianVerifier.sol delete mode 100644 packages/protocol/contracts/L1/verifiers/IVerifier.sol delete mode 100644 packages/protocol/contracts/L1/verifiers/MockSgxVerifier.sol delete mode 100644 packages/protocol/contracts/L1/verifiers/SgxVerifier.sol delete mode 100644 packages/protocol/contracts/L1/verifiers/libs/LibPublicInput.sol delete mode 100644 packages/protocol/contracts/L2/Lib1559Math.sol delete mode 100644 packages/protocol/contracts/L2/LibL2Config.sol delete mode 100644 packages/protocol/contracts/L2/TaikoL2.sol delete mode 100644 packages/protocol/contracts/L2/eip1559_util.py delete mode 100644 packages/protocol/contracts/automata-attestation/AutomataDcapV3Attestation.sol delete mode 100644 packages/protocol/contracts/automata-attestation/README.md delete mode 100644 
packages/protocol/contracts/automata-attestation/interfaces/IAttestation.sol delete mode 100644 packages/protocol/contracts/automata-attestation/interfaces/ISigVerifyLib.sol delete mode 100644 packages/protocol/contracts/automata-attestation/lib/EnclaveIdStruct.sol delete mode 100644 packages/protocol/contracts/automata-attestation/lib/PEMCertChainLib.sol delete mode 100644 packages/protocol/contracts/automata-attestation/lib/QuoteV3Auth/V3Parser.sol delete mode 100644 packages/protocol/contracts/automata-attestation/lib/QuoteV3Auth/V3Struct.sol delete mode 100644 packages/protocol/contracts/automata-attestation/lib/TCBInfoStruct.sol delete mode 100644 packages/protocol/contracts/automata-attestation/lib/interfaces/IPEMCertChainLib.sol delete mode 100644 packages/protocol/contracts/automata-attestation/utils/Asn1Decode.sol delete mode 100644 packages/protocol/contracts/automata-attestation/utils/BytesUtils.sol delete mode 100644 packages/protocol/contracts/automata-attestation/utils/SHA1.sol delete mode 100644 packages/protocol/contracts/automata-attestation/utils/SigVerifyLib.sol delete mode 100644 packages/protocol/contracts/automata-attestation/utils/X509DateUtils.sol delete mode 100644 packages/protocol/contracts/bridge/Bridge.sol delete mode 100644 packages/protocol/contracts/bridge/IBridge.sol delete mode 100644 packages/protocol/contracts/bridge/IQuotaManager.sol delete mode 100644 packages/protocol/contracts/bridge/README.md delete mode 100644 packages/protocol/contracts/common/AddressManager.sol delete mode 100644 packages/protocol/contracts/common/AddressResolver.sol delete mode 100644 packages/protocol/contracts/common/AuthorizableContract.sol delete mode 100644 packages/protocol/contracts/common/EssentialContract.sol delete mode 100644 packages/protocol/contracts/common/IAddressManager.sol delete mode 100644 packages/protocol/contracts/common/IAddressResolver.sol delete mode 100644 packages/protocol/contracts/common/ICrossChainSync.sol delete mode 100644 packages/protocol/contracts/common/LibStrings.sol delete mode 100644 packages/protocol/contracts/examples/xErc20Example.sol delete mode 100644 packages/protocol/contracts/gwyneth/Bus.sol delete mode 100644 packages/protocol/contracts/gwyneth/EVM.sol delete mode 100644 packages/protocol/contracts/gwyneth/XChain.sol delete mode 100644 packages/protocol/contracts/gwyneth/XChainERC20Token.sol delete mode 100644 packages/protocol/contracts/libs/LibAddress.sol delete mode 100644 packages/protocol/contracts/libs/LibBytes.sol delete mode 100644 packages/protocol/contracts/libs/LibDeploy.sol delete mode 100644 packages/protocol/contracts/libs/LibMath.sol delete mode 100644 packages/protocol/contracts/libs/LibNetwork.sol delete mode 100644 packages/protocol/contracts/libs/LibTrieProof.sol delete mode 100644 packages/protocol/contracts/signal/ISignalService.sol delete mode 100644 packages/protocol/contracts/signal/SignalService.sol delete mode 100644 packages/protocol/contracts/test/erc20/FreeMintERC20.sol delete mode 100644 packages/protocol/contracts/test/erc20/MayFailFreeMintERC20.sol delete mode 100644 packages/protocol/contracts/test/erc20/RegularERC20.sol delete mode 100644 packages/protocol/contracts/thirdparty/LibBytesUtils.sol delete mode 100644 packages/protocol/contracts/thirdparty/LibFixedPointMath.sol delete mode 100644 packages/protocol/contracts/thirdparty/README.md delete mode 100644 packages/protocol/contracts/thirdparty/optimism/Bytes.sol delete mode 100644 
packages/protocol/contracts/thirdparty/optimism/rlp/RLPReader.sol delete mode 100644 packages/protocol/contracts/thirdparty/optimism/rlp/RLPWriter.sol delete mode 100644 packages/protocol/contracts/thirdparty/optimism/trie/MerkleTrie.sol delete mode 100644 packages/protocol/contracts/thirdparty/optimism/trie/MerkleTrieProofVerifier.sol delete mode 100644 packages/protocol/contracts/thirdparty/optimism/trie/SecureMerkleTrie.sol delete mode 100644 packages/protocol/contracts/thirdparty/risczero/IRiscZeroReceiptVerifier.sol delete mode 100644 packages/protocol/contracts/thirdparty/solmate/LibFixedPointMath.sol delete mode 100644 packages/protocol/contracts/tko/BridgedTaikoToken.sol delete mode 100644 packages/protocol/contracts/tko/TaikoToken.sol delete mode 100644 packages/protocol/contracts/tko/TaikoTokenBase.sol delete mode 100644 packages/protocol/contracts/tokenvault/BaseNFTVault.sol delete mode 100644 packages/protocol/contracts/tokenvault/BaseVault.sol delete mode 100644 packages/protocol/contracts/tokenvault/BridgedERC1155.sol delete mode 100644 packages/protocol/contracts/tokenvault/BridgedERC20.sol delete mode 100644 packages/protocol/contracts/tokenvault/BridgedERC721.sol delete mode 100644 packages/protocol/contracts/tokenvault/ERC1155Vault.sol delete mode 100644 packages/protocol/contracts/tokenvault/ERC20Vault.sol delete mode 100644 packages/protocol/contracts/tokenvault/ERC721Vault.sol delete mode 100644 packages/protocol/contracts/tokenvault/IBridgedERC1155.sol delete mode 100644 packages/protocol/contracts/tokenvault/IBridgedERC20.sol delete mode 100644 packages/protocol/contracts/tokenvault/IBridgedERC721.sol delete mode 100644 packages/protocol/contracts/tokenvault/LibBridgedToken.sol delete mode 100644 packages/protocol/deployments/deploy_l1.json delete mode 100644 packages/protocol/deployments/local_deployment.md delete mode 100644 packages/protocol/foundry.toml delete mode 100644 packages/protocol/package.json delete mode 100644 packages/protocol/pnpm-lock.yaml delete mode 100644 packages/protocol/remappings.txt delete mode 100644 packages/protocol/scripts/AuthorizeRemoteTaikoProtocols.s.sol delete mode 100644 packages/protocol/scripts/DeployL1Locally.s.sol delete mode 100644 packages/protocol/scripts/DeployOnL1.s.sol delete mode 100644 packages/protocol/scripts/L2_txn_simulation/CreateXChainTxn.s.sol delete mode 100644 packages/protocol/scripts/L2_txn_simulation/ProposeBlock.s.sol delete mode 100644 packages/protocol/scripts/L2_txn_simulation/createL2Txn.py delete mode 100644 packages/protocol/scripts/L2_txn_simulation/readme.md delete mode 100755 packages/protocol/scripts/L2_txn_simulation/sendTx.py delete mode 100644 packages/protocol/scripts/SetAddress.s.sol delete mode 100644 packages/protocol/scripts/SetRemoteBridgeSuites.s.sol delete mode 100644 packages/protocol/scripts/confs/network_params.yaml delete mode 100755 packages/protocol/scripts/download_solc.sh delete mode 100755 packages/protocol/scripts/launch_second_node.sh delete mode 100644 packages/protocol/scripts/merge_contracts.py delete mode 100755 packages/protocol/scripts/propose_block.sh delete mode 100755 packages/protocol/scripts/setup_deps.sh delete mode 100755 packages/protocol/scripts/test_deploy_on_l1.sh delete mode 100755 packages/protocol/scripts/upgrade_to.sh delete mode 100755 packages/protocol/scripts/verify_contracts.sh delete mode 100644 packages/protocol/src/Counter.sol delete mode 100644 packages/protocol/test/DeployCapability.sol delete mode 100644 
packages/protocol/test/HelperContracts.sol delete mode 100644 packages/protocol/test/L1/Guardians.t.sol delete mode 100644 packages/protocol/test/L1/SgxVerifier.t.sol delete mode 100644 packages/protocol/test/L1/TaikoL1.t.sol delete mode 100644 packages/protocol/test/L1/TaikoL1LibProvingWithTiers.t.sol delete mode 100644 packages/protocol/test/L1/TaikoL1TestBase.sol delete mode 100644 packages/protocol/test/L2/Lib1559Math.t.sol delete mode 100644 packages/protocol/test/TaikoTest.sol delete mode 100644 packages/protocol/test/bridge/Bridge.t.sol delete mode 100644 packages/protocol/test/common/EssentialContract.t.sol delete mode 100644 packages/protocol/test/libs/LibFixedPointMath.t.sol delete mode 100644 packages/protocol/test/signal/SignalService.t.sol delete mode 100644 packages/protocol/test/tokenvault/BridgedERC20.t.sol delete mode 100644 packages/protocol/test/tokenvault/ERC1155Vault.t.sol delete mode 100644 packages/protocol/test/tokenvault/ERC20Vault.t.sol delete mode 100644 packages/protocol/test/tokenvault/ERC721Vault.t.sol diff --git a/packages/package.json b/packages/package.json deleted file mode 100644 index 1439b95ab7f7..000000000000 --- a/packages/package.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "name": "gwyneth", - "version": "1.0.0", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "Taiko Labs", - "license": "MIT" -} diff --git a/packages/protocol/.env b/packages/protocol/.env deleted file mode 100644 index 3289806d9c1a..000000000000 --- a/packages/protocol/.env +++ /dev/null @@ -1,3 +0,0 @@ -L2_GENESIS_HASH=0xdf90a9c4daa571aa308e967c9a6b4bf21ba8842d95d73d28be112b6fe0618e8c -PRIVATE_KEY=0xbcdf20249abf0ed6d944c0288fad489e33f66b3960d9e6229c1cd214ed3bbe31 -MAINNET_CONTRACT_OWNER=0x8943545177806ED17B9F23F0a21ee5948eCaa776 \ No newline at end of file diff --git a/packages/protocol/.env_sample b/packages/protocol/.env_sample deleted file mode 100644 index 3289806d9c1a..000000000000 --- a/packages/protocol/.env_sample +++ /dev/null @@ -1,3 +0,0 @@ -L2_GENESIS_HASH=0xdf90a9c4daa571aa308e967c9a6b4bf21ba8842d95d73d28be112b6fe0618e8c -PRIVATE_KEY=0xbcdf20249abf0ed6d944c0288fad489e33f66b3960d9e6229c1cd214ed3bbe31 -MAINNET_CONTRACT_OWNER=0x8943545177806ED17B9F23F0a21ee5948eCaa776 \ No newline at end of file diff --git a/packages/protocol/.github/workflows/test.yml b/packages/protocol/.github/workflows/test.yml deleted file mode 100644 index 9282e82944e8..000000000000 --- a/packages/protocol/.github/workflows/test.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: test - -on: workflow_dispatch - -env: - FOUNDRY_PROFILE: ci - -jobs: - check: - strategy: - fail-fast: true - - name: Foundry project - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Install Foundry - uses: foundry-rs/foundry-toolchain@v1 - with: - version: nightly - - - name: Run Forge build - run: | - forge --version - forge build --sizes - id: build - - - name: Run Forge tests - run: | - forge test -vvv - id: test diff --git a/packages/protocol/.gitignore b/packages/protocol/.gitignore deleted file mode 100644 index d4dfc4c9e51a..000000000000 --- a/packages/protocol/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -# Compiler files -cache/ -out/ -node_modules/ - -# Ignores development broadcast logs -!/broadcast -/broadcast/*/31337/ -/broadcast/**/dry-run/ - -# Docs -docs/ - -# Dotenv file -.env diff --git a/packages/protocol/Makefile b/packages/protocol/Makefile deleted file mode 100644 index 4c267eea37eb..000000000000 --- 
a/packages/protocol/Makefile +++ /dev/null @@ -1,9 +0,0 @@ -# build/Makefile - -.PHONY: install - -install: - ./scripts/setup_deps.sh - -propose: - ./scripts/propose_block.sh \ No newline at end of file diff --git a/packages/protocol/README.md b/packages/protocol/README.md deleted file mode 100644 index 9265b4558406..000000000000 --- a/packages/protocol/README.md +++ /dev/null @@ -1,66 +0,0 @@ -## Foundry - -**Foundry is a blazing fast, portable and modular toolkit for Ethereum application development written in Rust.** - -Foundry consists of: - -- **Forge**: Ethereum testing framework (like Truffle, Hardhat and DappTools). -- **Cast**: Swiss army knife for interacting with EVM smart contracts, sending transactions and getting chain data. -- **Anvil**: Local Ethereum node, akin to Ganache, Hardhat Network. -- **Chisel**: Fast, utilitarian, and verbose solidity REPL. - -## Documentation - -https://book.getfoundry.sh/ - -## Usage - -### Build - -```shell -$ forge build -``` - -### Test - -```shell -$ forge test -``` - -### Format - -```shell -$ forge fmt -``` - -### Gas Snapshots - -```shell -$ forge snapshot -``` - -### Anvil - -```shell -$ anvil -``` - -### Deploy - -```shell -$ forge script script/Counter.s.sol:CounterScript --rpc-url --private-key -``` - -### Cast - -```shell -$ cast -``` - -### Help - -```shell -$ forge --help -$ anvil --help -$ cast --help -``` diff --git a/packages/protocol/contracts/4844/BlobHashReader.yulp b/packages/protocol/contracts/4844/BlobHashReader.yulp deleted file mode 100644 index 7490d5c1c8fe..000000000000 --- a/packages/protocol/contracts/4844/BlobHashReader.yulp +++ /dev/null @@ -1,27 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -// An implemenatation of IBlobHashReader -object "BlobHashReader" { - code { - datacopy(0, dataoffset("runtime"), datasize("runtime")) - return(0, datasize("runtime")) - } - object "runtime" { - code { - // Match against the keccak of the ABI function signature needed. - switch shr(0xe0,calldataload(0)) - // bytes4(keccak("function getFirstBlobHash()")) - // Returns the versioned hash for the first blob in this transaction. - case 0xfd122ecf { - // DATAHASH opcode has hex value 0x49 - let hash := verbatim_1i_1o(hex"49", 0) - mstore(0, hash) - return(0, 32) - } - } - } -} \ No newline at end of file diff --git a/packages/protocol/contracts/4844/IBlobHashReader.sol b/packages/protocol/contracts/4844/IBlobHashReader.sol deleted file mode 100644 index eb88b6e19350..000000000000 --- a/packages/protocol/contracts/4844/IBlobHashReader.sol +++ /dev/null @@ -1,17 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -/// @title IBlobHashReader -/// @dev Labeled in AddressResolver as "blob_hash_reader" -/// @dev This interface and its corresponding implementation may deprecate once -/// solidity supports the new BLOBHASH opcode natively. -interface IBlobHashReader { - /// @notice Returns the versioned hash for the first blob in this - /// transaction. If there is no blob found, 0x0 is returned. 
- function getFirstBlobHash() external view returns (bytes32); -} diff --git a/packages/protocol/contracts/4844/Lib4844.sol b/packages/protocol/contracts/4844/Lib4844.sol deleted file mode 100644 index 64ae752c17ff..000000000000 --- a/packages/protocol/contracts/4844/Lib4844.sol +++ /dev/null @@ -1,46 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -/// @title Lib4844 -/// @notice A library for handling EIP-4844 blobs -/// `solc contracts/libs/Lib4844.sol --ir > contracts/libs/Lib4844.yul` -library Lib4844 { - address public constant POINT_EVALUATION_PRECOMPILE_ADDRESS = address(0x0A); - uint32 public constant FIELD_ELEMENTS_PERBLOB = 4096; - uint256 public constant BLS_MODULUS = - 52_435_875_175_126_190_479_447_740_508_185_965_837_690_552_500_527_637_822_603_658_699_938_581_184_513; - - error EVAL_FAILED(); - error POINT_X_TOO_LARGE(); - error POINT_Y_TOO_LARGE(); - - /// @notice Evaluates the 4844 point using the precompile. - /// @param blobHash The versioned hash - /// @param x The evaluation point - /// @param y The expected output - /// @param commitment The input kzg point - /// @param pointProof The quotient kzg - function evaluatePoint( - bytes32 blobHash, - uint256 x, - uint256 y, - bytes1[48] memory commitment, - bytes1[48] memory pointProof - ) - internal - view - { - if (x >= BLS_MODULUS) revert POINT_X_TOO_LARGE(); - if (y >= BLS_MODULUS) revert POINT_Y_TOO_LARGE(); - - (bool ok,) = POINT_EVALUATION_PRECOMPILE_ADDRESS.staticcall( - abi.encodePacked(blobHash, x, y, commitment, pointProof) - ); - if (!ok) revert EVAL_FAILED(); - } -} diff --git a/packages/protocol/contracts/L1/ChainProver.sol b/packages/protocol/contracts/L1/ChainProver.sol deleted file mode 100644 index 5b8e2a0cf270..000000000000 --- a/packages/protocol/contracts/L1/ChainProver.sol +++ /dev/null @@ -1,83 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../common/EssentialContract.sol"; -import "../libs/LibAddress.sol"; -import "./TaikoData.sol"; -import "./TaikoErrors.sol"; -import "./VerifierRegistry.sol"; -import "./verifiers/IVerifier.sol"; - -/// @title ChainProver -/// @notice The prover contract for Taiko. -contract ChainProver is EssentialContract, TaikoErrors { - using LibAddress for address; - - /// @dev Struct representing transition to be proven. - struct ProofData { - IVerifier verifier; - bytes proof; - } - - /// @dev Struct representing transition to be proven. - struct ProofBatch { - // These 2 keccak(new_l1_blockhash, new_root)) will be the new state (hash) - // and the transition hash it the old and the new, hashed together. 
- uint64 newL1BlockNumber; // Which L1 block is "covered" (proved) with this transaction - bytes32 newL1Root; // The new root hash - ProofData[] proofs; - address prover; - } - - // New, and only state var - bytes32 public currentStateHash; //equals to: keccak(newL1BlockNumber, newL1Root) - - function init(address _owner, address _addressManager) external initializer { - if (_addressManager == address(0)) { - revert L1_INVALID_ADDRESS(); - } - __Essential_init(_owner, _addressManager); - } - - /// @dev Proves up until a specific L1 block - function prove(bytes calldata data) external nonReentrant whenNotPaused { - // Decode the block data - ProofBatch memory proofBatch = abi.decode(data, (ProofBatch)); - // This is hwo we get the transition hash - bytes32 l1BlockHash = blockhash(proofBatch.newL1BlockNumber); - bytes32 newStateHash = keccak256(abi.encode(l1BlockHash, proofBatch.newL1Root)); - - VerifierRegistry verifierRegistry = VerifierRegistry(resolve("verifier_registry", false)); - // Verify the proofs - uint160 prevVerifier = uint160(0); - for (uint256 i = 0; i < proofBatch.proofs.length; i++) { - IVerifier verifier = proofBatch.proofs[i].verifier; - // Make sure each verifier is unique - if (prevVerifier >= uint160(address(verifier))) { - revert L1_INVALID_OR_DUPLICATE_VERIFIER(); - } - // Make sure it's a valid verifier - require(verifierRegistry.isVerifier(address(verifier)), "invalid verifier"); - // Verify the proof - verifier.verifyProof( - keccak256(abi.encode(currentStateHash, newStateHash)), - proofBatch.prover, - proofBatch.proofs[i].proof - ); - prevVerifier = uint160(address(verifier)); - } - - // Make sure the supplied proofs are sufficient. - // Can use some custom logic here. but let's keep it simple - require(proofBatch.proofs.length >= 3, "insufficient number of proofs"); - - currentStateHash = newStateHash; - //todo(@Brecht, @Dani) If somebody still gets an invalid proof through, we have to have - // another safety mechanisms! (e.g.: guardians, etc.) - } -} diff --git a/packages/protocol/contracts/L1/ITaikoL1.sol b/packages/protocol/contracts/L1/ITaikoL1.sol deleted file mode 100644 index ca343a5ba571..000000000000 --- a/packages/protocol/contracts/L1/ITaikoL1.sol +++ /dev/null @@ -1,53 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "./TaikoData.sol"; - -/// @title ITaikoL1 -/// @custom:security-contact security@taiko.xyz -interface ITaikoL1 { - /// @notice Proposes a Taiko L2 block. - /// @param _params Block parameters, currently an encoded BlockParams object. - /// @param _txList txList data if calldata is used for DA. - /// @return meta_ The metadata of the proposed L2 block. - function proposeBlock( - bytes calldata _params, - bytes calldata _txList - ) - external - payable - returns (TaikoData.BlockMetadata memory meta_); - - /// @notice Proves or contests a block transition. - /// @param _blockId The index of the block to prove. This is also used to - /// select the right implementation version. - /// @param _input An abi-encoded (TaikoData.BlockMetadata, TaikoData.Transition, - /// TaikoData.TierProof) tuple. - function proveBlock(uint64 _blockId, bytes calldata _input) external; - - /// @notice Verifies up to a certain number of blocks. - /// @param _maxBlocksToVerify Max number of blocks to verify. - function verifyBlocks(uint64 _maxBlocksToVerify) external; - - /// @notice Pause block proving. - /// @param _pause True if paused. 
- function pauseProving(bool _pause) external; - - /// @notice Deposits Taiko token to be used as bonds. - /// @param _amount The amount of Taiko token to deposit. - function depositBond(uint256 _amount) external; - - /// @notice Withdraws Taiko token. - /// @param _amount The amount of Taiko token to withdraw. - function withdrawBond(uint256 _amount) external; - - // /// @notice Gets the prover that actually proved a verified block. - // /// @param _blockId The index of the block. - // /// @return The prover's address. If the block is not verified yet, address(0) will be - // returned. - // function getVerifiedBlockProver(uint64 _blockId) external view returns (address); - - /// @notice Gets the configuration of the TaikoL1 contract. - /// @return Config struct containing configuration parameters. - function getConfig() external pure returns (TaikoData.Config memory); -} diff --git a/packages/protocol/contracts/L1/TaikoData.sol b/packages/protocol/contracts/L1/TaikoData.sol deleted file mode 100644 index 91d983740b44..000000000000 --- a/packages/protocol/contracts/L1/TaikoData.sol +++ /dev/null @@ -1,81 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -/// @title TaikoData -/// @notice This library defines various data structures used in the Taiko -/// protocol. -library TaikoData { - /// @dev Struct holding Taiko configuration parameters. See {TaikoConfig}. - struct Config { - // The chain ID of the network where Taiko contracts are deployed. - uint64 chainId; - // The maximum gas limit allowed for a block. - uint32 blockMaxGasLimit; - // The maximum allowed bytes for the proposed transaction list calldata. - uint24 blockMaxTxListBytes; - } - - /// @dev Struct containing data only required for proving a block - struct BlockMetadata { - bytes32 blockHash; - bytes32 parentBlockHash; - bytes32 parentMetaHash; - bytes32 l1Hash; - uint256 difficulty; - bytes32 blobHash; - bytes32 extraData; - address coinbase; - uint64 l2BlockNumber; - uint32 gasLimit; - uint32 l1StateBlockNumber; - uint64 timestamp; - uint24 txListByteOffset; - uint24 txListByteSize; - // todo: Do we need this below ? - // bytes32 blobId OR blobHash; ? as per in current taiko-mono's preconfirmation branch ? - bool blobUsed; - bytes txList; - } - - /// @dev Struct representing transition to be proven. - struct Transition { - bytes32 parentBlockHash; - bytes32 blockHash; - } - - /// @dev Struct representing state transition data. - struct TransitionState { - bytes32 blockHash; //Might be removed.. - uint64 timestamp; - address prover; - uint64 verifiableAfter; - bool isProven; - } - - /// @dev Struct containing data required for verifying a block. - struct Block { - bytes32 blockHash; - bytes32 metaHash; - uint64 blockId; - uint64 timestamp; - uint32 l1StateBlockNumber; - } - - /// @dev Struct holding the state variables for the {TaikoL1} contract. 
- struct State { - mapping(uint256 blockId => Block) blocks; - mapping(uint256 blockId => mapping(bytes32 parentBlockHash => TransitionState)) transitions; - uint64 genesisHeight; - uint64 genesisTimestamp; - uint64 numBlocks; - uint64 lastVerifiedBlockId; - bool provingPaused; - uint64 lastUnpausedAt; - uint256[143] __gap; - } -} diff --git a/packages/protocol/contracts/L1/TaikoErrors.sol b/packages/protocol/contracts/L1/TaikoErrors.sol deleted file mode 100644 index 545a9843ab41..000000000000 --- a/packages/protocol/contracts/L1/TaikoErrors.sol +++ /dev/null @@ -1,55 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -/// @title TaikoErrors -/// @notice This abstract contract provides custom error declartions used in -/// the Taiko protocol. Each error corresponds to specific situations where -/// exceptions might be thrown. -abstract contract TaikoErrors { - // NOTE: The following custom errors must match the definitions in - // `L1/libs/*.sol`. - error L1_ALREADY_CONTESTED(); - error L1_ALREADY_PROVED(); - error L1_ASSIGNED_PROVER_NOT_ALLOWED(); - error L1_BLOB_FOR_DA_DISABLED(); - error L1_BLOB_NOT_FOUND(); - error L1_BLOB_NOT_REUSEABLE(); - error L1_BLOCK_MISMATCH(); - error L1_INCORRECT_BLOCK(); - error L1_INSUFFICIENT_TOKEN(); - error L1_INVALID_ADDRESS(); - error L1_INVALID_AMOUNT(); - error L1_INVALID_BLOCK_ID(); - error L1_INVALID_CONFIG(); - error L1_INVALID_ETH_DEPOSIT(); - error L1_INVALID_L1_STATE_BLOCK(); - error L1_INVALID_OR_DUPLICATE_VERIFIER(); - error L1_INVALID_PARAM(); - error L1_INVALID_PAUSE_STATUS(); - error L1_INVALID_PROOF(); - error L1_INVALID_PROPOSER(); - error L1_INVALID_PROVER(); - error L1_INVALID_TIER(); - error L1_INVALID_TIMESTAMP(); - error L1_INVALID_TRANSITION(); - error L1_LIVENESS_BOND_NOT_RECEIVED(); - error L1_NOT_ASSIGNED_PROVER(); - error L1_PROPOSER_NOT_EOA(); - error L1_PROVING_PAUSED(); - error L1_RECEIVE_DISABLED(); - error L1_TOO_MANY_BLOCKS(); - error L1_TOO_MANY_TIERS(); - error L1_TRANSITION_ID_ZERO(); - error L1_TRANSITION_NOT_FOUND(); - error L1_TXLIST_OFFSET_SIZE(); - error L1_TXLIST_TOO_LARGE(); - error L1_UNAUTHORIZED(); - error L1_UNEXPECTED_PARENT(); - error L1_UNEXPECTED_TRANSITION_ID(); - error L1_UNEXPECTED_TRANSITION_TIER(); -} diff --git a/packages/protocol/contracts/L1/TaikoEvents.sol b/packages/protocol/contracts/L1/TaikoEvents.sol deleted file mode 100644 index 06f77c1cad5f..000000000000 --- a/packages/protocol/contracts/L1/TaikoEvents.sol +++ /dev/null @@ -1,30 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "./TaikoData.sol"; - -/// @title TaikoEvents -/// @notice This abstract contract provides event declarations for the Taiko -/// protocol, which are emitted during block proposal, proof, verification, and -/// Ethereum deposit processes. -/// @dev The events defined here must match the definitions in the corresponding -/// L1 libraries. -abstract contract TaikoEvents { - /// @dev Emitted when a block is proposed. - /// @param blockId The ID of the proposed block. - /// @param meta The block metadata containing information about the proposed - /// block. - event BlockProposed(uint256 indexed blockId, TaikoData.BlockMetadata meta); - /// @dev Emitted when a block is verified. 
- /// @param blockId The ID of the verified block. - /// @param blockHash The hash of the verified block. - event BlockVerified(uint256 indexed blockId, bytes32 blockHash); - - /// @dev Emitted when a block transition is proved or re-proved. - event TransitionProved(uint256 indexed blockId, TaikoData.Transition tran, address prover); -} diff --git a/packages/protocol/contracts/L1/TaikoL1.sol b/packages/protocol/contracts/L1/TaikoL1.sol deleted file mode 100644 index cbc43be7d332..000000000000 --- a/packages/protocol/contracts/L1/TaikoL1.sol +++ /dev/null @@ -1,200 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../common/EssentialContract.sol"; -import "./TaikoErrors.sol"; -import "./preconfs/ISequencerRegistry.sol"; -import "./TaikoEvents.sol"; - -/// @title TaikoL1 -contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors { - event ProvingPaused(bool paused); - - uint256 public constant SECURITY_DELAY_AFTER_PROVEN = 8 hours; - - // According to EIP4844, each blob has up to 4096 field elements, and each - // field element has 32 bytes. - uint256 public constant MAX_BYTES_PER_BLOB = 4096 * 32; - - TaikoData.State public state; - uint256[100] private __gap; - - /// @notice Initializes the rollup. - /// @param _addressManager The {AddressManager} address. - /// @param _genesisBlockHash The block hash of the genesis block. - function init( - address _owner, - address _addressManager, - bytes32 _genesisBlockHash - ) - external - initializer - { - __Essential_init(_owner, _addressManager); - - TaikoData.Config memory config = getConfig(); - require(isConfigValid(config), "invalid config"); - - // Init state - state.genesisHeight = uint64(block.number); - state.genesisTimestamp = uint64(block.timestamp); - state.numBlocks = 1; - - // Init the genesis block - TaikoData.Block storage blk = state.blocks[0]; - blk.blockHash = _genesisBlockHash; - blk.timestamp = uint64(block.timestamp); - - emit BlockVerified({ blockId: 0, blockHash: _genesisBlockHash }); - } - - /// @dev Proposes multiple Taiko L2 blocks. - function proposeBlock( - TaikoData.BlockMetadata[] calldata data - ) - external - payable - nonReentrant - whenNotPaused - returns (TaikoData.BlockMetadata[] memory _blocks) - { - for (uint256 i = 0; i < data.length; i++) { - _proposeBlock(data[i]); - - // Check if we have whitelisted proposers - //if (!_isProposerPermitted()) { - // revert L1_INVALID_PROPOSER(); - //} - } - - _blocks = data; - } - - /// Proposes a Taiko L2 block. - /// @param _block Block parameters, currently an encoded BlockMetadata object. 
- function _proposeBlock( - TaikoData.BlockMetadata calldata _block - ) - private - { - //TaikoData.Config memory config = getConfig(); - - // Decode the block data - //_block = abi.decode(data, (TaikoData.BlockMetadata)); - - // Verify L1 data - // TODO(Brecht): needs to be more configurable for preconfirmations - // require(_block.l1Hash == blockhash(_block.l1StateBlockNumber), "INVALID_L1_BLOCKHASH"); - // require(_block.blockHash != 0x0, "INVALID_L2_BLOCKHASH"); - // //require(_block.difficulty == block.prevrandao, "INVALID_DIFFICULTY"); - // // Verify misc data - // require(_block.gasLimit == config.blockMaxGasLimit, "INVALID_GAS_LIMIT"); - - // require(_block.blobUsed == (_block.txList.length == 0), "INVALID_BLOB_USED"); - // // Verify DA data - // if (_block.blobUsed) { - // // Todo: Is blobHash posisble to be checked and pre-calculated in input metadata - // // off-chain ? - // // or shall we do something with it to cross check ? - // // require(_block.blobHash == blobhash(0), "invalid data blob"); - // require( - // uint256(_block.txListByteOffset) + _block.txListByteSize <= MAX_BYTES_PER_BLOB, - // "invalid blob size" - // ); - // } else { - // require(_block.blobHash == keccak256(txList), "INVALID_TXLIST_HASH"); - // require(_block.txListByteOffset == 0, "INVALID_TXLIST_START"); - // require(_block.txListByteSize == uint24(txList.length), "INVALID_TXLIST_SIZE"); - // } - - // // Check that the tx length is non-zero and within the supported range - // require(_block.txListByteSize <= config.blockMaxTxListBytes, "invalid txlist size"); - - /* NOT NEEDED ! Commenting out. When PR approved, i'll delete also. */ - // // Also since we dont write into storage this check is hard to do here + the - // // parentBlock.l1StateBlockNumber too for the preconfs (checking the 4 epoch window) - // // I just guess, but also during proving we can see if this condition is - // // fulfilled OR not, and then resulting in an empty block (+slashing of the - // // proposer/preconfer) ? - // TaikoData.Block storage parentBlock = state.blocks[(state.numBlocks - 1)]; - - // require(_block.parentMetaHash == parentBlock.metaHash, "invalid parentMetaHash"); - // require(_block.parentBlockHash == parentBlock.blockHash, "invalid parentHash"); - - // // Verify the passed in L1 state block number. - // // We only allow the L1 block to be 4 epochs old. - // // The other constraint is that the L1 block number needs to be larger than or equal the one - // // in the previous L2 block. - - // if ( - // _block.l1StateBlockNumber + 128 < block.number - // || _block.l1StateBlockNumber >= block.number - // || _block.l1StateBlockNumber < parentBlock.l1StateBlockNumber - // ) { - // revert L1_INVALID_L1_STATE_BLOCK(); - // } - - // // Verify the passed in timestamp. - // // We only allow the timestamp to be 4 epochs old. - // // The other constraint is that the timestamp needs to be larger than or equal the one - // // in the previous L2 block. - // if ( - // _block.timestamp + 128 * 12 < block.timestamp || _block.timestamp > block.timestamp - // || _block.timestamp < parentBlock.timestamp - // ) { - // revert L1_INVALID_TIMESTAMP(); - // } - - emit BlockProposed({ blockId: _block.l2BlockNumber, meta: _block }); - } - - // These will be unknown in the smart contract - // NOT NEEDED ! Commenting out. When PR approved, i'll delete also. - // Maybe possible to extract with ChainProver, but not directly from here. 
- // function getBlock(uint64 blockId) {} - // function getLastVerifiedBlockId() {} - // function getNumOfBlocks() {} - - /// @notice Gets the configuration of the TaikoL1 contract. - /// @return Config struct containing configuration parameters. - function getConfig() public view virtual returns (TaikoData.Config memory) { - return TaikoData.Config({ - chainId: 167_008, //Maybe use a range or just thro this shit away. - // Limited by the PSE zkEVM circuits. - blockMaxGasLimit: 15_000_000, - // Each go-ethereum transaction has a size limit of 128KB, - // and right now txList is still saved in calldata, so we set it - // to 120KB. - blockMaxTxListBytes: 120_000 - }); - } - - function isConfigValid(TaikoData.Config memory config) public pure returns (bool) { - if ( - config.chainId <= 1 // - || config.blockMaxGasLimit == 0 || config.blockMaxTxListBytes == 0 - || config.blockMaxTxListBytes > 128 * 1024 // calldata up to 128K - ) return false; - - return true; - } - - // Additinal proposer rules - function _isProposerPermitted() private returns (bool) { - // If there's a sequencer registry, check if the block can be proposed by the current - // proposer - ISequencerRegistry sequencerRegistry = - ISequencerRegistry(resolve("sequencer_registry", true)); - if (sequencerRegistry != ISequencerRegistry(address(0))) { - if (!sequencerRegistry.isEligibleSigner(msg.sender)) { - return false; - } - } - return true; - } -} diff --git a/packages/protocol/contracts/L1/VerifierBattleRoyale.sol b/packages/protocol/contracts/L1/VerifierBattleRoyale.sol deleted file mode 100644 index bbb8ba7e8dac..000000000000 --- a/packages/protocol/contracts/L1/VerifierBattleRoyale.sol +++ /dev/null @@ -1,177 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../common/AddressResolver.sol"; -import "../common/EssentialContract.sol"; -import "../libs/LibAddress.sol"; -import "./verifiers/IVerifier.sol"; -import "./VerifierRegistry.sol"; -import "./TaikoData.sol"; - -import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; - -/// @title VerifierBattleRoyale -/// @notice A permissionless bounty to claim a reward for breaking a prover -contract VerifierBattleRoyale is EssentialContract { - struct Bounty { - uint256 startedAt; - uint256 rate; // per second - uint256 maxReward; - uint256 claimedAt; - address winner; - } - - /// @dev Struct representing transition to be proven. - struct ProofData { - IVerifier verifier; - bytes32 postRoot; // post root from this hashing: keccak(new_l1_blockhash, new_root) - bytes proof; - } - - struct ProofBatch { - bytes32 preTransitionHash; //(l1BlockHash and root) // This has to be same for all - // proofData, and we need to prove that we can achieve different post state -> which - // should not be allowed. - bytes32 postL1BlockHash; - ProofData[] proofs; - address prover; - } - - uint256 public constant PERCENTAGE_CLAIMED_IMMEDIATELY = 25; - - VerifierRegistry public verifierRegistry; - mapping(address verifier => Bounty) public bounties; - - function init(address _addressManager) external initializer { - __Essential_init(_addressManager); - } - - /// @dev Proposes a Taiko L2 block. 
- function openBounty(address verifier, Bounty memory bounty) external onlyOwner { - require(bounty.winner == address(0), "winner needs to be set to 0"); - bounties[verifier] = bounty; - } - - // Allows anyone to claim the bounty be proving that some verifier is broken - function claimBounty(address brokenVerifier, bytes calldata data) external { - require(bounties[brokenVerifier].startedAt != 0, "bounty doesn't exist"); - require(bounties[brokenVerifier].winner == address(0), "bounty already claimed"); - - // Decode the block data - ProofBatch memory proofBatch = abi.decode(data, (ProofBatch)); - - // Verify the all the proofs - for (uint256 i = 0; i < proofBatch.proofs.length; i++) { - IVerifier verifier = proofBatch.proofs[i].verifier; - require(verifierRegistry.isVerifier(address(verifier)), "invalid verifier"); - - bytes32 transitionToBeVerified = keccak256( - abi.encode( - proofBatch.preTransitionHash, - keccak256(abi.encode(proofBatch.postL1BlockHash, proofBatch.proofs[i].postRoot)) - ) - ); - - verifier.verifyProof( - transitionToBeVerified, proofBatch.prover, proofBatch.proofs[i].proof - ); - } - - if (proofBatch.proofs.length == 2) { - /* Same verifier, same block, but different blockhashes/signalroots */ - require( - proofBatch.proofs[0].verifier == proofBatch.proofs[1].verifier, - "verifiers not the same" - ); - require( - address(proofBatch.proofs[0].verifier) == brokenVerifier, - "incorrect broken verifier address" - ); - - require( - proofBatch.proofs[0].postRoot != proofBatch.proofs[1].postRoot, - "post state is the same" - ); - } else if (proofBatch.proofs.length == 3) { - /* Multiple verifiers in a consensus show that another verifier is faulty */ - - // Check that all verifiers are unique - // Verify the proofs - uint160 prevVerifier = 0; - for (uint256 i = 0; i < proofBatch.proofs.length; i++) { - require( - prevVerifier >= uint160(address(proofBatch.proofs[i].verifier)), - "duplicated verifier" - ); - prevVerifier = uint160(address(proofBatch.proofs[i].verifier)); - } - - // Reference proofs need to be placed first in the array, the faulty proof is listed - // last - require( - proofBatch.proofs[0].postRoot == proofBatch.proofs[1].postRoot, "incorrect order" - ); - require( - proofBatch.proofs[1].postRoot != proofBatch.proofs[2].postRoot, "incorrect order" - ); - - //require also that brokenVerifier is the same as the 3rd's verifier address - require( - proofBatch.proofs[1].postRoot != proofBatch.proofs[2].postRoot, "incorrect order" - ); - require( - address(proofBatch.proofs[1].verifier) == brokenVerifier, - "incorrect broken verifier address" - ); - } else { - revert("unsupported claim"); - } - - // Mark the bounty as claimed - bounties[brokenVerifier].claimedAt = block.timestamp; - bounties[brokenVerifier].winner = msg.sender; - - // Distribute part of the reward immediately - uint256 initialReward = - (calculateTotalReward(bounties[brokenVerifier]) * PERCENTAGE_CLAIMED_IMMEDIATELY) / 100; - IERC20 tko = IERC20(resolve("taiko_token", false)); - tko.transfer(bounties[brokenVerifier].winner, initialReward); - - // Poison the verifier so it cannot be used anymore - verifierRegistry.poisonVerifier(brokenVerifier); - } - - // Called after the one who claimed a bounty has either disclosed - // how the prover was broken or not - function closeBounty(address verifier, bool disclosed) external onlyOwner { - require(bounties[verifier].winner != address(0), "bounty not claimed yet"); - - // Transfer out the remaining locked part only the winner has disclosed how the prover 
was - // broken - if (disclosed) { - // Distribute the remaining part of the reward - uint256 remainingReward = ( - calculateTotalReward(bounties[verifier]) * (100 - PERCENTAGE_CLAIMED_IMMEDIATELY) - ) / 100; - IERC20 tko = IERC20(resolve("taiko_token", false)); - tko.transfer(bounties[verifier].winner, remainingReward); - } - - // Delete the bounty - // A new bounty needs to be started for the verifier - delete bounties[verifier]; - } - - function calculateTotalReward(Bounty memory bounty) internal pure returns (uint256) { - uint256 accumulated = (bounty.claimedAt - bounty.startedAt) * bounty.rate; - if (accumulated > bounty.maxReward) { - accumulated = bounty.maxReward; - } - return accumulated; - } -} diff --git a/packages/protocol/contracts/L1/VerifierRegistry.sol b/packages/protocol/contracts/L1/VerifierRegistry.sol deleted file mode 100644 index c50145e98419..000000000000 --- a/packages/protocol/contracts/L1/VerifierRegistry.sol +++ /dev/null @@ -1,49 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../common/AddressResolver.sol"; -import "../common/EssentialContract.sol"; - -/// @title VerifierRegistry -/// @notice A registry for handling all known verifiers -contract VerifierRegistry is EssentialContract { - struct Verifier { - uint16 id; - bytes4 tag; - bool poisoned; - } - - mapping(address verifier => Verifier) public verifiers; - mapping(address verifier => uint256 id) public verifierId; - mapping(uint256 id => address verifier) public verifierAddress; - - uint16 public verifierIdGenerator; - - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - verifierIdGenerator = 1; - } - - /// Adds a verifier - function addVerifier(address verifier, bytes4 tag) external onlyOwner { - // Generate a unique id - uint16 id = verifierIdGenerator++; - verifiers[verifier] = Verifier({ id: id, tag: tag, poisoned: false }); - verifierId[verifier] = id; - verifierAddress[id] = verifier; - } - - /// Makes a verifier unusable - function poisonVerifier(address verifier) external onlyFromOwnerOrNamed("verifier_watchdog") { - delete verifiers[verifier]; - } - - function isVerifier(address addr) external view returns (bool) { - return verifiers[addr].id != 0 && !verifiers[addr].poisoned; - } -} diff --git a/packages/protocol/contracts/L1/actors/PBSActor.sol b/packages/protocol/contracts/L1/actors/PBSActor.sol deleted file mode 100644 index 128dba47818a..000000000000 --- a/packages/protocol/contracts/L1/actors/PBSActor.sol +++ /dev/null @@ -1,38 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; -import "../../common/AddressResolver.sol"; -import "../../libs/LibAddress.sol"; -import "../TaikoData.sol"; -import "./ProverPayment.sol"; - -/// @title LibProposing -/// @notice A library for handling block proposals in the Taiko protocol. -contract PBSActor { - using LibAddress for address; - - ProverPayment public operator; - - /// @dev Proposes a Taiko L2 block. 
- function proposeBlock( - bytes[] calldata data, - bytes[] calldata txLists, - bytes memory proverPaymentData, - uint256 tip - ) - external - payable - { - // TODO(Brecht): just pass in opaque data to make it general, though kind of doesn't matter - operator.proposeBlock{ value: msg.value - tip }(data, txLists, proverPaymentData); - - // Do conditional payment - address(block.coinbase).sendEtherAndVerify(tip); - } -} diff --git a/packages/protocol/contracts/L1/actors/ProverPayment.sol b/packages/protocol/contracts/L1/actors/ProverPayment.sol deleted file mode 100644 index c36f64bb5258..000000000000 --- a/packages/protocol/contracts/L1/actors/ProverPayment.sol +++ /dev/null @@ -1,101 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../../common/AddressResolver.sol"; -import "../../libs/LibAddress.sol"; -import "../TaikoData.sol"; -import "../TaikoL1.sol"; - -/// @title ProverPayment -/// @notice A library for handling block proposals in the Taiko protocol. -contract ProverPayment { - using LibAddress for address; - - struct ProverAssignment { - address prover; - uint256 fee; - uint64 maxBlockId; - uint64 maxProposedIn; - bytes32 metaHash; - bytes signature; - } - - TaikoL1 public taikoL1; - - mapping(address => uint256) public balances; - - // Max gas paying the prover. This should be large enough to prevent the - // worst cases, usually block proposer shall be aware the risks and only - // choose provers that cannot consume too much gas when receiving Ether. - uint256 public constant MAX_GAS_PAYING_PROVER = 200_000; - - /// @dev Proposes a Taiko L2 block. - function proposeBlock( - bytes[] calldata data, - bytes[] calldata txLists, - bytes calldata proverAssignment - ) - external - payable - returns (TaikoData.BlockMetadata[] memory _blocks) - { - // Decode the assignment data - ProverAssignment memory assignment = abi.decode(proverAssignment, (ProverAssignment)); - - // // Subtract prover bond from the prover - // balances[assignment.prover] -= taikoL1.PROVER_BOND(); - - // // Propose the block - // _blocks = - // taikoL1.proposeBlock{ value: taikoL1.PROVER_BOND() }(data, txLists, - // assignment.prover); - - uint64 highestl2BlockNumber = _blocks[_blocks.length - 1].l2BlockNumber; - - // Hash the assignment with the blobHash, this hash will be signed by - // the prover, therefore, we add a string as a prefix. - // IMPORTANT!! Assignment now multi-block assignment!! 
- bytes32 hash = hashAssignment(assignment); - require(assignment.prover.isValidSignature(hash, assignment.signature), "invalid signature"); - - // Check assignment validity - require( - (assignment.metaHash != 0 || keccak256(abi.encode(_blocks)) != assignment.metaHash) - && (assignment.maxBlockId != 0 || highestl2BlockNumber > assignment.maxBlockId) - && (assignment.maxProposedIn != 0 || block.number > assignment.maxProposedIn), - "unexpected block" - ); - - // Pay the prover - assignment.prover.sendEtherAndVerify(msg.value, MAX_GAS_PAYING_PROVER); - } - - function hashAssignment(ProverAssignment memory assignment) internal view returns (bytes32) { - return keccak256( - abi.encode( - "PROVER_ASSIGNMENT", - address(this), - block.chainid, - assignment.metaHash, - msg.value, - assignment.maxBlockId, - assignment.maxProposedIn - ) - ); - } - - function deposit(address to) external payable { - balances[to] += msg.value; - } - - // TODO(Brecht): delay - function witdraw(address from, address to, uint256 amount) external { - balances[from] -= amount; - to.sendEtherAndVerify(amount); - } -} diff --git a/packages/protocol/contracts/L1/preconfs/ISequencerRegistry.sol b/packages/protocol/contracts/L1/preconfs/ISequencerRegistry.sol deleted file mode 100644 index a0bfcbf5d531..000000000000 --- a/packages/protocol/contracts/L1/preconfs/ISequencerRegistry.sol +++ /dev/null @@ -1,10 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title ISequencerRegistry -/// @custom:security-contact security@taiko.xyz -interface ISequencerRegistry { - /// @notice Return true if the specified address can propose blocks, false otherwise - /// @param _proposer The address proposing a block - function isEligibleSigner(address _proposer) external returns (bool); -} diff --git a/packages/protocol/contracts/L1/preconfs/SequencerRegistry.sol b/packages/protocol/contracts/L1/preconfs/SequencerRegistry.sol deleted file mode 100644 index 7842512169f7..000000000000 --- a/packages/protocol/contracts/L1/preconfs/SequencerRegistry.sol +++ /dev/null @@ -1,50 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../../common/EssentialContract.sol"; -import "./ISequencerRegistry.sol"; - -/// @title SequencerRegistry -/// A dummy implementation that only whitelist some trusted addresses. A real -/// implementation would only allow a single proposer address to propose a block -/// using some selection mechanism. -/// @custom:security-contact security@taiko.xyz -contract SequencerRegistry is EssentialContract, ISequencerRegistry { - /// @dev Emitted when the status of a sequencer is updated. - /// @param sequencer The address of the sequencer whose state has updated. - /// @param enabled If the sequencer is now enabled or not. - event SequencerUpdated(address indexed sequencer, bool enabled); - - /// @notice Whitelisted sequencers - mapping(address sequencer => bool enabled) public sequencers; - - uint256[49] private __gap; - - /// @notice Initializes the contract with the provided address manager. - /// @param _owner The address of the owner. 
- function init(address _owner) external initializer { - __Essential_init(_owner); - } - - /// @notice Sets/unsets an the imageId as trusted entity - /// @param _sequencers The list of sequencers - /// @param _enabled The corresponding list of the new status of the sequencers - function setSequencers( - address[] memory _sequencers, - bool[] memory _enabled - ) - external - onlyOwner - { - require(_sequencers.length == _enabled.length, "invalid input data"); - for (uint256 i = 0; i < _sequencers.length; i++) { - sequencers[_sequencers[i]] = _enabled[i]; - emit SequencerUpdated(_sequencers[i], _enabled[i]); - } - } - - /// @inheritdoc ISequencerRegistry - function isEligibleSigner(address _proposer) external view returns (bool) { - return sequencers[_proposer]; - } -} diff --git a/packages/protocol/contracts/L1/provers/GuardianProver.sol b/packages/protocol/contracts/L1/provers/GuardianProver.sol deleted file mode 100644 index 539b9858add4..000000000000 --- a/packages/protocol/contracts/L1/provers/GuardianProver.sol +++ /dev/null @@ -1,39 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "./Guardians.sol"; - -/// @title GuardianProver -contract GuardianProver is Guardians { - error PROVING_FAILED(); - - /// @notice Initializes the contract with the provided address manager. - /// @param _addressManager The address of the address manager contract. - function init(address _addressManager) external initializer { - __Essential_init(_addressManager); - } - - /// @dev Called by guardians to approve a guardian proof - function approve( - TaikoData.BlockMetadata calldata meta, - TaikoData.Transition calldata tran - ) - external - whenNotPaused - nonReentrant - returns (bool approved) - { - bytes32 hash = keccak256(abi.encode(meta, tran)); - approved = approve(meta.l2BlockNumber, hash); - - if (approved) { - deleteApproval(hash); - //ITaikoL1(resolve("taiko", false)).proveBlock(meta.id, abi.encode(meta, tran, proof)); - } - } -} diff --git a/packages/protocol/contracts/L1/provers/GuardianProver_tm.sol b/packages/protocol/contracts/L1/provers/GuardianProver_tm.sol deleted file mode 100644 index 5638929f2350..000000000000 --- a/packages/protocol/contracts/L1/provers/GuardianProver_tm.sol +++ /dev/null @@ -1,286 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; -import "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; -import "../../common/EssentialContract.sol"; -import "../../common/LibStrings.sol"; -import "../verifiers/IVerifier.sol"; -import "../ITaikoL1.sol"; - -/// @dev IMPORTANT NOTICE!! -/// @title GuardianProver - brought over from taiko-mono repository BUT MADE SOME CHANGES to make it -/// compiling. If we won't use, we can just delete/ignore this. -/// This prover uses itself as the verifier. 
-/// @custom:security-contact security@taiko.xyz -contract GuardianProver is EssentialContract { - /// @notice Contains the index of the guardian in `guardians` plus one (zero means not a - /// guardian) - /// @dev Slot 1 - mapping(address guardian => uint256 id) public guardianIds; - - /// @notice Mapping to store the approvals for a given hash, for a given version - mapping(uint256 version => mapping(bytes32 proofHash => uint256 approvalBits)) public approvals; - - /// @notice The set of guardians - /// @dev Slot 3 - address[] public guardians; - - /// @notice The version of the guardians - /// @dev Slot 4 - uint32 public version; - - /// @notice The minimum number of guardians required to approve - uint32 public minGuardians; - - /// @notice True to enable pausing taiko proving upon conflicting proofs - bool public provingAutoPauseEnabled; - - /// @notice Mapping from blockId to its latest proof hash - /// @dev Slot 5 - mapping(uint256 version => mapping(uint256 blockId => bytes32 hash)) public latestProofHash; - - uint256[45] private __gap; - - /// @notice Emitted when a guardian proof is approved. - /// @param addr The address of the guardian. - /// @param blockId The block ID. - /// @param blockHash The block hash. - /// @param approved If the proof is approved. - /// @param proofData The proof data. - event GuardianApproval( - address indexed addr, - uint256 indexed blockId, - bytes32 indexed blockHash, - bool approved, - bytes proofData - ); - - /// @notice Emitted when the set of guardians is updated - /// @param version The new version - /// @param guardians The new set of guardians - event GuardiansUpdated(uint32 version, address[] guardians); - - /// @notice Emitted when an approval is made - /// @param operationId The operation ID - /// @param approvalBits The new approval bits - /// @param minGuardiansReached If the proof was submitted - event Approved(uint256 indexed operationId, uint256 approvalBits, bool minGuardiansReached); - - /// @notice Emitted when a guardian prover submit a different proof for the same block - /// @param blockId The block ID - /// @param guardian The guardian prover address - /// @param currentProofHash The existing proof hash - /// @param newProofHash The new and different proof hash - /// @param provingPaused True if TaikoL1's proving is paused. - event ConflictingProofs( - uint256 indexed blockId, - address indexed guardian, - bytes32 currentProofHash, - bytes32 newProofHash, - bool provingPaused - ); - - /// @notice Emitted when auto pausing is enabled. - /// @param enabled True if TaikoL1 proving auto-pause is enabled. - event ProvingAutoPauseEnabled(bool indexed enabled); - - error GP_INVALID_GUARDIAN(); - error GP_INVALID_GUARDIAN_SET(); - error GP_INVALID_MIN_GUARDIANS(); - error GP_INVALID_STATUS(); - error GV_PERMISSION_DENIED(); - error GV_ZERO_ADDRESS(); - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - } - - /// @notice Set the set of guardians - /// @param _newGuardians The new set of guardians - /// @param _minGuardians The minimum required to sign - /// @param _clearData true to invalidate all existing data. 
- function setGuardians( - address[] memory _newGuardians, - uint8 _minGuardians, - bool _clearData - ) - external - onlyOwner - { - // We need at most 255 guardians (so the approval bits fit in a uint256) - if (_newGuardians.length == 0 || _newGuardians.length > type(uint8).max) { - revert GP_INVALID_GUARDIAN_SET(); - } - // Minimum number of guardians to approve is at least equal or greater than half the - // guardians (rounded up) and less or equal than the total number of guardians - if (_minGuardians == 0 || _minGuardians > _newGuardians.length) { - revert GP_INVALID_MIN_GUARDIANS(); - } - - // Delete the current guardians - for (uint256 i; i < guardians.length; ++i) { - delete guardianIds[guardians[i]]; - } - delete guardians; - - // Set the new guardians - for (uint256 i; i < _newGuardians.length; ++i) { - address guardian = _newGuardians[i]; - if (guardian == address(0)) revert GP_INVALID_GUARDIAN(); - // This makes sure there are not duplicate addresses - if (guardianIds[guardian] != 0) revert GP_INVALID_GUARDIAN_SET(); - - // Save and index the guardian - guardians.push(guardian); - guardianIds[guardian] = guardians.length; - } - - // Bump the version so previous approvals get invalidated - if (_clearData) ++version; - - minGuardians = _minGuardians; - emit GuardiansUpdated(version, _newGuardians); - } - - /// @dev Enables or disables proving auto pause. - /// @param _enable true to enable, false to disable. - function enableProvingAutoPause(bool _enable) external onlyOwner { - if (provingAutoPauseEnabled == _enable) revert GP_INVALID_STATUS(); - provingAutoPauseEnabled = _enable; - - emit ProvingAutoPauseEnabled(_enable); - } - - /// @notice Enables unlimited allowance for Taiko L1 contract. - /// param _enable true if unlimited allowance is approved, false to set the allowance to 0. - function enableTaikoTokenAllowance(bool _enable) external onlyOwner { - address tko = resolve(LibStrings.B_TAIKO_TOKEN, false); - address taiko = resolve(LibStrings.B_TAIKO, false); - IERC20(tko).approve(taiko, _enable ? type(uint256).max : 0); - } - - /// @dev Withdraws Taiko Token to a given address. - /// @param _to The recipient address. - /// @param _amount The amount of Taiko token to withdraw. Use 0 for all balance. - function withdrawTaikoToken(address _to, uint256 _amount) external onlyOwner { - if (_to == address(0)) revert GV_ZERO_ADDRESS(); - - IERC20 tko = IERC20(resolve(LibStrings.B_TAIKO_TOKEN, false)); - uint256 amount = _amount == 0 ? tko.balanceOf(address(this)) : _amount; - tko.transfer(_to, amount); - } - - /// @dev Called by guardians to approve a guardian proof - /// @param _meta The block's metadata. - /// @param _tran The valid transition. - /// @return approved_ True if the minimum number of approval is acquired, false otherwise. - function approve( - TaikoData.BlockMetadata calldata _meta, - TaikoData.Transition calldata _tran /*, - TaikoData.TierProof calldata _proof*/ - ) - external - whenNotPaused - nonReentrant - returns (bool approved_) - { - bytes32 proofHash = keccak256(abi.encode(_meta, _tran, "")); //"" shall be removed - uint256 _version = version; - bytes32 currProofHash = latestProofHash[_version][0]; // constant ID for now.. for taiko-mon - // vs. taiko simplified comp. 
- - if (currProofHash == 0) { - latestProofHash[_version][0] = proofHash; - currProofHash = proofHash; - } - - bool conflicting = currProofHash != proofHash; - bool pauseProving = conflicting && provingAutoPauseEnabled - && address(this) == resolve(LibStrings.B_CHAIN_WATCHDOG, true); - - if (conflicting) { - latestProofHash[_version][0] = proofHash; - emit ConflictingProofs(0, msg.sender, currProofHash, proofHash, pauseProving); - } - - if (pauseProving) { - ITaikoL1(resolve(LibStrings.B_TAIKO, false)).pauseProving(true); - } else { - approved_ = _approve(0, proofHash); - emit GuardianApproval(msg.sender, 0, _tran.blockHash, approved_, ""); // "" = empty - // bytes - - if (approved_) { - delete approvals[_version][proofHash]; - delete latestProofHash[_version][0]; - - ITaikoL1(resolve(LibStrings.B_TAIKO, false)).proveBlock( - 0, - abi.encode(_meta, _tran, "") // empty bytes as proof for taiko-simplified vs. - // taiko-mono comp - ); - } - } - } - - /// @notice Pauses chain proving and verification. - function pauseTaikoProving() external whenNotPaused { - if (guardianIds[msg.sender] == 0) revert GP_INVALID_GUARDIAN(); - - if (address(this) != resolve(LibStrings.B_CHAIN_WATCHDOG, true)) { - revert GV_PERMISSION_DENIED(); - } - - ITaikoL1(resolve(LibStrings.B_TAIKO, false)).pauseProving(true); - } - - function verifyProof( - /*Context calldata _ctx,*/ - TaikoData.Transition calldata /*, - TaikoData.TierProof calldata*/ - ) - external - view - { - //if (_ctx.msgSender != address(this)) revert GV_PERMISSION_DENIED(); - } - - /// @notice Returns the number of guardians - /// @return The number of guardians - function numGuardians() public view returns (uint256) { - return guardians.length; - } - - function _approve(uint256 _blockId, bytes32 _proofHash) internal returns (bool approved_) { - uint256 id = guardianIds[msg.sender]; - if (id == 0) revert GP_INVALID_GUARDIAN(); - - uint256 _version = version; - - unchecked { - approvals[_version][_proofHash] |= 1 << (id - 1); - } - - uint256 _approval = approvals[_version][_proofHash]; - approved_ = _isApproved(_approval); - emit Approved(_blockId, _approval, approved_); - } - - function _isApproved(uint256 _approvalBits) private view returns (bool) { - uint256 count; - uint256 bits = _approvalBits; - uint256 guardiansLength = guardians.length; - unchecked { - for (uint256 i; i < guardiansLength; ++i) { - if (bits & 1 == 1) ++count; - if (count == minGuardians) return true; - bits >>= 1; - } - } - return false; - } -} diff --git a/packages/protocol/contracts/L1/provers/Guardians.sol b/packages/protocol/contracts/L1/provers/Guardians.sol deleted file mode 100644 index 5976e6650226..000000000000 --- a/packages/protocol/contracts/L1/provers/Guardians.sol +++ /dev/null @@ -1,108 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../../common/EssentialContract.sol"; -import "../TaikoData.sol"; - -/// @title Guardians -abstract contract Guardians is EssentialContract { - uint256 public constant MIN_NUM_GUARDIANS = 5; - - mapping(address guardian => uint256 id) public guardianIds; // slot 1 - mapping(uint32 version => mapping(bytes32 => uint256 approvalBits)) internal _approvals; - address[] public guardians; // slot 3 - uint32 public version; // slot 4 - uint32 public minGuardians; - - uint256[46] private __gap; - - event GuardiansUpdated(uint32 version, address[] guardians); - event 
Approved(uint256 indexed operationId, uint256 approvalBits, bool proofSubmitted); - - error INVALID_GUARDIAN(); - error INVALID_GUARDIAN_SET(); - error INVALID_MIN_GUARDIANS(); - error INVALID_PROOF(); - - /// @notice Set the set of guardians - /// @param _guardians The new set of guardians - function setGuardians( - address[] memory _guardians, - uint8 _minGuardians - ) - external - onlyOwner - nonReentrant - { - if (_guardians.length < MIN_NUM_GUARDIANS || _guardians.length > type(uint8).max) { - revert INVALID_GUARDIAN_SET(); - } - if ( - _minGuardians == 0 || _minGuardians < _guardians.length / 2 - || _minGuardians > _guardians.length - ) revert INVALID_MIN_GUARDIANS(); - - // Delete current guardians data - for (uint256 i; i < guardians.length; ++i) { - delete guardianIds[guardians[i]]; - } - assembly { - sstore(guardians.slot, 0) - } - - for (uint256 i = 0; i < _guardians.length;) { - address guardian = _guardians[i]; - if (guardian == address(0)) revert INVALID_GUARDIAN(); - if (guardianIds[guardian] != 0) revert INVALID_GUARDIAN_SET(); - - // Save and index the guardian - guardians.push(guardian); - guardianIds[guardian] = ++i; - } - - minGuardians = _minGuardians; - emit GuardiansUpdated(++version, _guardians); - } - - function isApproved(bytes32 hash) public view returns (bool) { - return isApproved(_approvals[version][hash]); - } - - function numGuardians() public view returns (uint256) { - return guardians.length; - } - - function approve(uint256 operationId, bytes32 hash) internal returns (bool approved) { - uint256 id = guardianIds[msg.sender]; - if (id == 0) revert INVALID_GUARDIAN(); - - unchecked { - _approvals[version][hash] |= 1 << (id - 1); - } - - approved = isApproved(_approvals[version][hash]); - emit Approved(operationId, _approvals[version][hash], approved); - } - - function deleteApproval(bytes32 hash) internal { - delete _approvals[version][hash]; - } - - function isApproved(uint256 approvalBits) internal view returns (bool) { - uint256 count; - uint256 bits = approvalBits; - unchecked { - for (uint256 i; i < guardians.length; ++i) { - if (bits & 1 == 1) ++count; - if (count == minGuardians) return true; - bits >>= 1; - } - } - return false; - } -} diff --git a/packages/protocol/contracts/L1/verifiers/GuardianVerifier.sol b/packages/protocol/contracts/L1/verifiers/GuardianVerifier.sol deleted file mode 100644 index 7f1cf7b64a96..000000000000 --- a/packages/protocol/contracts/L1/verifiers/GuardianVerifier.sol +++ /dev/null @@ -1,38 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../../common/EssentialContract.sol"; -import "../TaikoData.sol"; -import "./IVerifier.sol"; - -/// @title GuardianVerifier -contract GuardianVerifier is EssentialContract, IVerifier { - uint256[50] private __gap; - - error PERMISSION_DENIED(); - - /// @notice Initializes the contract with the provided address manager. - /// @param _addressManager The address of the address manager contract. 
- function init(address _addressManager) external initializer { - __Essential_init(_addressManager); - } - - /// @inheritdoc IVerifier - function verifyProof( - bytes32, /*transitionHash*/ - address prover, - bytes calldata /*proof*/ - ) - external - view - { - if (prover != resolve("guardian_prover", false)) { - revert PERMISSION_DENIED(); - } - } -} diff --git a/packages/protocol/contracts/L1/verifiers/IVerifier.sol b/packages/protocol/contracts/L1/verifiers/IVerifier.sol deleted file mode 100644 index 85a8e8f69f9b..000000000000 --- a/packages/protocol/contracts/L1/verifiers/IVerifier.sol +++ /dev/null @@ -1,21 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../TaikoData.sol"; - -/// @title IVerifier Interface -/// @notice Defines the function that handles proof verification. -interface IVerifier { - function verifyProof( - bytes32 transitionHash, // keccak(keccak(current_l1_blockhash, current_root), - // keccak(new_l1_blockhash, new_root)) - address prover, - bytes calldata proof - ) - external; -} diff --git a/packages/protocol/contracts/L1/verifiers/MockSgxVerifier.sol b/packages/protocol/contracts/L1/verifiers/MockSgxVerifier.sol deleted file mode 100644 index 26d245f636c4..000000000000 --- a/packages/protocol/contracts/L1/verifiers/MockSgxVerifier.sol +++ /dev/null @@ -1,194 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/utils/cryptography/ECDSA.sol"; -import "../TaikoL1.sol"; -import "../../common/EssentialContract.sol"; -import "../../automata-attestation/interfaces/IAttestation.sol"; -import "../../automata-attestation/lib/QuoteV3Auth/V3Struct.sol"; -import "./libs/LibPublicInput.sol"; -import "./IVerifier.sol"; - -/// @title MockSgxVerifier -/// @notice This contract is the implementation of verifying SGX signature proofs -/// onchain. -/// @dev Please see references below: -/// - Reference #1: https://ethresear.ch/t/2fa-zk-rollups-using-sgx/14462 -/// - Reference #2: https://github.com/gramineproject/gramine/discussions/1579 -/// @custom:security-contact security@taiko.xyz -contract MockSgxVerifier is EssentialContract, IVerifier { - /// @dev Each public-private key pair (Ethereum address) is generated within - /// the SGX program when it boots up. The off-chain remote attestation - /// ensures the validity of the program hash and has the capability of - /// bootstrapping the network with trustworthy instances. - struct Instance { - address addr; - uint64 validSince; - } - - /// @notice The expiry time for the SGX instance. - uint64 public constant INSTANCE_EXPIRY = 365 days; - - /// @notice A security feature, a delay until an instance is enabled when using onchain RA - /// verification - uint64 public constant INSTANCE_VALIDITY_DELAY = 0; - - /// @dev For gas savings, we shall assign each SGX instance with an id that when we need to - /// set a new pub key, just write storage once. - /// Slot 1. - uint256 public nextInstanceId; - - /// @dev One SGX instance is uniquely identified (on-chain) by it's ECDSA public key - /// (or rather ethereum address). Once that address is used (by proof verification) it has to be - /// overwritten by a new one (representing the same instance). This is due to side-channel - /// protection. Also this public key shall expire after some time - /// (for now it is a long enough 6 months setting). - /// Slot 2. 
- mapping(uint256 instanceId => Instance instance) public instances; - - /// @dev One address shall be registered (during attestation) only once, otherwise it could - /// bypass this contract's expiry check by always registering with the same attestation and - /// getting multiple valid instanceIds. While during proving, it is technically possible to - /// register the old addresses, it is less of a problem, because the instanceId would be the - /// same for those addresses and if deleted - the attestation cannot be reused anyways. - /// Slot 3. - mapping(address instanceAddress => bool alreadyAttested) public addressRegistered; - - uint256[47] private __gap; - - /// @notice Emitted when a new SGX instance is added to the registry, or replaced. - /// @param id The ID of the SGX instance. - /// @param instance The address of the SGX instance. - /// @param replaced The address of the SGX instance that was replaced. If it is the first - /// instance, this value is zero address. - /// @param validSince The time since the instance is valid. - event InstanceAdded( - uint256 indexed id, address indexed instance, address indexed replaced, uint256 validSince - ); - - /// @notice Emitted when an SGX instance is deleted from the registry. - /// @param id The ID of the SGX instance. - /// @param instance The address of the SGX instance. - event InstanceDeleted(uint256 indexed id, address indexed instance); - - error SGX_ALREADY_ATTESTED(); - error SGX_INVALID_ATTESTATION(); - error SGX_INVALID_INSTANCE(); - error SGX_INVALID_PROOF(); - error SGX_RA_NOT_SUPPORTED(); - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - } - - /// @notice Adds trusted SGX instances to the registry. - /// @param _instances The address array of trusted SGX instances. - /// @return The respective instanceId array per addresses. - function addInstances(address[] calldata _instances) - external - onlyOwner - returns (uint256[] memory) - { - return _addInstances(_instances, true); - } - - /// @notice Deletes SGX instances from the registry. - /// @param _ids The ids array of SGX instances. - function deleteInstances(uint256[] calldata _ids) - external - onlyFromOwnerOrNamed("sgx_watchdog") - { - for (uint256 i; i < _ids.length; ++i) { - uint256 idx = _ids[i]; - - if (instances[idx].addr == address(0)) revert SGX_INVALID_INSTANCE(); - - emit InstanceDeleted(idx, instances[idx].addr); - - delete instances[idx]; - } - } - - /// @notice Adds an SGX instance after the attestation is verified - /// @param _attestation The parsed attestation quote. 
- /// @return The respective instanceId - function registerInstance(V3Struct.ParsedV3QuoteStruct calldata _attestation) - external - returns (uint256) - { - address automataDcapAttestation = resolve("automata_dcap_attestation", true); - - if (automataDcapAttestation == address(0)) { - revert SGX_RA_NOT_SUPPORTED(); - } - - (bool verified,) = IAttestation(automataDcapAttestation).verifyParsedQuote(_attestation); - - if (!verified) revert SGX_INVALID_ATTESTATION(); - - address[] memory _address = new address[](1); - _address[0] = address(bytes20(_attestation.localEnclaveReport.reportData)); - - return _addInstances(_address, false)[0]; - } - - /// @inheritdoc IVerifier - /* MODIFIED - TO RETURN TRUE WITHOUT REAL VERIFICATION!!! */ - function verifyProof( - bytes32, /*transitionHash*/ - address, /*prover*/ - bytes calldata /*proof*/ - ) - external - { - return; - } - - function _addInstances( - address[] memory _instances, - bool instantValid - ) - private - returns (uint256[] memory ids) - { - ids = new uint256[](_instances.length); - - uint64 validSince = uint64(block.timestamp); - - if (!instantValid) { - validSince += INSTANCE_VALIDITY_DELAY; - } - - for (uint256 i; i < _instances.length; ++i) { - if (addressRegistered[_instances[i]]) revert SGX_ALREADY_ATTESTED(); - - addressRegistered[_instances[i]] = true; - - if (_instances[i] == address(0)) revert SGX_INVALID_INSTANCE(); - - instances[nextInstanceId] = Instance(_instances[i], validSince); - ids[i] = nextInstanceId; - - emit InstanceAdded(nextInstanceId, _instances[i], address(0), validSince); - - ++nextInstanceId; - } - } - - function _replaceInstance(uint256 id, address oldInstance, address newInstance) private { - // Replacing an instance means, it went through a cooldown (if added by on-chain RA) so no - // need to have a cooldown - instances[id] = Instance(newInstance, uint64(block.timestamp)); - emit InstanceAdded(id, newInstance, oldInstance, block.timestamp); - } - - function _isInstanceValid(uint256 id, address instance) private view returns (bool) { - if (instance == address(0)) return false; - if (instance != instances[id].addr) return false; - return instances[id].validSince <= block.timestamp - && block.timestamp <= instances[id].validSince + INSTANCE_EXPIRY; - } -} diff --git a/packages/protocol/contracts/L1/verifiers/SgxVerifier.sol b/packages/protocol/contracts/L1/verifiers/SgxVerifier.sol deleted file mode 100644 index 9d308f3eea70..000000000000 --- a/packages/protocol/contracts/L1/verifiers/SgxVerifier.sol +++ /dev/null @@ -1,215 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/utils/cryptography/ECDSA.sol"; -import "../TaikoL1.sol"; -import "../../common/EssentialContract.sol"; -import "../../automata-attestation/interfaces/IAttestation.sol"; -import "../../automata-attestation/lib/QuoteV3Auth/V3Struct.sol"; -import "./libs/LibPublicInput.sol"; -import "./IVerifier.sol"; - -/// @title SgxVerifier -/// @notice This contract is the implementation of verifying SGX signature proofs -/// onchain. -/// @dev Please see references below: -/// - Reference #1: https://ethresear.ch/t/2fa-zk-rollups-using-sgx/14462 -/// - Reference #2: https://github.com/gramineproject/gramine/discussions/1579 -/// @custom:security-contact security@taiko.xyz -contract SgxVerifier is EssentialContract, IVerifier { - /// @dev Each public-private key pair (Ethereum address) is generated within - /// the SGX program when it boots up. 
The off-chain remote attestation - /// ensures the validity of the program hash and has the capability of - /// bootstrapping the network with trustworthy instances. - struct Instance { - address addr; - uint64 validSince; - } - - /// @notice The expiry time for the SGX instance. - uint64 public constant INSTANCE_EXPIRY = 365 days; - - /// @notice A security feature, a delay until an instance is enabled when using onchain RA - /// verification - uint64 public constant INSTANCE_VALIDITY_DELAY = 0; - - /// @dev For gas savings, we shall assign each SGX instance with an id that when we need to - /// set a new pub key, just write storage once. - /// Slot 1. - uint256 public nextInstanceId; - - /// @dev One SGX instance is uniquely identified (on-chain) by it's ECDSA public key - /// (or rather ethereum address). Once that address is used (by proof verification) it has to be - /// overwritten by a new one (representing the same instance). This is due to side-channel - /// protection. Also this public key shall expire after some time - /// (for now it is a long enough 6 months setting). - /// Slot 2. - mapping(uint256 instanceId => Instance instance) public instances; - - /// @dev One address shall be registered (during attestation) only once, otherwise it could - /// bypass this contract's expiry check by always registering with the same attestation and - /// getting multiple valid instanceIds. While during proving, it is technically possible to - /// register the old addresses, it is less of a problem, because the instanceId would be the - /// same for those addresses and if deleted - the attestation cannot be reused anyways. - /// Slot 3. - mapping(address instanceAddress => bool alreadyAttested) public addressRegistered; - - uint256[47] private __gap; - - /// @notice Emitted when a new SGX instance is added to the registry, or replaced. - /// @param id The ID of the SGX instance. - /// @param instance The address of the SGX instance. - /// @param replaced The address of the SGX instance that was replaced. If it is the first - /// instance, this value is zero address. - /// @param validSince The time since the instance is valid. - event InstanceAdded( - uint256 indexed id, address indexed instance, address indexed replaced, uint256 validSince - ); - - /// @notice Emitted when an SGX instance is deleted from the registry. - /// @param id The ID of the SGX instance. - /// @param instance The address of the SGX instance. - event InstanceDeleted(uint256 indexed id, address indexed instance); - - error SGX_ALREADY_ATTESTED(); - error SGX_INVALID_ATTESTATION(); - error SGX_INVALID_INSTANCE(); - error SGX_INVALID_PROOF(); - error SGX_RA_NOT_SUPPORTED(); - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - } - - /// @notice Adds trusted SGX instances to the registry. - /// @param _instances The address array of trusted SGX instances. - /// @return The respective instanceId array per addresses. - function addInstances(address[] calldata _instances) - external - onlyOwner - returns (uint256[] memory) - { - return _addInstances(_instances, true); - } - - /// @notice Deletes SGX instances from the registry. - /// @param _ids The ids array of SGX instances. 
- function deleteInstances(uint256[] calldata _ids) - external - onlyFromOwnerOrNamed("sgx_watchdog") - { - for (uint256 i; i < _ids.length; ++i) { - uint256 idx = _ids[i]; - - if (instances[idx].addr == address(0)) revert SGX_INVALID_INSTANCE(); - - emit InstanceDeleted(idx, instances[idx].addr); - - delete instances[idx]; - } - } - - /// @notice Adds an SGX instance after the attestation is verified - /// @param _attestation The parsed attestation quote. - /// @return The respective instanceId - function registerInstance(V3Struct.ParsedV3QuoteStruct calldata _attestation) - external - returns (uint256) - { - address automataDcapAttestation = resolve("automata_dcap_attestation", true); - - if (automataDcapAttestation == address(0)) { - revert SGX_RA_NOT_SUPPORTED(); - } - - (bool verified,) = IAttestation(automataDcapAttestation).verifyParsedQuote(_attestation); - - if (!verified) revert SGX_INVALID_ATTESTATION(); - - address[] memory _address = new address[](1); - _address[0] = address(bytes20(_attestation.localEnclaveReport.reportData)); - - return _addInstances(_address, false)[0]; - } - - /// @inheritdoc IVerifier - function verifyProof( - bytes32 transitionHash, - address prover, - bytes calldata proof - ) - external - onlyFromNamed("taiko") - { - // Size is: 89 bytes - // 4 bytes + 20 bytes + 65 bytes (signature) = 89 - if (proof.length != 89) revert SGX_INVALID_PROOF(); - - uint32 id = uint32(bytes4(proof[:4])); - address newInstance = address(bytes20(proof[4:24])); - bytes memory signature = proof[24:]; - - uint64 chainId = TaikoL1(resolve("taiko", false)).getConfig().chainId; - - address oldInstance = ECDSA.recover( - LibPublicInput.hashPublicInputs( - transitionHash, address(this), newInstance, prover, chainId - ), - signature - ); - - if (!_isInstanceValid(id, oldInstance)) revert SGX_INVALID_INSTANCE(); - - if (oldInstance != newInstance) { - _replaceInstance(id, oldInstance, newInstance); - } - } - - function _addInstances( - address[] memory _instances, - bool instantValid - ) - private - returns (uint256[] memory ids) - { - ids = new uint256[](_instances.length); - - uint64 validSince = uint64(block.timestamp); - - if (!instantValid) { - validSince += INSTANCE_VALIDITY_DELAY; - } - - for (uint256 i; i < _instances.length; ++i) { - if (addressRegistered[_instances[i]]) revert SGX_ALREADY_ATTESTED(); - - addressRegistered[_instances[i]] = true; - - if (_instances[i] == address(0)) revert SGX_INVALID_INSTANCE(); - - instances[nextInstanceId] = Instance(_instances[i], validSince); - ids[i] = nextInstanceId; - - emit InstanceAdded(nextInstanceId, _instances[i], address(0), validSince); - - ++nextInstanceId; - } - } - - function _replaceInstance(uint256 id, address oldInstance, address newInstance) private { - // Replacing an instance means, it went through a cooldown (if added by on-chain RA) so no - // need to have a cooldown - instances[id] = Instance(newInstance, uint64(block.timestamp)); - emit InstanceAdded(id, newInstance, oldInstance, block.timestamp); - } - - function _isInstanceValid(uint256 id, address instance) private view returns (bool) { - if (instance == address(0)) return false; - if (instance != instances[id].addr) return false; - return instances[id].validSince <= block.timestamp - && block.timestamp <= instances[id].validSince + INSTANCE_EXPIRY; - } -} diff --git a/packages/protocol/contracts/L1/verifiers/libs/LibPublicInput.sol b/packages/protocol/contracts/L1/verifiers/libs/LibPublicInput.sol deleted file mode 100644 index 0fe945e61775..000000000000 --- 
a/packages/protocol/contracts/L1/verifiers/libs/LibPublicInput.sol +++ /dev/null @@ -1,36 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../../TaikoData.sol"; - -/// @title LibPublicInput -/// @notice A library for handling hashing the so-called public input hash, used by sgx and zk -/// proofs. -/// @custom:security-contact security@taiko.xyz -library LibPublicInput { - /// @notice Hashes the public input for the proof verification. - /// @param _transitionHash The new state hash transition. - /// @param _verifierContract The contract address which as current verifier. - /// @param _newInstance The new instance address. For SGX it is the new signer address, for ZK - /// this variable is not used and must have value address(0). - /// @param _prover The prover address. - /// @param _chainId The chain id. - /// @return The public input hash. - function hashPublicInputs( - bytes32 _transitionHash, - address _verifierContract, - address _newInstance, - address _prover, - uint64 _chainId - ) - internal - pure - returns (bytes32) - { - return keccak256( - abi.encode( - "VERIFY_PROOF", _chainId, _verifierContract, _transitionHash, _newInstance, _prover - ) - ); - } -} diff --git a/packages/protocol/contracts/L2/Lib1559Math.sol b/packages/protocol/contracts/L2/Lib1559Math.sol deleted file mode 100644 index 4b5316c28f6f..000000000000 --- a/packages/protocol/contracts/L2/Lib1559Math.sol +++ /dev/null @@ -1,76 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../thirdparty/solmate/LibFixedPointMath.sol"; -import "../libs/LibMath.sol"; - -/// @title Lib1559Math -/// @notice Implements e^(x) based bonding curve for EIP-1559 -/// @dev See https://ethresear.ch/t/make-eip-1559-more-like-an-amm-curve/9082 but some minor -/// difference as stated in docs/eip1559_on_l2.md. -/// @custom:security-contact security@taiko.xyz -library Lib1559Math { - using LibMath for uint256; - - error EIP1559_INVALID_PARAMS(); - - function calc1559BaseFee( - uint32 _gasTargetPerL1Block, - uint8 _adjustmentQuotient, - uint64 _gasExcess, - uint64 _gasIssuance, - uint32 _parentGasUsed - ) - internal - pure - returns (uint256 basefee_, uint64 gasExcess_) - { - // We always add the gas used by parent block to the gas excess - // value as this has already happened - uint256 excess = uint256(_gasExcess) + _parentGasUsed; - excess = excess > _gasIssuance ? excess - _gasIssuance : 1; - gasExcess_ = uint64(excess.min(type(uint64).max)); - - // The base fee per gas used by this block is the spot price at the - // bonding curve, regardless the actual amount of gas used by this - // block, however, this block's gas used will affect the next - // block's base fee. - basefee_ = basefee(gasExcess_, uint256(_adjustmentQuotient) * _gasTargetPerL1Block); - - // Always make sure basefee is nonzero, this is required by the node. 
- if (basefee_ == 0) basefee_ = 1; - } - - /// @dev eth_qty(excess_gas_issued) / (TARGET * ADJUSTMENT_QUOTIENT) - /// @param _gasExcess The gas excess value - /// @param _adjustmentFactor The product of gasTarget and adjustmentQuotient - function basefee( - uint256 _gasExcess, - uint256 _adjustmentFactor - ) - internal - pure - returns (uint256) - { - if (_adjustmentFactor == 0) { - revert EIP1559_INVALID_PARAMS(); - } - return _ethQty(_gasExcess, _adjustmentFactor) / LibFixedPointMath.SCALING_FACTOR; - } - - /// @dev exp(gas_qty / TARGET / ADJUSTMENT_QUOTIENT) - function _ethQty( - uint256 _gasExcess, - uint256 _adjustmentFactor - ) - private - pure - returns (uint256) - { - uint256 input = _gasExcess * LibFixedPointMath.SCALING_FACTOR / _adjustmentFactor; - if (input > LibFixedPointMath.MAX_EXP_INPUT) { - input = LibFixedPointMath.MAX_EXP_INPUT; - } - return uint256(LibFixedPointMath.exp(int256(input))); - } -} diff --git a/packages/protocol/contracts/L2/LibL2Config.sol b/packages/protocol/contracts/L2/LibL2Config.sol deleted file mode 100644 index 70af37013b8c..000000000000 --- a/packages/protocol/contracts/L2/LibL2Config.sol +++ /dev/null @@ -1,20 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title LibL2Config -library LibL2Config { - struct Config { - uint32 gasTargetPerL1Block; - uint8 basefeeAdjustmentQuotient; - } - - /// @notice Returns EIP1559 related configurations. - /// @return config_ struct containing configuration parameters. - function get() internal pure returns (Config memory config_) { - // Assuming we sell 3x more blockspace than Ethereum: 15_000_000 * 4 - // Note that Brecht's concern is that this value may be too large. - // We need to monitor L2 state growth and lower this value when necessary. - config_.gasTargetPerL1Block = 60_000_000; - config_.basefeeAdjustmentQuotient = 8; - } -} diff --git a/packages/protocol/contracts/L2/TaikoL2.sol b/packages/protocol/contracts/L2/TaikoL2.sol deleted file mode 100644 index 896376c1bdc5..000000000000 --- a/packages/protocol/contracts/L2/TaikoL2.sol +++ /dev/null @@ -1,263 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; -import "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; - -import "../common/EssentialContract.sol"; -import "../common/LibStrings.sol"; -import "../libs/LibAddress.sol"; -import "../signal/ISignalService.sol"; -import "./Lib1559Math.sol"; -import "./LibL2Config.sol"; - -/// @title TaikoL2 -/// @notice Taiko L2 is a smart contract that handles cross-layer message -/// verification and manages EIP-1559 gas pricing for Layer 2 (L2) operations. -/// It is used to anchor the latest L1 block details to L2 for cross-layer -/// communication, manage EIP-1559 parameters for gas pricing, and store -/// verified L1 block information. -/// @custom:security-contact security@taiko.xyz -contract TaikoL2 is EssentialContract { - using LibAddress for address; - using SafeERC20 for IERC20; - - /// @notice Golden touch address is the only address that can do the anchor transaction. - address public constant GOLDEN_TOUCH_ADDRESS = 0x0000777735367b36bC9B61C50022d9D0700dB4Ec; - - /// @notice Mapping from L2 block numbers to their block hashes. All L2 block hashes will - /// be saved in this mapping. - mapping(uint256 blockId => bytes32 blockHash) public l2Hashes; - - /// @notice A hash to check the integrity of public inputs. - /// @dev Slot 2. 
- bytes32 public publicInputHash; - - /// @notice The gas excess value used to calculate the base fee. - /// @dev Slot 3. - uint64 public gasExcess; - - /// @notice The last synced L1 block height. - uint64 public lastSyncedBlock; - - uint64 private __deprecated1; // was parentTimestamp - uint64 private __deprecated2; // was __currentBlockTimestamp - - /// @notice The L1's chain ID. - uint64 public l1ChainId; - - uint256[46] private __gap; - - /// @notice Emitted when the latest L1 block details are anchored to L2. - /// @param parentHash The hash of the parent block. - /// @param gasExcess The gas excess value used to calculate the base fee. - event Anchored(bytes32 parentHash, uint64 gasExcess); - - error L2_BASEFEE_MISMATCH(); - error L2_INVALID_L1_CHAIN_ID(); - error L2_INVALID_L2_CHAIN_ID(); - error L2_INVALID_PARAM(); - error L2_INVALID_SENDER(); - error L2_PUBLIC_INPUT_HASH_MISMATCH(); - error L2_TOO_LATE(); - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - /// @param _l1ChainId The ID of the base layer. - /// @param _gasExcess The initial gasExcess. - function init( - address _owner, - address _addressManager, - uint64 _l1ChainId, - uint64 _gasExcess - ) - external - initializer - { - __Essential_init(_owner, _addressManager); - - if (_l1ChainId == 0 || _l1ChainId == block.chainid) { - revert L2_INVALID_L1_CHAIN_ID(); - } - if (block.chainid <= 1 || block.chainid > type(uint64).max) { - revert L2_INVALID_L2_CHAIN_ID(); - } - - if (block.number == 0) { - // This is the case in real L2 genesis - } else if (block.number == 1) { - // This is the case in tests - uint256 parentHeight = block.number - 1; - l2Hashes[parentHeight] = blockhash(parentHeight); - } else { - revert L2_TOO_LATE(); - } - - l1ChainId = _l1ChainId; - gasExcess = _gasExcess; - (publicInputHash,) = _calcPublicInputHash(block.number); - } - - /// @notice Anchors the latest L1 block details to L2 for cross-layer - /// message verification. - /// @dev This function can be called freely as the golden touch private key is publicly known, - /// but the Taiko node guarantees the first transaction of each block is always this anchor - /// transaction, and any subsequent calls will revert with L2_PUBLIC_INPUT_HASH_MISMATCH. - /// @param _l1BlockHash The latest L1 block hash when this block was - /// proposed. - /// @param _l1StateRoot The latest L1 block's state root. - /// @param _l1BlockId The latest L1 block height when this block was proposed. - /// @param _parentGasUsed The gas used in the parent block. 
- function anchor( - bytes32 _l1BlockHash, - bytes32 _l1StateRoot, - uint64 _l1BlockId, - uint32 _parentGasUsed - ) - external - nonReentrant - { - if ( - _l1BlockHash == 0 || _l1StateRoot == 0 || _l1BlockId == 0 - || (block.number != 1 && _parentGasUsed == 0) - ) { - revert L2_INVALID_PARAM(); - } - - if (msg.sender != GOLDEN_TOUCH_ADDRESS) revert L2_INVALID_SENDER(); - - uint256 parentId; - unchecked { - parentId = block.number - 1; - } - - // Verify ancestor hashes - (bytes32 publicInputHashOld, bytes32 publicInputHashNew) = _calcPublicInputHash(parentId); - if (publicInputHash != publicInputHashOld) { - revert L2_PUBLIC_INPUT_HASH_MISMATCH(); - } - - // Verify the base fee per gas is correct - (uint256 _basefee, uint64 _gasExcess) = getBasefee(_l1BlockId, _parentGasUsed); - - if (!skipFeeCheck() && block.basefee != _basefee) { - revert L2_BASEFEE_MISMATCH(); - } - - if (_l1BlockId > lastSyncedBlock) { - // Store the L1's state root as a signal to the local signal service to - // allow for multi-hop bridging. - ISignalService(resolve(LibStrings.B_SIGNAL_SERVICE, false)).syncChainData( - l1ChainId, LibStrings.H_STATE_ROOT, _l1BlockId, _l1StateRoot - ); - - lastSyncedBlock = _l1BlockId; - } - - // Update state variables - bytes32 _parentHash = blockhash(parentId); - l2Hashes[parentId] = _parentHash; - publicInputHash = publicInputHashNew; - gasExcess = _gasExcess; - - emit Anchored(_parentHash, _gasExcess); - } - - /// @notice Withdraw token or Ether from this address - /// @param _token Token address or address(0) if Ether. - /// @param _to Withdraw to address. - function withdraw( - address _token, - address _to - ) - external - whenNotPaused - onlyFromOwnerOrNamed(LibStrings.B_WITHDRAWER) - nonReentrant - { - if (_to == address(0)) revert L2_INVALID_PARAM(); - if (_token == address(0)) { - _to.sendEtherAndVerify(address(this).balance); - } else { - IERC20(_token).safeTransfer(_to, IERC20(_token).balanceOf(address(this))); - } - } - - /// @notice Gets the basefee and gas excess using EIP-1559 configuration for - /// the given parameters. - /// @param _l1BlockId The synced L1 height in the next Taiko block - /// @param _parentGasUsed Gas used in the parent block. - /// @return basefee_ The calculated EIP-1559 base fee per gas. - /// @return gasExcess_ The new gasExcess value. - function getBasefee( - uint64 _l1BlockId, - uint32 _parentGasUsed - ) - public - view - returns (uint256 basefee_, uint64 gasExcess_) - { - LibL2Config.Config memory config = getConfig(); - uint64 gasIssuance = uint64(_l1BlockId - lastSyncedBlock) * config.gasTargetPerL1Block; - - (basefee_, gasExcess_) = Lib1559Math.calc1559BaseFee( - config.gasTargetPerL1Block, - config.basefeeAdjustmentQuotient, - gasExcess, - gasIssuance, - _parentGasUsed - ); - } - - /// @notice Retrieves the block hash for the given L2 block number. - /// @param _blockId The L2 block number to retrieve the block hash for. - /// @return The block hash for the specified L2 block id, or zero if the - /// block id is greater than or equal to the current block number. - function getBlockHash(uint64 _blockId) public view returns (bytes32) { - if (_blockId >= block.number) return 0; - if (_blockId + 256 >= block.number) return blockhash(_blockId); - return l2Hashes[_blockId]; - } - - /// @notice Returns EIP1559 related configurations. - /// @return config_ struct containing configuration parameters. 
- function getConfig() public view virtual returns (LibL2Config.Config memory) { - return LibL2Config.get(); - } - - /// @notice Tells if we need to validate basefee (for simulation). - /// @return Returns true to skip checking basefee mismatch. - function skipFeeCheck() public pure virtual returns (bool) { - return false; - } - - function _calcPublicInputHash(uint256 _blockId) - private - view - returns (bytes32 publicInputHashOld, bytes32 publicInputHashNew) - { - bytes32[256] memory inputs; - - // Unchecked is safe because it cannot overflow. - unchecked { - // Put the previous 255 blockhashes (excluding the parent's) into a - // ring buffer. - for (uint256 i; i < 255 && _blockId >= i + 1; ++i) { - uint256 j = _blockId - i - 1; - inputs[j % 255] = blockhash(j); - } - } - - inputs[255] = bytes32(block.chainid); - - assembly { - publicInputHashOld := keccak256(inputs, 8192 /*mul(256, 32)*/ ) - } - - inputs[_blockId % 255] = blockhash(_blockId); - assembly { - publicInputHashNew := keccak256(inputs, 8192 /*mul(256, 32)*/ ) - } - } -} diff --git a/packages/protocol/contracts/L2/eip1559_util.py b/packages/protocol/contracts/L2/eip1559_util.py deleted file mode 100644 index 0e633df8c839..000000000000 --- a/packages/protocol/contracts/L2/eip1559_util.py +++ /dev/null @@ -1,135 +0,0 @@ -import math -import matplotlib.pyplot as plt - -SCALE = int(1e18) ## fix point scale -MAX_EXP_INPUT = 135_305_999_368_893_231_588 - -# Python function that matches the `exp(int256 x)` function in LibFixedPointMath.sol -def fixed_point_exp(x): - if x <= -42_139_678_854_452_767_551: - return 0 - - if x >= 135_305_999_368_893_231_589: - raise OverflowError("Overflow") - - x = (x << 78) // (5**18) - - k = ((x << 96) // 54_916_777_467_707_473_351_141_471_128 + (2**95)) >> 96 - x = x - k * 54_916_777_467_707_473_351_141_471_128 - - y = x + 1_346_386_616_545_796_478_920_950_773_328 - y = ((y * x) >> 96) + 57_155_421_227_552_351_082_224_309_758_442 - p = y + x - 94_201_549_194_550_492_254_356_042_504_812 - p = ((p * y) >> 96) + 28_719_021_644_029_726_153_956_944_680_412_240 - p = p * x + (4_385_272_521_454_847_904_659_076_985_693_276 << 96) - - q = x - 2_855_989_394_907_223_263_936_484_059_900 - q = ((q * x) >> 96) + 50_020_603_652_535_783_019_961_831_881_945 - q = ((q * x) >> 96) - 533_845_033_583_426_703_283_633_433_725_380 - q = ((q * x) >> 96) + 3_604_857_256_930_695_427_073_651_918_091_429 - q = ((q * x) >> 96) - 14_423_608_567_350_463_180_887_372_962_807_573 - q = ((q * x) >> 96) + 26_449_188_498_355_588_339_934_803_723_976_023 - - r = p // q # Integer division - - r = (r * 3_822_833_074_963_236_453_042_738_258_902_158_003_155_416_615_667) >> ( - 195 - k - ) - - return r - - -# Test exp(1) -print("exp(1) =", fixed_point_exp(SCALE) / SCALE) -print("exp(MAX) =", fixed_point_exp(MAX_EXP_INPUT) / SCALE) - -## Calculate initial gas_excess_issued -GWEI = 1e9 -ETHEREUM_TARGET = 15 * 1e6 -ETHEREUM_BASE_FEE = 10 * GWEI -TAIKO_TARGET = ETHEREUM_TARGET * 10 -TAIKO_BASE_FEE = ETHEREUM_BASE_FEE // 10 -ADJUSTMENT_QUOTIENT = 8 -ADJUSTMENT_FACTOR = TAIKO_TARGET * 8 - - -def calc_eth_qty(qty): - return math.exp(qty / TAIKO_TARGET / ADJUSTMENT_QUOTIENT) - - -def calc_basefee(excess, gas_in_block): - diff = calc_eth_qty(excess + gas_in_block) - calc_eth_qty(excess) - return diff / gas_in_block - - -def calculate_excess_gas_issued(expected_base_fee, gas_used): - numerator = expected_base_fee * gas_used / (calc_eth_qty(gas_used) - 1) + 1 - excess_gas_issued = math.log(numerator) * ADJUSTMENT_FACTOR - return excess_gas_issued - - 
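# A minimal floating-point sketch of the spot price implied by the bonding
# curve above: as gas_in_block shrinks, calc_basefee approaches the slope of
# calc_eth_qty at the current gas excess. Reuses the TAIKO_TARGET and
# ADJUSTMENT_QUOTIENT constants defined earlier in this script; ignores the
# fixed-point arithmetic used by the on-chain version.
def approx_spot_basefee(gas_excess: float) -> float:
    # d/dx exp(x / (TAIKO_TARGET * ADJUSTMENT_QUOTIENT)) evaluated at gas_excess
    adjustment_factor = TAIKO_TARGET * ADJUSTMENT_QUOTIENT
    return math.exp(gas_excess / adjustment_factor) / adjustment_factor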
-expected_basefee = TAIKO_BASE_FEE -gas_in_block = 1 -gas_excess_issued = int(calculate_excess_gas_issued(expected_basefee, gas_in_block)) -print("gas_excess_issued : ", gas_excess_issued) -print("actual_basefee : ", calc_basefee(gas_excess_issued, gas_in_block)) -print("expected_basefee : ", expected_basefee) - - -# See https://ethresear.ch/t/make-eip-1559-more-like-an-amm-curve/9082 -def eth_qty(gas_qty): - v = int(int(gas_qty) * SCALE // ADJUSTMENT_FACTOR) - if v > MAX_EXP_INPUT: - v = MAX_EXP_INPUT - return fixed_point_exp(v) - - -def calc_purchase_basefee(gas_used): - # Returns the average base fee per gas for purchasing gas_used gas - diff = eth_qty(gas_excess_issued + gas_used) - eth_qty(gas_excess_issued) - return int(int(diff // gas_used) // SCALE) - - -def calc_spot_basefee(): - # Returns the spot price - return int(int(eth_qty(gas_excess_issued) // SCALE) // ADJUSTMENT_FACTOR) - - -print("purchase basefee (1 gas) [fix point]1 : ", calc_purchase_basefee(gas_in_block)) -print("spot basefee [fix point]1 : ", calc_spot_basefee()) - - -# Set the excess value to the max possible -bkup = gas_excess_issued -gas_excess_issued = MAX_EXP_INPUT * ADJUSTMENT_FACTOR // SCALE -print("spot basefee [fix point]2 : ", calc_spot_basefee()) - - -exit() -gas_excess_issued = bkup -# one L2 block per L1 block vs multiple L2 blocks per L1 block -x1 = [] -y1 = [] -for i in range(10): - x1.append(i * 12) - y1.append(calc_spot_basefee()) - -x2 = [] -y2 = [] - -for i in range(10): - for j in range(12): - x2.append(i * 12 + j) - y2.append(calc_spot_basefee()) - gas_excess_issued += TAIKO_TARGET / 12 - gas_excess_issued -= TAIKO_TARGET - -plt.scatter(x2, y2, label="1s", color="red", marker="o") -plt.scatter(x1, y1, label="12s", color="blue", marker="x") - -plt.xlabel("basefee") -plt.ylabel("time") -plt.ylim(expected_basefee * 0.75, expected_basefee * 1.25) -plt.legend() -plt.title("EIP1559 Bond Curve") -plt.show() diff --git a/packages/protocol/contracts/automata-attestation/AutomataDcapV3Attestation.sol b/packages/protocol/contracts/automata-attestation/AutomataDcapV3Attestation.sol deleted file mode 100644 index 900ade360029..000000000000 --- a/packages/protocol/contracts/automata-attestation/AutomataDcapV3Attestation.sol +++ /dev/null @@ -1,508 +0,0 @@ -//SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import { V3Struct } from "./lib/QuoteV3Auth/V3Struct.sol"; -import { V3Parser } from "./lib/QuoteV3Auth/V3Parser.sol"; -import { IPEMCertChainLib } from "./lib/interfaces/IPEMCertChainLib.sol"; -import { PEMCertChainLib } from "./lib/PEMCertChainLib.sol"; -import { TCBInfoStruct } from "./lib/TCBInfoStruct.sol"; -import { EnclaveIdStruct } from "./lib/EnclaveIdStruct.sol"; -import { IAttestation } from "./interfaces/IAttestation.sol"; - -// Internal Libraries -import { Base64 } from "solady/src/utils/Base64.sol"; -import { LibString } from "solady/src/utils/LibString.sol"; -import { BytesUtils } from "./utils/BytesUtils.sol"; - -// External Libraries -import { ISigVerifyLib } from "./interfaces/ISigVerifyLib.sol"; - -import { EssentialContract } from "../common/EssentialContract.sol"; - -/// @title AutomataDcapV3Attestation -/// @custom:security-contact security@taiko.xyz -contract AutomataDcapV3Attestation is IAttestation, EssentialContract { - using BytesUtils for bytes; - - // https://github.com/intel/SGXDataCenterAttestationPrimitives/blob/e7604e02331b3377f3766ed3653250e03af72d45/QuoteVerification/QVL/Src/AttestationLibrary/src/CertVerification/X509Constants.h#L64 - uint256 internal constant 
CPUSVN_LENGTH = 16; - - // keccak256(hex"0ba9c4c0c0c86193a3fe23d6b02cda10a8bbd4e88e48b4458561a36e705525f567918e2edc88e40d860bd0cc4ee26aacc988e505a953558c453f6b0904ae7394") - // the uncompressed (0x04) prefix is not included in the pubkey pre-image - bytes32 internal constant ROOTCA_PUBKEY_HASH = - 0x89f72d7c488e5b53a77c23ebcb36970ef7eb5bcf6658e9b8292cfbe4703a8473; - - uint8 internal constant INVALID_EXIT_CODE = 255; - - ISigVerifyLib public sigVerifyLib; // slot 1 - IPEMCertChainLib public pemCertLib; // slot 2 - - bool public checkLocalEnclaveReport; // slot 3 - mapping(bytes32 enclave => bool trusted) public trustedUserMrEnclave; // slot 4 - mapping(bytes32 signer => bool trusted) public trustedUserMrSigner; // slot 5 - - // Quote Collateral Configuration - - // Index definition: - // 0 = Quote PCKCrl - // 1 = RootCrl - mapping(uint256 idx => mapping(bytes serialNum => bool revoked)) public serialNumIsRevoked; // slot - // 6 - // fmspc => tcbInfo - mapping(string fmspc => TCBInfoStruct.TCBInfo tcbInfo) public tcbInfo; // slot 7 - EnclaveIdStruct.EnclaveId public qeIdentity; // takes 4 slots, slot 8,9,10,11 - - uint256[39] __gap; - - event MrSignerUpdated(bytes32 indexed mrSigner, bool trusted); - event MrEnclaveUpdated(bytes32 indexed mrEnclave, bool trusted); - event TcbInfoJsonConfigured(string indexed fmspc, TCBInfoStruct.TCBInfo tcbInfoInput); - event QeIdentityConfigured(EnclaveIdStruct.EnclaveId qeIdentityInput); - event LocalReportCheckToggled(bool checkLocalEnclaveReport); - event RevokedCertSerialNumAdded(uint256 indexed index, bytes serialNum); - event RevokedCertSerialNumRemoved(uint256 indexed index, bytes serialNum); - - // @notice Initializes the contract. - /// @param sigVerifyLibAddr Address of the signature verification library. - /// @param pemCertLibAddr Address of certificate library. 
- function init( - address owner, - address sigVerifyLibAddr, - address pemCertLibAddr - ) - external - initializer - { - __Essential_init(owner); - sigVerifyLib = ISigVerifyLib(sigVerifyLibAddr); - pemCertLib = PEMCertChainLib(pemCertLibAddr); - } - - function setMrSigner(bytes32 _mrSigner, bool _trusted) external onlyOwner { - trustedUserMrSigner[_mrSigner] = _trusted; - emit MrSignerUpdated(_mrSigner, _trusted); - } - - function setMrEnclave(bytes32 _mrEnclave, bool _trusted) external onlyOwner { - trustedUserMrEnclave[_mrEnclave] = _trusted; - emit MrEnclaveUpdated(_mrEnclave, _trusted); - } - - function addRevokedCertSerialNum( - uint256 index, - bytes[] calldata serialNumBatch - ) - external - onlyOwner - { - for (uint256 i; i < serialNumBatch.length; ++i) { - if (serialNumIsRevoked[index][serialNumBatch[i]]) { - continue; - } - serialNumIsRevoked[index][serialNumBatch[i]] = true; - emit RevokedCertSerialNumAdded(index, serialNumBatch[i]); - } - } - - function removeRevokedCertSerialNum( - uint256 index, - bytes[] calldata serialNumBatch - ) - external - onlyOwner - { - for (uint256 i; i < serialNumBatch.length; ++i) { - if (!serialNumIsRevoked[index][serialNumBatch[i]]) { - continue; - } - delete serialNumIsRevoked[index][serialNumBatch[i]]; - emit RevokedCertSerialNumRemoved(index, serialNumBatch[i]); - } - } - - function configureTcbInfoJson( - string calldata fmspc, - TCBInfoStruct.TCBInfo calldata tcbInfoInput - ) - public - onlyOwner - { - // 2.2M gas - tcbInfo[fmspc] = tcbInfoInput; - emit TcbInfoJsonConfigured(fmspc, tcbInfoInput); - } - - function configureQeIdentityJson(EnclaveIdStruct.EnclaveId calldata qeIdentityInput) - external - onlyOwner - { - // 250k gas - qeIdentity = qeIdentityInput; - emit QeIdentityConfigured(qeIdentityInput); - } - - function toggleLocalReportCheck() external onlyOwner { - checkLocalEnclaveReport = !checkLocalEnclaveReport; - emit LocalReportCheckToggled(checkLocalEnclaveReport); - } - - function _attestationTcbIsValid(TCBInfoStruct.TCBStatus status) - internal - pure - virtual - returns (bool valid) - { - return status == TCBInfoStruct.TCBStatus.OK - || status == TCBInfoStruct.TCBStatus.TCB_SW_HARDENING_NEEDED - || status == TCBInfoStruct.TCBStatus.TCB_CONFIGURATION_AND_SW_HARDENING_NEEDED - || status == TCBInfoStruct.TCBStatus.TCB_OUT_OF_DATE - || status == TCBInfoStruct.TCBStatus.TCB_OUT_OF_DATE_CONFIGURATION_NEEDED; - } - - function verifyAttestation(bytes calldata data) external view override returns (bool success) { - (success,) = _verify(data); - } - - /// @dev Provide the raw quote binary as input - /// @dev The attestation data (or the returned data of this method) - /// is constructed depending on the validity of the quote verification. - /// @dev After confirming that a quote has been verified, the attestation's validity then - /// depends on the - /// status of the associated TCB. - /// @dev Example scenarios as below: - /// -------------------------------- - /// @dev Invalid quote verification: returns (false, INVALID_EXIT_CODE) - /// - /// @dev For all valid quote verification, the validity of the attestation depends on the status - /// of a - /// matching TCBInfo and this is defined in the _attestationTcbIsValid() method, which can be - /// overwritten - /// in derived contracts. 
(Except for "Revoked" status, which also returns (false, - /// INVALID_EXIT_CODE) value) - /// @dev For all valid quote verification, returns the following data: - /// (_attestationTcbIsValid(), abi.encodePacked(sha256(quote), uint8 exitCode)) - /// @dev exitCode is defined in the {{ TCBInfoStruct.TCBStatus }} enum - function _verify(bytes calldata quote) private view returns (bool, bytes memory) { - bytes memory retData = abi.encodePacked(INVALID_EXIT_CODE); - - // Step 1: Parse the quote input = 152k gas - (bool successful, V3Struct.ParsedV3QuoteStruct memory parsedV3Quote) = - V3Parser.parseInput(quote, address(pemCertLib)); - if (!successful) { - return (false, retData); - } - - return _verifyParsedQuote(parsedV3Quote); - } - - function _verifyQEReportWithIdentity(V3Struct.EnclaveReport memory quoteEnclaveReport) - private - view - returns (bool, EnclaveIdStruct.EnclaveIdStatus status) - { - EnclaveIdStruct.EnclaveId memory enclaveId = qeIdentity; - bool miscselectMatched = - quoteEnclaveReport.miscSelect & enclaveId.miscselectMask == enclaveId.miscselect; - - bool attributesMatched = - quoteEnclaveReport.attributes & enclaveId.attributesMask == enclaveId.attributes; - bool mrsignerMatched = quoteEnclaveReport.mrSigner == enclaveId.mrsigner; - - bool isvprodidMatched = quoteEnclaveReport.isvProdId == enclaveId.isvprodid; - - bool tcbFound; - for (uint256 i; i < enclaveId.tcbLevels.length; ++i) { - EnclaveIdStruct.TcbLevel memory tcb = enclaveId.tcbLevels[i]; - if (tcb.tcb.isvsvn <= quoteEnclaveReport.isvSvn) { - tcbFound = true; - status = tcb.tcbStatus; - break; - } - } - return ( - miscselectMatched && attributesMatched && mrsignerMatched && isvprodidMatched - && tcbFound, - status - ); - } - - function _checkTcbLevels( - IPEMCertChainLib.PCKCertificateField memory pck, - TCBInfoStruct.TCBInfo memory tcb - ) - private - pure - returns (bool, TCBInfoStruct.TCBStatus status) - { - for (uint256 i; i < tcb.tcbLevels.length; ++i) { - TCBInfoStruct.TCBLevelObj memory current = tcb.tcbLevels[i]; - bool pceSvnIsHigherOrGreater = pck.sgxExtension.pcesvn >= current.pcesvn; - bool cpuSvnsAreHigherOrGreater = _isCpuSvnHigherOrGreater( - pck.sgxExtension.sgxTcbCompSvnArr, current.sgxTcbCompSvnArr - ); - if (pceSvnIsHigherOrGreater && cpuSvnsAreHigherOrGreater) { - status = current.status; - bool tcbIsRevoked = status == TCBInfoStruct.TCBStatus.TCB_REVOKED; - return (!tcbIsRevoked, status); - } - } - return (true, TCBInfoStruct.TCBStatus.TCB_UNRECOGNIZED); - } - - function _isCpuSvnHigherOrGreater( - uint256[] memory pckCpuSvns, - uint8[] memory tcbCpuSvns - ) - private - pure - returns (bool) - { - if (pckCpuSvns.length != CPUSVN_LENGTH || tcbCpuSvns.length != CPUSVN_LENGTH) { - return false; - } - for (uint256 i; i < CPUSVN_LENGTH; ++i) { - if (pckCpuSvns[i] < tcbCpuSvns[i]) { - return false; - } - } - return true; - } - - function _verifyCertChain(IPEMCertChainLib.ECSha256Certificate[] memory certs) - private - view - returns (bool) - { - uint256 n = certs.length; - bool certRevoked; - bool certNotExpired; - bool verified; - bool certChainCanBeTrusted; - - for (uint256 i; i < n; ++i) { - IPEMCertChainLib.ECSha256Certificate memory issuer; - if (i == n - 1) { - // rootCA - issuer = certs[i]; - } else { - issuer = certs[i + 1]; - if (i == n - 2) { - // this cert is expected to be signed by the root - certRevoked = serialNumIsRevoked[uint256(IPEMCertChainLib.CRL.ROOT)][certs[i] - .serialNumber]; - } else if (certs[i].isPck) { - certRevoked = - 
serialNumIsRevoked[uint256(IPEMCertChainLib.CRL.PCK)][certs[i].serialNumber]; - } - if (certRevoked) { - break; - } - } - - certNotExpired = - block.timestamp > certs[i].notBefore && block.timestamp < certs[i].notAfter; - if (!certNotExpired) { - break; - } - - verified = sigVerifyLib.verifyES256Signature( - certs[i].tbsCertificate, certs[i].signature, issuer.pubKey - ); - if (!verified) { - break; - } - - bytes32 issuerPubKeyHash = keccak256(issuer.pubKey); - - if (issuerPubKeyHash == ROOTCA_PUBKEY_HASH) { - certChainCanBeTrusted = true; - break; - } - } - - return !certRevoked && certNotExpired && verified && certChainCanBeTrusted; - } - - function _enclaveReportSigVerification( - bytes memory pckCertPubKey, - bytes memory signedQuoteData, - V3Struct.ECDSAQuoteV3AuthData memory authDataV3, - V3Struct.EnclaveReport memory qeEnclaveReport - ) - private - view - returns (bool) - { - bytes32 expectedAuthDataHash = bytes32(qeEnclaveReport.reportData.substring(0, 32)); - bytes memory concatOfAttestKeyAndQeAuthData = - abi.encodePacked(authDataV3.ecdsaAttestationKey, authDataV3.qeAuthData.data); - bytes32 computedAuthDataHash = sha256(concatOfAttestKeyAndQeAuthData); - - bool qeReportDataIsValid = expectedAuthDataHash == computedAuthDataHash; - if (qeReportDataIsValid) { - bytes memory pckSignedQeReportBytes = - V3Parser.packQEReport(authDataV3.pckSignedQeReport); - bool qeSigVerified = sigVerifyLib.verifyES256Signature( - pckSignedQeReportBytes, authDataV3.qeReportSignature, pckCertPubKey - ); - bool quoteSigVerified = sigVerifyLib.verifyES256Signature( - signedQuoteData, authDataV3.ecdsa256BitSignature, authDataV3.ecdsaAttestationKey - ); - return qeSigVerified && quoteSigVerified; - } else { - return false; - } - } - - /// --------------- validate parsed quote --------------- - - /// @dev Provide the parsed quote binary as input - /// @dev The attestation data (or the returned data of this method) - /// is constructed depending on the validity of the quote verification. - /// @dev After confirming that a quote has been verified, the attestation's validity then - /// depends on the - /// status of the associated TCB. - /// @dev Example scenarios as below: - /// -------------------------------- - /// @dev Invalid quote verification: returns (false, INVALID_EXIT_CODE) - /// - /// @dev For all valid quote verification, the validity of the attestation depends on the status - /// of a - /// matching TCBInfo and this is defined in the _attestationTcbIsValid() method, which can be - /// overwritten - /// in derived contracts. 
(Except for "Revoked" status, which also returns (false, - /// INVALID_EXIT_CODE) value) - /// @dev For all valid quote verification, returns the following data: - /// (_attestationTcbIsValid()) - /// @dev exitCode is defined in the {{ TCBInfoStruct.TCBStatus }} enum - function verifyParsedQuote(V3Struct.ParsedV3QuoteStruct calldata v3quote) - external - view - override - returns (bool, bytes memory) - { - return _verifyParsedQuote(v3quote); - } - - function _verifyParsedQuote(V3Struct.ParsedV3QuoteStruct memory v3quote) - internal - view - returns (bool, bytes memory) - { - bytes memory retData = abi.encodePacked(INVALID_EXIT_CODE); - - // // Step 1: Parse the quote input = 152k gas - ( - bool successful, - , - , - bytes memory signedQuoteData, - V3Struct.ECDSAQuoteV3AuthData memory authDataV3 - ) = V3Parser.validateParsedInput(v3quote); - if (!successful) { - return (false, retData); - } - - // Step 2: Verify application enclave report MRENCLAVE and MRSIGNER - { - if (checkLocalEnclaveReport) { - // 4k gas - bool mrEnclaveIsTrusted = trustedUserMrEnclave[v3quote.localEnclaveReport.mrEnclave]; - bool mrSignerIsTrusted = trustedUserMrSigner[v3quote.localEnclaveReport.mrSigner]; - - if (!mrEnclaveIsTrusted || !mrSignerIsTrusted) { - return (false, retData); - } - } - } - - // Step 3: Verify enclave identity = 43k gas - EnclaveIdStruct.EnclaveIdStatus qeTcbStatus; - { - bool verifiedEnclaveIdSuccessfully; - (verifiedEnclaveIdSuccessfully, qeTcbStatus) = - _verifyQEReportWithIdentity(v3quote.v3AuthData.pckSignedQeReport); - if (!verifiedEnclaveIdSuccessfully) { - return (false, retData); - } - if ( - !verifiedEnclaveIdSuccessfully - || qeTcbStatus == EnclaveIdStruct.EnclaveIdStatus.SGX_ENCLAVE_REPORT_ISVSVN_REVOKED - ) { - return (false, retData); - } - } - - // Step 4: Parse Quote CertChain - IPEMCertChainLib.ECSha256Certificate[] memory parsedQuoteCerts; - TCBInfoStruct.TCBInfo memory fetchedTcbInfo; - { - // 536k gas - parsedQuoteCerts = new IPEMCertChainLib.ECSha256Certificate[](3); - for (uint256 i; i < 3; ++i) { - bool isPckCert = i == 0; // additional parsing for PCKCert - bool certDecodedSuccessfully; - // todo! 
move decodeCert offchain - (certDecodedSuccessfully, parsedQuoteCerts[i]) = pemCertLib.decodeCert( - authDataV3.certification.decodedCertDataArray[i], isPckCert - ); - if (!certDecodedSuccessfully) { - return (false, retData); - } - } - } - - // Step 5: basic PCK and TCB check = 381k gas - { - string memory parsedFmspc = parsedQuoteCerts[0].pck.sgxExtension.fmspc; - fetchedTcbInfo = tcbInfo[parsedFmspc]; - bool tcbConfigured = LibString.eq(parsedFmspc, fetchedTcbInfo.fmspc); - if (!tcbConfigured) { - return (false, retData); - } - - IPEMCertChainLib.ECSha256Certificate memory pckCert = parsedQuoteCerts[0]; - bool pceidMatched = LibString.eq(pckCert.pck.sgxExtension.pceid, fetchedTcbInfo.pceid); - if (!pceidMatched) { - return (false, retData); - } - } - - // Step 6: Verify TCB Level - TCBInfoStruct.TCBStatus tcbStatus; - { - // 4k gas - bool tcbVerified; - (tcbVerified, tcbStatus) = _checkTcbLevels(parsedQuoteCerts[0].pck, fetchedTcbInfo); - if (!tcbVerified) { - return (false, retData); - } - } - - // Step 7: Verify cert chain for PCK - { - // 660k gas (rootCA pubkey is trusted) - bool pckCertChainVerified = _verifyCertChain(parsedQuoteCerts); - if (!pckCertChainVerified) { - return (false, retData); - } - } - - // Step 8: Verify the local attestation sig and qe report sig = 670k gas - { - bool enclaveReportSigsVerified = _enclaveReportSigVerification( - parsedQuoteCerts[0].pubKey, - signedQuoteData, - authDataV3, - v3quote.v3AuthData.pckSignedQeReport - ); - if (!enclaveReportSigsVerified) { - return (false, retData); - } - } - - retData = abi.encodePacked(sha256(abi.encode(v3quote)), tcbStatus); - - return (_attestationTcbIsValid(tcbStatus), retData); - } -} diff --git a/packages/protocol/contracts/automata-attestation/README.md b/packages/protocol/contracts/automata-attestation/README.md deleted file mode 100644 index 448c4bcd42fb..000000000000 --- a/packages/protocol/contracts/automata-attestation/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Readme - -Original code (main branch) forked from https://github.com/automata-network/automata-dcap-v3-attestation and applied some gas optimizations here: https://github.com/smtmfft/automata-dcap-v3-attestation/tree/parse-quote-offline, which then got merged into taiko-mono. -The corresponding upstream PR is: https://github.com/automata-network/automata-dcap-v3-attestation/pull/6, waiting to be merged. -Atomata's attestation shall be 100% identical to taiko-mono's attestation code at this point. 
diff --git a/packages/protocol/contracts/automata-attestation/interfaces/IAttestation.sol b/packages/protocol/contracts/automata-attestation/interfaces/IAttestation.sol deleted file mode 100644 index 7f918de6e2ca..000000000000 --- a/packages/protocol/contracts/automata-attestation/interfaces/IAttestation.sol +++ /dev/null @@ -1,13 +0,0 @@ -//SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import { V3Struct } from "../lib/QuoteV3Auth/V3Struct.sol"; - -/// @title IAttestation -/// @custom:security-contact security@taiko.xyz -interface IAttestation { - function verifyAttestation(bytes calldata data) external returns (bool); - function verifyParsedQuote(V3Struct.ParsedV3QuoteStruct calldata v3quote) - external - returns (bool success, bytes memory retData); -} diff --git a/packages/protocol/contracts/automata-attestation/interfaces/ISigVerifyLib.sol b/packages/protocol/contracts/automata-attestation/interfaces/ISigVerifyLib.sol deleted file mode 100644 index 5f407625cadc..000000000000 --- a/packages/protocol/contracts/automata-attestation/interfaces/ISigVerifyLib.sol +++ /dev/null @@ -1,15 +0,0 @@ -//SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title ISigVerifyLib -/// @custom:security-contact security@taiko.xyz -interface ISigVerifyLib { - function verifyES256Signature( - bytes memory tbs, - bytes memory signature, - bytes memory publicKey - ) - external - view - returns (bool sigValid); -} diff --git a/packages/protocol/contracts/automata-attestation/lib/EnclaveIdStruct.sol b/packages/protocol/contracts/automata-attestation/lib/EnclaveIdStruct.sol deleted file mode 100644 index 3e889e084e57..000000000000 --- a/packages/protocol/contracts/automata-attestation/lib/EnclaveIdStruct.sol +++ /dev/null @@ -1,30 +0,0 @@ -//SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title EnclaveIdStruct -/// @custom:security-contact security@taiko.xyz -library EnclaveIdStruct { - struct EnclaveId { - bytes4 miscselect; // Slot 1: - bytes4 miscselectMask; - uint16 isvprodid; - bytes16 attributes; // Slot 2 - bytes16 attributesMask; - bytes32 mrsigner; // Slot 3 - TcbLevel[] tcbLevels; // Slot 4 - } - - struct TcbLevel { - TcbObj tcb; - EnclaveIdStatus tcbStatus; - } - - struct TcbObj { - uint16 isvsvn; - } - - enum EnclaveIdStatus { - OK, - SGX_ENCLAVE_REPORT_ISVSVN_REVOKED - } -} diff --git a/packages/protocol/contracts/automata-attestation/lib/PEMCertChainLib.sol b/packages/protocol/contracts/automata-attestation/lib/PEMCertChainLib.sol deleted file mode 100644 index f69c3e0200c9..000000000000 --- a/packages/protocol/contracts/automata-attestation/lib/PEMCertChainLib.sol +++ /dev/null @@ -1,375 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import { LibString } from "solady/src/utils/LibString.sol"; -import { Asn1Decode, NodePtr } from "../utils/Asn1Decode.sol"; -import { BytesUtils } from "../utils/BytesUtils.sol"; -import { X509DateUtils } from "../utils/X509DateUtils.sol"; -import { IPEMCertChainLib } from "./interfaces/IPEMCertChainLib.sol"; - -/// @title PEMCertChainLib -/// @custom:security-contact security@taiko.xyz -contract PEMCertChainLib is IPEMCertChainLib { - using Asn1Decode for bytes; - using NodePtr for uint256; - using BytesUtils for bytes; - - string internal constant HEADER = "-----BEGIN CERTIFICATE-----"; - string internal constant FOOTER = "-----END CERTIFICATE-----"; - uint256 internal constant HEADER_LENGTH = 27; - uint256 internal constant FOOTER_LENGTH = 25; - - string internal constant PCK_COMMON_NAME = "Intel SGX PCK 
Certificate"; - string internal constant PLATFORM_ISSUER_NAME = "Intel SGX PCK Platform CA"; - string internal constant PROCESSOR_ISSUER_NAME = "Intel SGX PCK Processor CA"; - bytes internal constant SGX_EXTENSION_OID = hex"2A864886F84D010D01"; - bytes internal constant TCB_OID = hex"2A864886F84D010D0102"; - bytes internal constant PCESVN_OID = hex"2A864886F84D010D010211"; - bytes internal constant PCEID_OID = hex"2A864886F84D010D0103"; - bytes internal constant FMSPC_OID = hex"2A864886F84D010D0104"; - - // https://github.com/intel/SGXDataCenterAttestationPrimitives/blob/e7604e02331b3377f3766ed3653250e03af72d45/QuoteVerification/QVL/Src/AttestationLibrary/src/CertVerification/X509Constants.h#L64 - uint256 constant SGX_TCB_CPUSVN_SIZE = 16; - - struct PCKTCBFlags { - bool fmspcFound; - bool pceidFound; - bool tcbFound; - } - - function splitCertificateChain( - bytes memory pemChain, - uint256 size - ) - external - pure - returns (bool success, bytes[] memory certs) - { - certs = new bytes[](size); - string memory pemChainStr = string(pemChain); - - uint256 index = 0; - uint256 len = pemChain.length; - - for (uint256 i; i < size; ++i) { - string memory input; - if (i != 0) { - input = LibString.slice(pemChainStr, index, index + len); - } else { - input = pemChainStr; - } - uint256 increment; - (success, certs[i], increment) = _removeHeadersAndFooters(input); - - if (!success) { - return (false, certs); - } - - index += increment; - } - - success = true; - } - - function decodeCert( - bytes memory der, - bool isPckCert - ) - external - pure - returns (bool success, ECSha256Certificate memory cert) - { - uint256 root = der.root(); - - // Entering tbsCertificate sequence - uint256 tbsParentPtr = der.firstChildOf(root); - - // Begin iterating through the descendants of tbsCertificate - uint256 tbsPtr = der.firstChildOf(tbsParentPtr); - - // The Serial Number is located one element below Version - - // The issuer commonName value is contained in the Issuer sequence - // which is 3 elements below the first element of the tbsCertificate sequence - - // The Validity sequence is located 4 elements below the first element of the tbsCertificate - // sequence - - // The subject commanName value is contained in the Subject sequence - // which is 5 elements below the first element of the tbsCertificate sequence - - // The PublicKey is located in the second element of subjectPublicKeyInfo sequence - // which is 6 elements below the first element of the tbsCertificate sequence - - tbsPtr = der.nextSiblingOf(tbsPtr); - - { - bytes memory serialNumBytes = der.bytesAt(tbsPtr); - cert.serialNumber = serialNumBytes; - } - - tbsPtr = der.nextSiblingOf(tbsPtr); - tbsPtr = der.nextSiblingOf(tbsPtr); - - if (isPckCert) { - uint256 issuerPtr = der.firstChildOf(tbsPtr); - issuerPtr = der.firstChildOf(issuerPtr); - issuerPtr = der.firstChildOf(issuerPtr); - issuerPtr = der.nextSiblingOf(issuerPtr); - cert.pck.issuerName = string(der.bytesAt(issuerPtr)); - bool issuerNameIsValid = LibString.eq(cert.pck.issuerName, PLATFORM_ISSUER_NAME) - || LibString.eq(cert.pck.issuerName, PROCESSOR_ISSUER_NAME); - if (!issuerNameIsValid) { - return (false, cert); - } - } - - tbsPtr = der.nextSiblingOf(tbsPtr); - - { - uint256 notBeforePtr = der.firstChildOf(tbsPtr); - uint256 notAfterPtr = der.nextSiblingOf(notBeforePtr); - bytes1 notBeforeTag = der[notBeforePtr.ixs()]; - bytes1 notAfterTag = der[notAfterPtr.ixs()]; - if ( - (notBeforeTag != 0x17 && notBeforeTag != 0x18) - || (notAfterTag != 0x17 && notAfterTag != 0x18) - ) { - return 
(false, cert); - } - cert.notBefore = X509DateUtils.toTimestamp(der.bytesAt(notBeforePtr)); - cert.notAfter = X509DateUtils.toTimestamp(der.bytesAt(notAfterPtr)); - } - - tbsPtr = der.nextSiblingOf(tbsPtr); - - if (isPckCert) { - uint256 subjectPtr = der.firstChildOf(tbsPtr); - subjectPtr = der.firstChildOf(subjectPtr); - subjectPtr = der.firstChildOf(subjectPtr); - subjectPtr = der.nextSiblingOf(subjectPtr); - cert.pck.commonName = string(der.bytesAt(subjectPtr)); - if (!LibString.eq(cert.pck.commonName, PCK_COMMON_NAME)) { - return (false, cert); - } - } - - tbsPtr = der.nextSiblingOf(tbsPtr); - - { - // Entering subjectPublicKeyInfo sequence - uint256 subjectPublicKeyInfoPtr = der.firstChildOf(tbsPtr); - subjectPublicKeyInfoPtr = der.nextSiblingOf(subjectPublicKeyInfoPtr); - - // The Signature sequence is located two sibling elements below the tbsCertificate - // element - uint256 sigPtr = der.nextSiblingOf(tbsParentPtr); - sigPtr = der.nextSiblingOf(sigPtr); - - // Skip three bytes to the right - // the three bytes in question: 0x034700 or 0x034800 or 0x034900 - sigPtr = NodePtr.getPtr(sigPtr.ixs() + 3, sigPtr.ixf() + 3, sigPtr.ixl()); - - sigPtr = der.firstChildOf(sigPtr); - bytes memory sigX = _trimBytes(der.bytesAt(sigPtr), 32); - - sigPtr = der.nextSiblingOf(sigPtr); - bytes memory sigY = _trimBytes(der.bytesAt(sigPtr), 32); - - cert.tbsCertificate = der.allBytesAt(tbsParentPtr); - cert.pubKey = _trimBytes(der.bytesAt(subjectPublicKeyInfoPtr), 64); - cert.signature = abi.encodePacked(sigX, sigY); - } - - if (isPckCert) { - // entering Extension sequence - tbsPtr = der.nextSiblingOf(tbsPtr); - - // check for the extension tag - if (der[tbsPtr.ixs()] != 0xA3) { - return (false, cert); - } - - tbsPtr = der.firstChildOf(tbsPtr); - tbsPtr = der.firstChildOf(tbsPtr); - - bool sgxExtnTraversedSuccessfully; - uint256 pcesvn; - uint256[] memory cpuSvns; - bytes memory fmspcBytes; - bytes memory pceidBytes; - (sgxExtnTraversedSuccessfully, pcesvn, cpuSvns, fmspcBytes, pceidBytes) = - _findPckTcbInfo(der, tbsPtr, tbsParentPtr); - if (!sgxExtnTraversedSuccessfully) { - return (false, cert); - } - cert.pck.sgxExtension.pcesvn = pcesvn; - cert.pck.sgxExtension.sgxTcbCompSvnArr = cpuSvns; - cert.pck.sgxExtension.pceid = LibString.toHexStringNoPrefix(pceidBytes); - cert.pck.sgxExtension.fmspc = LibString.toHexStringNoPrefix(fmspcBytes); - cert.isPck = true; - } - - success = true; - } - - function _removeHeadersAndFooters(string memory pemData) - private - pure - returns (bool success, bytes memory extracted, uint256 endIndex) - { - // Check if the input contains the "BEGIN" and "END" headers - uint256 beginPos = LibString.indexOf(pemData, HEADER); - uint256 endPos = LibString.indexOf(pemData, FOOTER); - - bool headerFound = beginPos != LibString.NOT_FOUND; - bool footerFound = endPos != LibString.NOT_FOUND; - - if (!headerFound || !footerFound) { - return (false, extracted, endIndex); - } - - // Extract the content between the headers - uint256 contentStart = beginPos + HEADER_LENGTH; - - // Extract and return the content - bytes memory contentBytes; - - // do not include newline - bytes memory delimiter = hex"0a"; - string memory contentSlice = LibString.slice(pemData, contentStart, endPos); - string[] memory split = LibString.split(contentSlice, string(delimiter)); - string memory contentStr; - - for (uint256 i; i < split.length; ++i) { - contentStr = LibString.concat(contentStr, split[i]); - } - - contentBytes = bytes(contentStr); - return (true, contentBytes, endPos + FOOTER_LENGTH); - } - - 
function _trimBytes( - bytes memory input, - uint256 expectedLength - ) - private - pure - returns (bytes memory output) - { - uint256 n = input.length; - - if (n <= expectedLength) { - return input; - } - uint256 lengthDiff = n - expectedLength; - output = input.substring(lengthDiff, expectedLength); - } - - function _findPckTcbInfo( - bytes memory der, - uint256 tbsPtr, - uint256 tbsParentPtr - ) - private - pure - returns ( - bool success, - uint256 pcesvn, - uint256[] memory cpusvns, - bytes memory fmspcBytes, - bytes memory pceidBytes - ) - { - // iterate through the elements in the Extension sequence - // until we locate the SGX Extension OID - while (tbsPtr != 0) { - uint256 internalPtr = der.firstChildOf(tbsPtr); - if (der[internalPtr.ixs()] != 0x06) { - return (false, pcesvn, cpusvns, fmspcBytes, pceidBytes); - } - - if (BytesUtils.compareBytes(der.bytesAt(internalPtr), SGX_EXTENSION_OID)) { - // 1.2.840.113741.1.13.1 - internalPtr = der.nextSiblingOf(internalPtr); - uint256 extnValueParentPtr = der.rootOfOctetStringAt(internalPtr); - uint256 extnValuePtr = der.firstChildOf(extnValueParentPtr); - - // Copy flags to memory to avoid stack too deep - PCKTCBFlags memory flags; - - while (!(flags.fmspcFound && flags.pceidFound && flags.tcbFound)) { - uint256 extnValueOidPtr = der.firstChildOf(extnValuePtr); - if (der[extnValueOidPtr.ixs()] != 0x06) { - return (false, pcesvn, cpusvns, fmspcBytes, pceidBytes); - } - if (BytesUtils.compareBytes(der.bytesAt(extnValueOidPtr), TCB_OID)) { - // 1.2.840.113741.1.13.1.2 - (flags.tcbFound, pcesvn, cpusvns) = _findTcb(der, extnValueOidPtr); - } - if (BytesUtils.compareBytes(der.bytesAt(extnValueOidPtr), PCEID_OID)) { - // 1.2.840.113741.1.13.1.3 - uint256 pceidPtr = der.nextSiblingOf(extnValueOidPtr); - pceidBytes = der.bytesAt(pceidPtr); - flags.pceidFound = true; - } - if (BytesUtils.compareBytes(der.bytesAt(extnValueOidPtr), FMSPC_OID)) { - // 1.2.840.113741.1.13.1.4 - uint256 fmspcPtr = der.nextSiblingOf(extnValueOidPtr); - fmspcBytes = der.bytesAt(fmspcPtr); - flags.fmspcFound = true; - } - - if (extnValuePtr.ixl() < extnValueParentPtr.ixl()) { - extnValuePtr = der.nextSiblingOf(extnValuePtr); - } else { - break; - } - } - success = flags.fmspcFound && flags.pceidFound && flags.tcbFound; - break; - } - - if (tbsPtr.ixl() < tbsParentPtr.ixl()) { - tbsPtr = der.nextSiblingOf(tbsPtr); - } else { - tbsPtr = 0; // exit - } - } - } - - function _findTcb( - bytes memory der, - uint256 oidPtr - ) - private - pure - returns (bool success, uint256 pcesvn, uint256[] memory cpusvns) - { - // sibling of tcbOid - uint256 tcbPtr = der.nextSiblingOf(oidPtr); - // get the first svn object in the sequence - uint256 svnParentPtr = der.firstChildOf(tcbPtr); - cpusvns = new uint256[](SGX_TCB_CPUSVN_SIZE); - for (uint256 i; i < SGX_TCB_CPUSVN_SIZE + 1; ++i) { - uint256 svnPtr = der.firstChildOf(svnParentPtr); // OID - uint256 svnValuePtr = der.nextSiblingOf(svnPtr); // value - bytes memory svnValueBytes = der.bytesAt(svnValuePtr); - uint16 svnValue = svnValueBytes.length < 2 - ? 
uint16(bytes2(svnValueBytes)) / 256 - : uint16(bytes2(svnValueBytes)); - if (BytesUtils.compareBytes(der.bytesAt(svnPtr), PCESVN_OID)) { - // pcesvn is 4 bytes in size - pcesvn = uint256(svnValue); - } else { - // each cpusvn is at maximum two bytes in size - uint256 cpusvn = uint256(svnValue); - cpusvns[i] = cpusvn; - } - - // iterate to the next svn object in the sequence - svnParentPtr = der.nextSiblingOf(svnParentPtr); - } - success = true; - } -} diff --git a/packages/protocol/contracts/automata-attestation/lib/QuoteV3Auth/V3Parser.sol b/packages/protocol/contracts/automata-attestation/lib/QuoteV3Auth/V3Parser.sol deleted file mode 100644 index 4e574b7eec91..000000000000 --- a/packages/protocol/contracts/automata-attestation/lib/QuoteV3Auth/V3Parser.sol +++ /dev/null @@ -1,306 +0,0 @@ -//SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import { Base64 } from "solady/src/utils/Base64.sol"; -import { BytesUtils } from "../../utils/BytesUtils.sol"; -import { IPEMCertChainLib, PEMCertChainLib } from "../../lib/PEMCertChainLib.sol"; -import { V3Struct } from "./V3Struct.sol"; - -/// @title V3Parser -/// @custom:security-contact security@taiko.xyz -library V3Parser { - using BytesUtils for bytes; - - uint256 internal constant MINIMUM_QUOTE_LENGTH = 1020; - bytes2 internal constant SUPPORTED_QUOTE_VERSION = 0x0300; - bytes2 internal constant SUPPORTED_ATTESTATION_KEY_TYPE = 0x0200; - // SGX only - bytes4 internal constant SUPPORTED_TEE_TYPE = 0; - bytes16 internal constant VALID_QE_VENDOR_ID = 0x939a7233f79c4ca9940a0db3957f0607; - - error V3PARSER_INVALID_QUOTE_LENGTN(); - error V3PARSER_INVALID_QUOTE_MEMBER_LENGTN(); - error V3PARSER_INVALID_QEREPORT_LENGTN(); - error V3PARSER_UNSUPPORT_CERTIFICATION_TYPE(); - error V3PARSER_INVALID_CERTIFICATION_CHAIN_SIZE(); - error V3PARSER_INVALID_CERTIFICATION_CHAIN_DATA(); - error V3PARSER_INVALID_ECDSA_SIGNATURE(); - error V3PARSER_INVALID_QEAUTHDATA_SIZE(); - - function parseInput( - bytes memory quote, - address pemCertLibAddr - ) - internal - pure - returns (bool success, V3Struct.ParsedV3QuoteStruct memory v3ParsedQuote) - { - if (quote.length <= MINIMUM_QUOTE_LENGTH) { - return (false, v3ParsedQuote); - } - - uint256 localAuthDataSize = littleEndianDecode(quote.substring(432, 4)); - if (quote.length - 436 != localAuthDataSize) { - return (false, v3ParsedQuote); - } - - bytes memory rawHeader = quote.substring(0, 48); - (bool headerVerifiedSuccessfully, V3Struct.Header memory header) = - parseAndVerifyHeader(rawHeader); - if (!headerVerifiedSuccessfully) { - return (false, v3ParsedQuote); - } - - (bool authDataVerifiedSuccessfully, V3Struct.ECDSAQuoteV3AuthData memory authDataV3) = - parseAuthDataAndVerifyCertType(quote.substring(436, localAuthDataSize), pemCertLibAddr); - if (!authDataVerifiedSuccessfully) { - return (false, v3ParsedQuote); - } - - bytes memory rawLocalEnclaveReport = quote.substring(48, 384); - V3Struct.EnclaveReport memory localEnclaveReport = parseEnclaveReport(rawLocalEnclaveReport); - - v3ParsedQuote = V3Struct.ParsedV3QuoteStruct({ - header: header, - localEnclaveReport: localEnclaveReport, - v3AuthData: authDataV3 - }); - success = true; - } - - function validateParsedInput(V3Struct.ParsedV3QuoteStruct memory v3Quote) - internal - pure - returns ( - bool success, - V3Struct.Header memory header, - V3Struct.EnclaveReport memory localEnclaveReport, - bytes memory signedQuoteData, // concatenation of header and local enclave report bytes - V3Struct.ECDSAQuoteV3AuthData memory authDataV3 - ) - { - success = true; - 
localEnclaveReport = v3Quote.localEnclaveReport; - V3Struct.EnclaveReport memory pckSignedQeReport = v3Quote.v3AuthData.pckSignedQeReport; - - if ( - localEnclaveReport.reserved3.length != 96 || localEnclaveReport.reserved4.length != 60 - || localEnclaveReport.reportData.length != 64 - ) revert V3PARSER_INVALID_QUOTE_MEMBER_LENGTN(); - - if ( - pckSignedQeReport.reserved3.length != 96 || pckSignedQeReport.reserved4.length != 60 - || pckSignedQeReport.reportData.length != 64 - ) { - revert V3PARSER_INVALID_QEREPORT_LENGTN(); - } - - if (v3Quote.v3AuthData.certification.certType != 5) { - revert V3PARSER_UNSUPPORT_CERTIFICATION_TYPE(); - } - - if (v3Quote.v3AuthData.certification.decodedCertDataArray.length != 3) { - revert V3PARSER_INVALID_CERTIFICATION_CHAIN_SIZE(); - } - - if ( - v3Quote.v3AuthData.ecdsa256BitSignature.length != 64 - || v3Quote.v3AuthData.ecdsaAttestationKey.length != 64 - || v3Quote.v3AuthData.qeReportSignature.length != 64 - ) { - revert V3PARSER_INVALID_ECDSA_SIGNATURE(); - } - - if ( - v3Quote.v3AuthData.qeAuthData.parsedDataSize - != v3Quote.v3AuthData.qeAuthData.data.length - ) { - revert V3PARSER_INVALID_QEAUTHDATA_SIZE(); - } - - uint32 totalQuoteSize = 48 // header - + 384 // local QE report - + 64 // ecdsa256BitSignature - + 64 // ecdsaAttestationKey - + 384 // QE report - + 64 // qeReportSignature - + 2 // sizeof(v3Quote.v3AuthData.qeAuthData.parsedDataSize) - + v3Quote.v3AuthData.qeAuthData.parsedDataSize + 2 // sizeof(v3Quote.v3AuthData.certification.certType) - + 4 // sizeof(v3Quote.v3AuthData.certification.certDataSize) - + v3Quote.v3AuthData.certification.certDataSize; - if (totalQuoteSize <= MINIMUM_QUOTE_LENGTH) { - revert V3PARSER_INVALID_QUOTE_LENGTN(); - } - - header = v3Quote.header; - bytes memory headerBytes = abi.encodePacked( - header.version, - header.attestationKeyType, - header.teeType, - header.qeSvn, - header.pceSvn, - header.qeVendorId, - header.userData - ); - - signedQuoteData = abi.encodePacked(headerBytes, V3Parser.packQEReport(localEnclaveReport)); - authDataV3 = v3Quote.v3AuthData; - } - - function parseEnclaveReport(bytes memory rawEnclaveReport) - internal - pure - returns (V3Struct.EnclaveReport memory enclaveReport) - { - enclaveReport.cpuSvn = bytes16(rawEnclaveReport.substring(0, 16)); - enclaveReport.miscSelect = bytes4(rawEnclaveReport.substring(16, 4)); - enclaveReport.reserved1 = bytes28(rawEnclaveReport.substring(20, 28)); - enclaveReport.attributes = bytes16(rawEnclaveReport.substring(48, 16)); - enclaveReport.mrEnclave = bytes32(rawEnclaveReport.substring(64, 32)); - enclaveReport.reserved2 = bytes32(rawEnclaveReport.substring(96, 32)); - enclaveReport.mrSigner = bytes32(rawEnclaveReport.substring(128, 32)); - enclaveReport.reserved3 = rawEnclaveReport.substring(160, 96); - enclaveReport.isvProdId = uint16(littleEndianDecode(rawEnclaveReport.substring(256, 2))); - enclaveReport.isvSvn = uint16(littleEndianDecode(rawEnclaveReport.substring(258, 2))); - enclaveReport.reserved4 = rawEnclaveReport.substring(260, 60); - enclaveReport.reportData = rawEnclaveReport.substring(320, 64); - } - - function littleEndianDecode(bytes memory encoded) private pure returns (uint256 decoded) { - for (uint256 i; i < encoded.length; ++i) { - uint256 digits = uint256(uint8(bytes1(encoded[i]))); - uint256 upperDigit = digits / 16; - uint256 lowerDigit = digits % 16; - - uint256 acc = lowerDigit * (16 ** (2 * i)); - acc += upperDigit * (16 ** ((2 * i) + 1)); - - decoded += acc; - } - } - - function parseAndVerifyHeader(bytes memory rawHeader) - 
private - pure - returns (bool success, V3Struct.Header memory header) - { - bytes2 version = bytes2(rawHeader.substring(0, 2)); - if (version != SUPPORTED_QUOTE_VERSION) { - return (false, header); - } - - bytes2 attestationKeyType = bytes2(rawHeader.substring(2, 2)); - if (attestationKeyType != SUPPORTED_ATTESTATION_KEY_TYPE) { - return (false, header); - } - - bytes4 teeType = bytes4(rawHeader.substring(4, 4)); - if (teeType != SUPPORTED_TEE_TYPE) { - return (false, header); - } - - bytes16 qeVendorId = bytes16(rawHeader.substring(12, 16)); - if (qeVendorId != VALID_QE_VENDOR_ID) { - return (false, header); - } - - header = V3Struct.Header({ - version: version, - attestationKeyType: attestationKeyType, - teeType: teeType, - qeSvn: bytes2(rawHeader.substring(8, 2)), - pceSvn: bytes2(rawHeader.substring(10, 2)), - qeVendorId: qeVendorId, - userData: bytes20(rawHeader.substring(28, 20)) - }); - - success = true; - } - - function parseAuthDataAndVerifyCertType( - bytes memory rawAuthData, - address pemCertLibAddr - ) - private - pure - returns (bool success, V3Struct.ECDSAQuoteV3AuthData memory authDataV3) - { - V3Struct.QEAuthData memory qeAuthData; - qeAuthData.parsedDataSize = uint16(littleEndianDecode(rawAuthData.substring(576, 2))); - qeAuthData.data = rawAuthData.substring(578, qeAuthData.parsedDataSize); - - uint256 offset = 578 + qeAuthData.parsedDataSize; - V3Struct.CertificationData memory cert; - cert.certType = uint16(littleEndianDecode(rawAuthData.substring(offset, 2))); - if (cert.certType < 1 || cert.certType > 5) { - return (false, authDataV3); - } - offset += 2; - cert.certDataSize = uint32(littleEndianDecode(rawAuthData.substring(offset, 4))); - offset += 4; - bytes memory certData = rawAuthData.substring(offset, cert.certDataSize); - cert.decodedCertDataArray = parseCerificationChainBytes(certData, pemCertLibAddr); - - authDataV3.ecdsa256BitSignature = rawAuthData.substring(0, 64); - authDataV3.ecdsaAttestationKey = rawAuthData.substring(64, 64); - bytes memory rawQeReport = rawAuthData.substring(128, 384); - authDataV3.pckSignedQeReport = parseEnclaveReport(rawQeReport); - authDataV3.qeReportSignature = rawAuthData.substring(512, 64); - authDataV3.qeAuthData = qeAuthData; - authDataV3.certification = cert; - - success = true; - } - - /// enclaveReport to bytes for hash calculation. - /// the only difference between enclaveReport and packedQEReport is the - /// order of isvProdId and isvSvn. enclaveReport is in little endian, while - /// in bytes should be in big endian according to Intel spec. 
- /// @param enclaveReport enclave report - /// @return packedQEReport enclave report in bytes - function packQEReport(V3Struct.EnclaveReport memory enclaveReport) - internal - pure - returns (bytes memory packedQEReport) - { - uint16 isvProdIdPackBE = (enclaveReport.isvProdId >> 8) | (enclaveReport.isvProdId << 8); - uint16 isvSvnPackBE = (enclaveReport.isvSvn >> 8) | (enclaveReport.isvSvn << 8); - packedQEReport = abi.encodePacked( - enclaveReport.cpuSvn, - enclaveReport.miscSelect, - enclaveReport.reserved1, - enclaveReport.attributes, - enclaveReport.mrEnclave, - enclaveReport.reserved2, - enclaveReport.mrSigner, - enclaveReport.reserved3, - isvProdIdPackBE, - isvSvnPackBE, - enclaveReport.reserved4, - enclaveReport.reportData - ); - } - - function parseCerificationChainBytes( - bytes memory certBytes, - address pemCertLibAddr - ) - internal - pure - returns (bytes[3] memory certChainData) - { - IPEMCertChainLib pemCertLib = PEMCertChainLib(pemCertLibAddr); - IPEMCertChainLib.ECSha256Certificate[] memory parsedQuoteCerts; - (bool certParsedSuccessfully, bytes[] memory quoteCerts) = - pemCertLib.splitCertificateChain(certBytes, 3); - if (!certParsedSuccessfully) { - revert V3PARSER_INVALID_CERTIFICATION_CHAIN_DATA(); - } - parsedQuoteCerts = new IPEMCertChainLib.ECSha256Certificate[](3); - for (uint256 i; i < 3; ++i) { - quoteCerts[i] = Base64.decode(string(quoteCerts[i])); - } - - certChainData = [quoteCerts[0], quoteCerts[1], quoteCerts[2]]; - } -} diff --git a/packages/protocol/contracts/automata-attestation/lib/QuoteV3Auth/V3Struct.sol b/packages/protocol/contracts/automata-attestation/lib/QuoteV3Auth/V3Struct.sol deleted file mode 100644 index 3fbf799c8fc3..000000000000 --- a/packages/protocol/contracts/automata-attestation/lib/QuoteV3Auth/V3Struct.sol +++ /dev/null @@ -1,61 +0,0 @@ -//SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title V3Struct -/// @custom:security-contact security@taiko.xyz -library V3Struct { - struct Header { - bytes2 version; - bytes2 attestationKeyType; - bytes4 teeType; - bytes2 qeSvn; - bytes2 pceSvn; - bytes16 qeVendorId; - bytes20 userData; - } - - struct EnclaveReport { - bytes16 cpuSvn; - bytes4 miscSelect; - bytes28 reserved1; - bytes16 attributes; - bytes32 mrEnclave; - bytes32 reserved2; - bytes32 mrSigner; - bytes reserved3; // 96 bytes - uint16 isvProdId; - uint16 isvSvn; - bytes reserved4; // 60 bytes - bytes reportData; // 64 bytes - For QEReports, this contains the hash of the concatenation - // of attestation key and QEAuthData - } - - struct QEAuthData { - uint16 parsedDataSize; - bytes data; - } - - struct CertificationData { - uint16 certType; - // todo! In encoded path, we need to calculate the size of certDataArray - // certDataSize = len(join((BEGIN_CERT, certArray[i], END_CERT) for i in 0..3)) - // But for plain bytes path, we don't need that. 
- uint32 certDataSize; - bytes[3] decodedCertDataArray; // base64 decoded cert bytes array - } - - struct ECDSAQuoteV3AuthData { - bytes ecdsa256BitSignature; // 64 bytes - bytes ecdsaAttestationKey; // 64 bytes - EnclaveReport pckSignedQeReport; // 384 bytes - bytes qeReportSignature; // 64 bytes - QEAuthData qeAuthData; - CertificationData certification; - } - - struct ParsedV3QuoteStruct { - Header header; - EnclaveReport localEnclaveReport; - ECDSAQuoteV3AuthData v3AuthData; - } -} diff --git a/packages/protocol/contracts/automata-attestation/lib/TCBInfoStruct.sol b/packages/protocol/contracts/automata-attestation/lib/TCBInfoStruct.sol deleted file mode 100644 index f40c05bdef58..000000000000 --- a/packages/protocol/contracts/automata-attestation/lib/TCBInfoStruct.sol +++ /dev/null @@ -1,29 +0,0 @@ -//SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title TCBInfoStruct -/// @custom:security-contact security@taiko.xyz -library TCBInfoStruct { - struct TCBInfo { - string pceid; - string fmspc; - TCBLevelObj[] tcbLevels; - } - - struct TCBLevelObj { - uint256 pcesvn; - uint8[] sgxTcbCompSvnArr; - TCBStatus status; - } - - enum TCBStatus { - OK, - TCB_SW_HARDENING_NEEDED, - TCB_CONFIGURATION_AND_SW_HARDENING_NEEDED, - TCB_CONFIGURATION_NEEDED, - TCB_OUT_OF_DATE, - TCB_OUT_OF_DATE_CONFIGURATION_NEEDED, - TCB_REVOKED, - TCB_UNRECOGNIZED - } -} diff --git a/packages/protocol/contracts/automata-attestation/lib/interfaces/IPEMCertChainLib.sol b/packages/protocol/contracts/automata-attestation/lib/interfaces/IPEMCertChainLib.sol deleted file mode 100644 index 7f728558bda8..000000000000 --- a/packages/protocol/contracts/automata-attestation/lib/interfaces/IPEMCertChainLib.sol +++ /dev/null @@ -1,51 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title IPEMCertChainLib -/// @custom:security-contact security@taiko.xyz -interface IPEMCertChainLib { - struct ECSha256Certificate { - uint256 notBefore; - uint256 notAfter; - bytes serialNumber; - bytes tbsCertificate; - bytes pubKey; - bytes signature; - bool isPck; - PCKCertificateField pck; - } - - struct PCKCertificateField { - string commonName; - string issuerName; - PCKTCBInfo sgxExtension; - } - - struct PCKTCBInfo { - string pceid; - string fmspc; - uint256 pcesvn; - uint256[] sgxTcbCompSvnArr; - } - - enum CRL { - PCK, - ROOT - } - - function splitCertificateChain( - bytes memory pemChain, - uint256 size - ) - external - pure - returns (bool success, bytes[] memory certs); - - function decodeCert( - bytes memory der, - bool isPckCert - ) - external - pure - returns (bool success, ECSha256Certificate memory cert); -} diff --git a/packages/protocol/contracts/automata-attestation/utils/Asn1Decode.sol b/packages/protocol/contracts/automata-attestation/utils/Asn1Decode.sol deleted file mode 100644 index 94b165c6c76b..000000000000 --- a/packages/protocol/contracts/automata-attestation/utils/Asn1Decode.sol +++ /dev/null @@ -1,134 +0,0 @@ -// SPDX-License-Identifier: MIT -// Original source: https://github.com/JonahGroendal/asn1-decode -pragma solidity 0.8.24; - -// Inspired by PufferFinance/rave - Apache-2.0 license -// https://github.com/JonahGroendal/asn1-decode/blob/5c2d1469fc678513753786acb441e597969192ec/contracts/Asn1Decode.sol - -import "./BytesUtils.sol"; - -/// @title NodePtr -/// @custom:security-contact security@taiko.xyz -library NodePtr { - // Unpack first byte index - function ixs(uint256 self) internal pure returns (uint256) { - return uint80(self); - } - - // Unpack first content byte index - 
function ixf(uint256 self) internal pure returns (uint256) { - return uint80(self >> 80); - } - - // Unpack last content byte index - function ixl(uint256 self) internal pure returns (uint256) { - return uint80(self >> 160); - } - - // Pack 3 uint80s into a uint256 - function getPtr(uint256 _ixs, uint256 _ixf, uint256 _ixl) internal pure returns (uint256) { - _ixs |= _ixf << 80; - _ixs |= _ixl << 160; - return _ixs; - } -} - -/// @title Asn1Decode -/// @custom:security-contact security@taiko.xyz -library Asn1Decode { - using NodePtr for uint256; - using BytesUtils for bytes; - - /* - * @dev Get the root node. First step in traversing an ASN1 structure - * @param der The DER-encoded ASN1 structure - * @return A pointer to the outermost node - */ - function root(bytes memory der) internal pure returns (uint256) { - return _readNodeLength(der, 0); - } - - /* - * @dev Get the root node of an ASN1 structure that's within an octet string value - * @param der The DER-encoded ASN1 structure - * @return A pointer to the outermost node - */ - function rootOfOctetStringAt(bytes memory der, uint256 ptr) internal pure returns (uint256) { - require(der[ptr.ixs()] == 0x04, "Not type OCTET STRING"); - return _readNodeLength(der, ptr.ixf()); - } - - /* - * @dev Get the next sibling node - * @param der The DER-encoded ASN1 structure - * @param ptr Points to the indices of the current node - * @return A pointer to the next sibling node - */ - function nextSiblingOf(bytes memory der, uint256 ptr) internal pure returns (uint256) { - return _readNodeLength(der, ptr.ixl() + 1); - } - - /* - * @dev Get the first child node of the current node - * @param der The DER-encoded ASN1 structure - * @param ptr Points to the indices of the current node - * @return A pointer to the first child node - */ - function firstChildOf(bytes memory der, uint256 ptr) internal pure returns (uint256) { - require(der[ptr.ixs()] & 0x20 == 0x20, "Not a constructed type"); - return _readNodeLength(der, ptr.ixf()); - } - - /* - * @dev Extract value of node from DER-encoded structure - * @param der The der-encoded ASN1 structure - * @param ptr Points to the indices of the current node - * @return Value bytes of node - */ - function bytesAt(bytes memory der, uint256 ptr) internal pure returns (bytes memory) { - return der.substring(ptr.ixf(), ptr.ixl() + 1 - ptr.ixf()); - } - - /* - * @dev Extract entire node from DER-encoded structure - * @param der The DER-encoded ASN1 structure - * @param ptr Points to the indices of the current node - * @return All bytes of node - */ - function allBytesAt(bytes memory der, uint256 ptr) internal pure returns (bytes memory) { - return der.substring(ptr.ixs(), ptr.ixl() + 1 - ptr.ixs()); - } - - function keccakOfBytesAt(bytes memory der, uint256 ptr) internal pure returns (bytes32) { - return der.keccak(ptr.ixf(), ptr.ixl() + 1 - ptr.ixf()); - } - - function keccakOfAllBytesAt(bytes memory der, uint256 ptr) internal pure returns (bytes32) { - return der.keccak(ptr.ixs(), ptr.ixl() + 1 - ptr.ixs()); - } - - function _readNodeLength(bytes memory der, uint256 ix) private pure returns (uint256) { - uint256 length; - uint80 ixFirstContentByte; - uint80 ixLastContentByte; - if ((der[ix + 1] & 0x80) == 0) { - length = uint8(der[ix + 1]); - ixFirstContentByte = uint80(ix + 2); - ixLastContentByte = uint80(ixFirstContentByte + length - 1); - } else { - uint8 lengthbytesLength = uint8(der[ix + 1] & 0x7F); - if (lengthbytesLength == 1) { - length = der.readUint8(ix + 2); - } else if (lengthbytesLength == 2) { - length 
= der.readUint16(ix + 2); - } else { - length = uint256( - der.readBytesN(ix + 2, lengthbytesLength) >> (32 - lengthbytesLength) * 8 - ); - } - ixFirstContentByte = uint80(ix + 2 + lengthbytesLength); - ixLastContentByte = uint80(ixFirstContentByte + length - 1); - } - return NodePtr.getPtr(ix, ixFirstContentByte, ixLastContentByte); - } -} diff --git a/packages/protocol/contracts/automata-attestation/utils/BytesUtils.sol b/packages/protocol/contracts/automata-attestation/utils/BytesUtils.sol deleted file mode 100644 index f1711842248c..000000000000 --- a/packages/protocol/contracts/automata-attestation/utils/BytesUtils.sol +++ /dev/null @@ -1,140 +0,0 @@ -// SPDX-License-Identifier: BSD 2-Clause License -pragma solidity 0.8.24; - -// Inspired by ensdomains/dnssec-oracle - BSD-2-Clause license -// https://github.com/ensdomains/dnssec-oracle/blob/master/contracts/BytesUtils.sol -/// @title BytesUtils -/// @custom:security-contact security@taiko.xyz -library BytesUtils { - /* - * @dev Returns the keccak-256 hash of a byte range. - * @param self The byte string to hash. - * @param offset The position to start hashing at. - * @param len The number of bytes to hash. - * @return The hash of the byte range. - */ - function keccak( - bytes memory self, - uint256 offset, - uint256 len - ) - internal - pure - returns (bytes32 ret) - { - require(offset + len <= self.length, "invalid offset"); - assembly { - ret := keccak256(add(add(self, 32), offset), len) - } - } - - /* - * @dev Returns true if the two byte ranges are equal. - * @param self The first byte range to compare. - * @param offset The offset into the first byte range. - * @param other The second byte range to compare. - * @param otherOffset The offset into the second byte range. - * @param len The number of bytes to compare - * @return true if the byte ranges are equal, false otherwise. - */ - function equals( - bytes memory self, - uint256 offset, - bytes memory other, - uint256 otherOffset, - uint256 len - ) - internal - pure - returns (bool) - { - return keccak(self, offset, len) == keccak(other, otherOffset, len); - } - - /* - * @dev Returns the 8-bit number at the specified index of self. - * @param self The byte string. - * @param idx The index into the bytes - * @return The specified 8 bits of the string, interpreted as an integer. - */ - function readUint8(bytes memory self, uint256 idx) internal pure returns (uint8 ret) { - return uint8(self[idx]); - } - - /* - * @dev Returns the 16-bit number at the specified index of self. - * @param self The byte string. - * @param idx The index into the bytes - * @return The specified 16 bits of the string, interpreted as an integer. - */ - function readUint16(bytes memory self, uint256 idx) internal pure returns (uint16 ret) { - require(idx + 2 <= self.length, "invalid idx"); - assembly { - ret := and(mload(add(add(self, 2), idx)), 0xFFFF) - } - } - - /* - * @dev Returns the n byte value at the specified index of self. - * @param self The byte string. - * @param idx The index into the bytes. - * @param len The number of bytes. - * @return The specified 32 bytes of the string. 
- */ - function readBytesN( - bytes memory self, - uint256 idx, - uint256 len - ) - internal - pure - returns (bytes32 ret) - { - require(len <= 32, "unexpected len"); - require(idx + len <= self.length, "unexpected idx"); - assembly { - let mask := not(sub(exp(256, sub(32, len)), 1)) - ret := and(mload(add(add(self, 32), idx)), mask) - } - } - - function memcpy(uint256 dest, uint256 src, uint256 len) private pure { - assembly { - mcopy(dest, src, len) - } - } - - /* - * @dev Copies a substring into a new byte string. - * @param self The byte string to copy from. - * @param offset The offset to start copying at. - * @param len The number of bytes to copy. - */ - function substring( - bytes memory self, - uint256 offset, - uint256 len - ) - internal - pure - returns (bytes memory) - { - require(offset + len <= self.length, "unexpected offset"); - - bytes memory ret = new bytes(len); - uint256 dest; - uint256 src; - - assembly { - dest := add(ret, 32) - src := add(add(self, 32), offset) - } - memcpy(dest, src, len); - - return ret; - } - - function compareBytes(bytes memory a, bytes memory b) internal pure returns (bool) { - return keccak256(a) == keccak256(b); - } -} diff --git a/packages/protocol/contracts/automata-attestation/utils/SHA1.sol b/packages/protocol/contracts/automata-attestation/utils/SHA1.sol deleted file mode 100644 index 856e841f90a6..000000000000 --- a/packages/protocol/contracts/automata-attestation/utils/SHA1.sol +++ /dev/null @@ -1,195 +0,0 @@ -// SPDX-License-Identifier: BSD 2-Clause License - -pragma solidity 0.8.24; - -// Inspired by ensdomains/solsha1 - BSD 2-Clause License -// https://github.com/ensdomains/solsha1/blob/master/contracts/SHA1.sol - -/// @title SHA1 -/// @custom:security-contact security@taiko.xyz -library SHA1 { - function sha1(bytes memory data) internal pure returns (bytes20 ret) { - assembly { - // Get a safe scratch location - let scratch := mload(0x40) - - // Get the data length, and point data at the first byte - let len := mload(data) - data := add(data, 32) - - // Find the length after padding - let totallen := add(and(add(len, 1), 0xFFFFFFFFFFFFFFC0), 64) - switch lt(sub(totallen, len), 9) - case 1 { totallen := add(totallen, 64) } - - let h := 0x6745230100EFCDAB890098BADCFE001032547600C3D2E1F0 - - function readword(ptr, off, count) -> result { - result := 0 - if lt(off, count) { - result := mload(add(ptr, off)) - count := sub(count, off) - if lt(count, 32) { - let mask := not(sub(exp(256, sub(32, count)), 1)) - result := and(result, mask) - } - } - } - - for { let i := 0 } lt(i, totallen) { i := add(i, 64) } { - mstore(scratch, readword(data, i, len)) - mstore(add(scratch, 32), readword(data, add(i, 32), len)) - - // If we loaded the last byte, store the terminator byte - switch lt(sub(len, i), 64) - case 1 { mstore8(add(scratch, sub(len, i)), 0x80) } - - // If this is the last block, store the length - switch eq(i, sub(totallen, 64)) - case 1 { mstore(add(scratch, 32), or(mload(add(scratch, 32)), mul(len, 8))) } - - // Expand the 16 32-bit words into 80 - for { let j := 64 } lt(j, 128) { j := add(j, 12) } { - let temp := - xor( - xor(mload(add(scratch, sub(j, 12))), mload(add(scratch, sub(j, 32)))), - xor(mload(add(scratch, sub(j, 56))), mload(add(scratch, sub(j, 64)))) - ) - temp := - or( - and( - mul(temp, 2), - 0xFFFFFFFEFFFFFFFEFFFFFFFEFFFFFFFEFFFFFFFEFFFFFFFEFFFFFFFEFFFFFFFE - ), - and( - div(temp, 0x80000000), - 0x0000000100000001000000010000000100000001000000010000000100000001 - ) - ) - mstore(add(scratch, j), temp) - } - for { let 
j := 128 } lt(j, 320) { j := add(j, 24) } { - let temp := - xor( - xor(mload(add(scratch, sub(j, 24))), mload(add(scratch, sub(j, 64)))), - xor(mload(add(scratch, sub(j, 112))), mload(add(scratch, sub(j, 128)))) - ) - temp := - or( - and( - mul(temp, 4), - 0xFFFFFFFCFFFFFFFCFFFFFFFCFFFFFFFCFFFFFFFCFFFFFFFCFFFFFFFCFFFFFFFC - ), - and( - div(temp, 0x40000000), - 0x0000000300000003000000030000000300000003000000030000000300000003 - ) - ) - mstore(add(scratch, j), temp) - } - - let x := h - let f := 0 - let k := 0 - for { let j := 0 } lt(j, 80) { j := add(j, 1) } { - switch div(j, 20) - case 0 { - // f = d xor (b and (c xor d)) - f := xor(div(x, 0x100000000000000000000), div(x, 0x10000000000)) - f := and(div(x, 0x1000000000000000000000000000000), f) - f := xor(div(x, 0x10000000000), f) - k := 0x5A827999 - } - case 1 { - // f = b xor c xor d - f := - xor( - div(x, 0x1000000000000000000000000000000), - div(x, 0x100000000000000000000) - ) - f := xor(div(x, 0x10000000000), f) - k := 0x6ED9EBA1 - } - case 2 { - // f = (b and c) or (d and (b or c)) - f := - or( - div(x, 0x1000000000000000000000000000000), - div(x, 0x100000000000000000000) - ) - f := and(div(x, 0x10000000000), f) - f := - or( - and( - div(x, 0x1000000000000000000000000000000), - div(x, 0x100000000000000000000) - ), - f - ) - k := 0x8F1BBCDC - } - case 3 { - // f = b xor c xor d - f := - xor( - div(x, 0x1000000000000000000000000000000), - div(x, 0x100000000000000000000) - ) - f := xor(div(x, 0x10000000000), f) - k := 0xCA62C1D6 - } - // temp = (a leftrotate 5) + f + e + k + w[i] - let temp := and(div(x, 0x80000000000000000000000000000000000000000000000), 0x1F) - temp := - or(and(div(x, 0x800000000000000000000000000000000000000), 0xFFFFFFE0), temp) - temp := add(f, temp) - temp := add(and(x, 0xFFFFFFFF), temp) - temp := add(k, temp) - temp := - add( - div( - mload(add(scratch, mul(j, 4))), - 0x100000000000000000000000000000000000000000000000000000000 - ), - temp - ) - x := - or( - div(x, 0x10000000000), - mul(temp, 0x10000000000000000000000000000000000000000) - ) - x := - or( - and(x, 0xFFFFFFFF00FFFFFFFF000000000000FFFFFFFF00FFFFFFFF), - mul( - or( - and(div(x, 0x4000000000000), 0xC0000000), - and(div(x, 0x400000000000000000000), 0x3FFFFFFF) - ), - 0x100000000000000000000 - ) - ) - } - - h := and(add(h, x), 0xFFFFFFFF00FFFFFFFF00FFFFFFFF00FFFFFFFF00FFFFFFFF) - } - ret := - mul( - or( - or( - or( - or( - and(div(h, 0x100000000), 0xFFFFFFFF00000000000000000000000000000000), - and(div(h, 0x1000000), 0xFFFFFFFF000000000000000000000000) - ), - and(div(h, 0x10000), 0xFFFFFFFF0000000000000000) - ), - and(div(h, 0x100), 0xFFFFFFFF00000000) - ), - and(h, 0xFFFFFFFF) - ), - 0x1000000000000000000000000 - ) - } - } -} diff --git a/packages/protocol/contracts/automata-attestation/utils/SigVerifyLib.sol b/packages/protocol/contracts/automata-attestation/utils/SigVerifyLib.sol deleted file mode 100644 index 80c99cb0b9ea..000000000000 --- a/packages/protocol/contracts/automata-attestation/utils/SigVerifyLib.sol +++ /dev/null @@ -1,48 +0,0 @@ -// SPDX-License-Identifier: GPL-3.0 -pragma solidity 0.8.24; - -import "../interfaces/ISigVerifyLib.sol"; -import "./BytesUtils.sol"; - -/// @title SigVerifyLib -/// @custom:security-contact security@taiko.xyz -// Library for verifying signatures -contract SigVerifyLib is ISigVerifyLib { - using BytesUtils for bytes; - - address private immutable __es256Verifier; - - constructor(address es256Verifier) { - __es256Verifier = es256Verifier; - } - - function verifyES256Signature( - bytes calldata tbs, - bytes 
calldata signature, - bytes calldata publicKey - ) - external - view - returns (bool sigValid) - { - // Parse signature - if (signature.length != 64) { - return false; - } - uint256 r = uint256(bytes32(signature.substring(0, 32))); - uint256 s = uint256(bytes32(signature.substring(32, 32))); - // Parse public key - if (publicKey.length != 64) { - return false; - } - uint256 gx = uint256(bytes32(publicKey.substring(0, 32))); - uint256 gy = uint256(bytes32(publicKey.substring(32, 32))); - - // Verify signature - bytes memory args = abi.encode(sha256(tbs), r, s, gx, gy); - (bool success, bytes memory ret) = __es256Verifier.staticcall(args); - assert(success); // never reverts, always returns 0 or 1 - - return abi.decode(ret, (uint256)) == 1; - } -} diff --git a/packages/protocol/contracts/automata-attestation/utils/X509DateUtils.sol b/packages/protocol/contracts/automata-attestation/utils/X509DateUtils.sol deleted file mode 100644 index dbbcb44ae717..000000000000 --- a/packages/protocol/contracts/automata-attestation/utils/X509DateUtils.sol +++ /dev/null @@ -1,77 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -/// @title X509DateUtils -/// @custom:security-contact security@taiko.xyz -library X509DateUtils { - function toTimestamp(bytes memory x509Time) internal pure returns (uint256) { - uint16 yrs; - uint8 mnths; - uint8 dys; - uint8 hrs; - uint8 mins; - uint8 secs; - uint8 offset; - - if (x509Time.length == 13) { - if (uint8(x509Time[0]) - 48 < 5) yrs += 2000; - else yrs += 1900; - } else { - yrs += (uint8(x509Time[0]) - 48) * 1000 + (uint8(x509Time[1]) - 48) * 100; - offset = 2; - } - yrs += (uint8(x509Time[offset + 0]) - 48) * 10 + uint8(x509Time[offset + 1]) - 48; - mnths = (uint8(x509Time[offset + 2]) - 48) * 10 + uint8(x509Time[offset + 3]) - 48; - dys += (uint8(x509Time[offset + 4]) - 48) * 10 + uint8(x509Time[offset + 5]) - 48; - hrs += (uint8(x509Time[offset + 6]) - 48) * 10 + uint8(x509Time[offset + 7]) - 48; - mins += (uint8(x509Time[offset + 8]) - 48) * 10 + uint8(x509Time[offset + 9]) - 48; - secs += (uint8(x509Time[offset + 10]) - 48) * 10 + uint8(x509Time[offset + 11]) - 48; - - return toUnixTimestamp(yrs, mnths, dys, hrs, mins, secs); - } - - function toUnixTimestamp( - uint16 year, - uint8 month, - uint8 day, - uint8 hour, - uint8 minute, - uint8 second - ) - internal - pure - returns (uint256) - { - uint256 timestamp = 0; - - for (uint16 i = 1970; i < year; ++i) { - if (isLeapYear(i)) { - timestamp += 31_622_400; // Leap year in seconds - } else { - timestamp += 31_536_000; // Normal year in seconds - } - } - - uint8[12] memory monthDays = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; - if (isLeapYear(year)) monthDays[1] = 29; - - for (uint8 i = 1; i < month; ++i) { - timestamp += uint256(monthDays[i - 1]) * 86_400; // Days in seconds - } - - timestamp += uint256(day - 1) * 86_400; // Days in seconds - timestamp += uint256(hour) * 3600; // Hours in seconds - timestamp += uint256(minute) * 60; // Minutes in seconds - timestamp += second; - - return timestamp; - } - - function isLeapYear(uint16 year) internal pure returns (bool) { - if (year % 4 != 0) return false; - if (year % 100 != 0) return true; - if (year % 400 != 0) return false; - return true; - } -} diff --git a/packages/protocol/contracts/bridge/Bridge.sol b/packages/protocol/contracts/bridge/Bridge.sol deleted file mode 100644 index 71ef33cc21db..000000000000 --- a/packages/protocol/contracts/bridge/Bridge.sol +++ /dev/null @@ -1,738 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma 
solidity 0.8.24; - -import "@openzeppelin/contracts-upgradeable/token/ERC20/extensions/ERC20VotesUpgradeable.sol"; -import "../common/EssentialContract.sol"; -import "../common/LibStrings.sol"; -import "../libs/LibAddress.sol"; -import "../libs/LibMath.sol"; -import "../signal/ISignalService.sol"; -import "./IBridge.sol"; -import "./IQuotaManager.sol"; - -/// @title Bridge -/// @notice See the documentation for {IBridge}. -/// @dev Labeled in AddressResolver as "bridge". Additionally, the code hash for the same address on -/// L1 and L2 may be different. -/// @custom:security-contact security@taiko.xyz -contract Bridge is EssentialContract, IBridge { - using Address for address; - using LibMath for uint256; - using LibAddress for address; - using LibAddress for address payable; - - struct ProcessingStats { - uint32 gasUsedInFeeCalc; - uint32 proofSize; - uint32 numCacheOps; - bool processedByRelayer; - } - - /// @dev A debug event for fine-tuning gas related constants in the future. - event MessageProcessed(bytes32 indexed msgHash, Message message, ProcessingStats stats); - - /// @dev The amount of gas that will be deducted from message.gasLimit before calculating the - /// invocation gas limit. This value should be fine-tuned with production data. - uint32 public constant GAS_RESERVE = 800_000; - - /// @dev The gas overhead for both receiving and invoking a message, as well as the proof - /// calldata cost. - /// This value should be fine-tuned with production data. - uint32 public constant GAS_OVERHEAD = 120_000; - - ///@dev The max proof size for a message to be processable by a relayer. - uint256 public constant RELAYER_MAX_PROOF_BYTES = 200_000; - - /// @dev The amount of gas not to charge fee per cache operation. - uint256 private constant _GAS_REFUND_PER_CACHE_OPERATION = 20_000; - - /// @dev The slot in transient storage of the call context. This is the keccak256 hash - /// of "bridge.ctx_slot" - bytes32 private constant _CTX_SLOT = - 0xe4ece82196de19aabe639620d7f716c433d1348f96ce727c9989a982dbadc2b9; - - /// @dev Gas limit for sending Ether. - // - EOA gas used is < 21000 - // - For Loopring smart wallet, gas used is about 23000 - // - For Argent smart wallet on Ethereum, gas used is about 24000 - // - For Gnosis Safe wallet, gas used is about 28000 - uint256 private constant _SEND_ETHER_GAS_LIMIT = 35_000; - - /// @dev Place holder value when not using transient storage - uint256 private constant _PLACEHOLDER = type(uint256).max; - - /// @notice The next message ID. - /// @dev Slot 1. - uint64 private __reserved1; - uint64 public nextMessageId; - - /// @notice Mapping to store the status of a message from its hash. - /// @dev Slot 2. - mapping(bytes32 msgHash => Status status) public messageStatus; - - /// @dev Slots 3 and 4 - Context private __ctx; - - /// @dev Slot 5. - uint256 private __reserved2; - - /// @dev Slot 6. 
- uint256 private __reserved3; - - uint256[44] private __gap; - - error B_INVALID_CHAINID(); - error B_INVALID_CONTEXT(); - error B_INVALID_FEE(); - error B_INVALID_GAS_LIMIT(); - error B_INVALID_STATUS(); - error B_INVALID_VALUE(); - error B_INSUFFICIENT_GAS(); - error B_MESSAGE_NOT_SENT(); - error B_OUT_OF_ETH_QUOTA(); - error B_PERMISSION_DENIED(); - error B_PROOF_TOO_LARGE(); - error B_RETRY_FAILED(); - error B_SIGNAL_NOT_RECEIVED(); - - modifier sameChain(uint64 _chainId) { - if (_chainId != block.chainid) revert B_INVALID_CHAINID(); - _; - } - - modifier diffChain(uint64 _chainId) { - if (_chainId == 0 || _chainId == block.chainid) revert B_INVALID_CHAINID(); - _; - } - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - } - - function init2() external onlyOwner reinitializer(2) { - // reset some previously used slots for future reuse - __reserved1 = 0; - __reserved2 = 0; - __reserved3 = 0; - } - - /// @notice Delegates a given token's voting power to the bridge itself. - /// @param _anyToken Any token that supports delegation. - function selfDelegate(address _anyToken) external nonZeroAddr(_anyToken) { - ERC20VotesUpgradeable(_anyToken).delegate(address(this)); - } - - /// @inheritdoc IBridge - function sendMessage(Message calldata _message) - external - payable - override - nonZeroAddr(_message.srcOwner) - nonZeroAddr(_message.destOwner) - diffChain(_message.destChainId) - whenNotPaused - nonReentrant - returns (bytes32 msgHash_, Message memory message_) - { - if (_message.gasLimit == 0) { - if (_message.fee != 0) revert B_INVALID_FEE(); - } else if (_invocationGasLimit(_message) == 0) { - revert B_INVALID_GAS_LIMIT(); - } - - // Check if the destination chain is enabled. - (bool destChainEnabled,) = isDestChainEnabled(_message.destChainId); - - // Verify destination chain. - if (!destChainEnabled) revert B_INVALID_CHAINID(); - - // Ensure the sent value matches the expected amount. - if (_message.value + _message.fee != msg.value) revert B_INVALID_VALUE(); - - message_ = _message; - - // Configure message details and send signal to indicate message sending. 
- message_.id = nextMessageId++; - message_.from = msg.sender; - message_.srcChainId = uint64(block.chainid); - - msgHash_ = hashMessage(message_); - - emit MessageSent(msgHash_, message_); - ISignalService(resolve(LibStrings.B_SIGNAL_SERVICE, false)).sendSignal(msgHash_); - } - - /// @inheritdoc IBridge - function recallMessage( - Message calldata _message, - bytes calldata _proof - ) - external - sameChain(_message.srcChainId) - diffChain(_message.destChainId) - whenNotPaused - nonReentrant - { - bytes32 msgHash = hashMessage(_message); - _checkStatus(msgHash, Status.NEW); - - address signalService = resolve(LibStrings.B_SIGNAL_SERVICE, false); - - if (!ISignalService(signalService).isSignalSent(address(this), msgHash)) { - revert B_MESSAGE_NOT_SENT(); - } - - _proveSignalReceived( - signalService, signalForFailedMessage(msgHash), _message.destChainId, _proof - ); - - _updateMessageStatus(msgHash, Status.RECALLED); - if (!_consumeEtherQuota(_message.value)) revert B_OUT_OF_ETH_QUOTA(); - - // Execute the recall logic based on the contract's support for the - // IRecallableSender interface - if (_message.from.supportsInterface(type(IRecallableSender).interfaceId)) { - _storeContext(msgHash, address(this), _message.srcChainId); - - // Perform recall - IRecallableSender(_message.from).onMessageRecalled{ value: _message.value }( - _message, msgHash - ); - - // Must reset the context after the message call - _resetContext(); - } else { - _message.srcOwner.sendEtherAndVerify(_message.value, _SEND_ETHER_GAS_LIMIT); - } - } - - /// @inheritdoc IBridge - /// @dev To ensure successful execution, we recommend this transaction's gas limit not to be - /// smaller than: - /// `(message.gasLimit - GAS_RESERVE) * 64 / 63 + GAS_RESERVE`, - /// Or we can use a simplified rule: `tx.gaslimit = message.gaslimit * 102%`. - function processMessage( - Message calldata _message, - bytes calldata _proof - ) - external - whenNotPaused - nonReentrant - returns (Status status_, StatusReason reason_) - { - uint256 gasStart = gasleft(); - - // same as `sameChain(_message.destChainId)` but without stack-too-deep - if (_message.destChainId != block.chainid) revert B_INVALID_CHAINID(); - - // same as `diffChain(_message.srcChainId)` but without stack-too-deep - if (_message.srcChainId == 0 || _message.srcChainId == block.chainid) { - revert B_INVALID_CHAINID(); - } - - ProcessingStats memory stats; - stats.processedByRelayer = msg.sender != _message.destOwner; - - // If the gas limit is set to zero, only the owner can process the message. - if (stats.processedByRelayer) { - if (_message.gasLimit == 0) revert B_PERMISSION_DENIED(); - if (_proof.length > RELAYER_MAX_PROOF_BYTES) revert B_PROOF_TOO_LARGE(); - } - - bytes32 msgHash = hashMessage(_message); - _checkStatus(msgHash, Status.NEW); - - address signalService = resolve(LibStrings.B_SIGNAL_SERVICE, false); - - stats.proofSize = uint32(_proof.length); - stats.numCacheOps = - _proveSignalReceived(signalService, msgHash, _message.srcChainId, _proof); - - if (!_consumeEtherQuota(_message.value + _message.fee)) revert B_OUT_OF_ETH_QUOTA(); - - uint256 refundAmount; - if (_unableToInvokeMessageCall(_message, signalService)) { - // Handle special addresses and message.data encoded function calldata that don't - // require or cannot proceed with actual invocation and mark message as DONE - refundAmount = _message.value; - status_ = Status.DONE; - reason_ = StatusReason.INVOCATION_PROHIBITED; - } else { - uint256 gasLimit = stats.processedByRelayer ? 
_invocationGasLimit(_message) : gasleft();
-
-            if (_invokeMessageCall(_message, msgHash, gasLimit, stats.processedByRelayer)) {
-                status_ = Status.DONE;
-                reason_ = StatusReason.INVOCATION_OK;
-            } else {
-                status_ = Status.RETRIABLE;
-                reason_ = StatusReason.INVOCATION_FAILED;
-            }
-        }
-
-        if (_message.fee != 0) {
-            refundAmount += _message.fee;
-
-            if (stats.processedByRelayer && _message.gasLimit != 0) {
-                unchecked {
-                    // The relayer (i.e. the message processor) is paid out of the fee; the
-                    // calculation is shown below.
-                    // The high-level formula is `gasCharged * block.basefee`, with some caveats.
-                    // The charge may be over- or under-estimated, for a few reasons:
-                    // - a rational relayer should simulate the transaction off-chain, so they
-                    //   know exactly whether it is profitable;
-                    // - the relayer needs a small buffer/margin, since operating the service has
-                    //   maintenance and infra costs.
-                    uint256 refund = stats.numCacheOps * _GAS_REFUND_PER_CACHE_OPERATION;
-                    // Account for the encoded message calldata cost, charging 16 gas per byte
-                    // (instead of checking each byte for zero vs. non-zero).
-                    stats.gasUsedInFeeCalc = uint32(
-                        GAS_OVERHEAD + gasStart + _messageCalldataCost(_message.data.length)
-                            - gasleft()
-                    );
-
-                    uint256 gasCharged = refund.max(stats.gasUsedInFeeCalc) - refund;
-                    uint256 maxFee = gasCharged * _message.fee / _message.gasLimit;
-                    uint256 baseFee = gasCharged * block.basefee;
-                    uint256 fee =
-                        (baseFee >= maxFee ? maxFee : (maxFee + baseFee) >> 1).min(_message.fee);
-
-                    refundAmount -= fee;
-                    msg.sender.sendEtherAndVerify(fee, _SEND_ETHER_GAS_LIMIT);
-                }
-            }
-        }
-
-        _message.destOwner.sendEtherAndVerify(refundAmount, _SEND_ETHER_GAS_LIMIT);
-
-        _updateMessageStatus(msgHash, status_);
-        emit MessageProcessed(msgHash, _message, stats);
-    }
-
-    /// @inheritdoc IBridge
-    function retryMessage(
-        Message calldata _message,
-        bool _isLastAttempt
-    )
-        external
-        sameChain(_message.destChainId)
-        diffChain(_message.srcChainId)
-        whenNotPaused
-        nonReentrant
-    {
-        bytes32 msgHash = hashMessage(_message);
-        _checkStatus(msgHash, Status.RETRIABLE);
-
-        if (!_consumeEtherQuota(_message.value)) revert B_OUT_OF_ETH_QUOTA();
-
-        bool succeeded;
-        if (_unableToInvokeMessageCall(_message, resolve(LibStrings.B_SIGNAL_SERVICE, false))) {
-            succeeded = _message.destOwner.sendEther(_message.value, _SEND_ETHER_GAS_LIMIT, "");
-        } else {
-            if ((_message.gasLimit == 0 || _isLastAttempt) && msg.sender != _message.destOwner) {
-                revert B_PERMISSION_DENIED();
-            }
-
-            // Attempt to invoke the messageCall.
- succeeded = _invokeMessageCall(_message, msgHash, gasleft(), false); - } - - if (succeeded) { - _updateMessageStatus(msgHash, Status.DONE); - } else if (_isLastAttempt) { - _updateMessageStatus(msgHash, Status.FAILED); - - ISignalService(resolve(LibStrings.B_SIGNAL_SERVICE, false)).sendSignal( - signalForFailedMessage(msgHash) - ); - } else { - revert B_RETRY_FAILED(); - } - } - - /// @inheritdoc IBridge - function failMessage(Message calldata _message) - external - sameChain(_message.destChainId) - diffChain(_message.srcChainId) - whenNotPaused - nonReentrant - { - if (msg.sender != _message.destOwner) revert B_PERMISSION_DENIED(); - - bytes32 msgHash = hashMessage(_message); - _checkStatus(msgHash, Status.RETRIABLE); - - _updateMessageStatus(msgHash, Status.FAILED); - ISignalService(resolve(LibStrings.B_SIGNAL_SERVICE, false)).sendSignal( - signalForFailedMessage(msgHash) - ); - } - - /// @inheritdoc IBridge - function isMessageSent(Message calldata _message) external view returns (bool) { - if (_message.srcChainId != block.chainid) return false; - return ISignalService(resolve(LibStrings.B_SIGNAL_SERVICE, false)).isSignalSent({ - _app: address(this), - _signal: hashMessage(_message) - }); - } - - /// @notice Checks if a msgHash has failed on its destination chain. - /// This is the 'readonly' version of proveMessageFailed. - /// @param _message The message. - /// @param _proof The merkle inclusion proof. - /// @return true if the message has failed, false otherwise. - function isMessageFailed( - Message calldata _message, - bytes calldata _proof - ) - external - view - returns (bool) - { - if (_message.srcChainId != block.chainid) return false; - - return _isSignalReceived( - resolve(LibStrings.B_SIGNAL_SERVICE, false), - signalForFailedMessage(hashMessage(_message)), - _message.destChainId, - _proof - ); - } - - /// @notice Checks if a msgHash has been received on its source chain. - /// This is the 'readonly' version of proveMessageReceived. - /// @param _message The message. - /// @param _proof The merkle inclusion proof. - /// @return true if the message has been received, false otherwise. - function isMessageReceived( - Message calldata _message, - bytes calldata _proof - ) - external - view - returns (bool) - { - if (_message.destChainId != block.chainid) return false; - return _isSignalReceived( - resolve(LibStrings.B_SIGNAL_SERVICE, false), - hashMessage(_message), - _message.srcChainId, - _proof - ); - } - - /// @notice Checks if the destination chain is enabled. - /// @param _chainId The destination chain ID. - /// @return enabled_ True if the destination chain is enabled. - /// @return destBridge_ The bridge of the destination chain. - function isDestChainEnabled(uint64 _chainId) - public - view - returns (bool enabled_, address destBridge_) - { - destBridge_ = resolve(_chainId, LibStrings.B_BRIDGE, true); - enabled_ = destBridge_ != address(0); - } - - /// @notice Gets the current context. - /// @inheritdoc IBridge - function context() external view returns (Context memory ctx_) { - ctx_ = _loadContext(); - if (ctx_.msgHash == 0 || ctx_.msgHash == bytes32(_PLACEHOLDER)) { - revert B_INVALID_CONTEXT(); - } - } - - /// @inheritdoc IBridge - function hashMessage(Message memory _message) public pure returns (bytes32) { - return keccak256(abi.encode("TAIKO_MESSAGE", _message)); - } - - /// @notice Returns a signal representing a failed/recalled message. - /// @param _msgHash The message hash. - /// @return The failed representation of it as bytes32. 
- function signalForFailedMessage(bytes32 _msgHash) public pure returns (bytes32) { - return _msgHash ^ bytes32(uint256(Status.FAILED)); - } - - /// @notice Returns the minimal gas limit required for sending a given message. - /// @param dataLength The length of message.data. - /// @return The minimal gas limit required for sending this message. - function getMessageMinGasLimit(uint256 dataLength) public pure returns (uint32) { - return _messageCalldataCost(dataLength) + GAS_RESERVE; - } - - /// @notice Checks if the given address can pause and/or unpause the bridge. - /// @dev Considering that the watchdog is a hot wallet, in case its private key is leaked, we - /// only allow watchdog to pause the bridge, but does not allow it to unpause the bridge. - function _authorizePause(address addr, bool toPause) internal view override { - // Owner and chain_pauser can pause/unpause the bridge. - if (addr == owner() || addr == resolve(LibStrings.B_CHAIN_WATCHDOG, true)) return; - - // bridge_watchdog can pause the bridge, but cannot unpause it. - if (toPause && addr == resolve(LibStrings.B_BRIDGE_WATCHDOG, true)) return; - - revert RESOLVER_DENIED(); - } - - /// @notice Invokes a call message on the Bridge. - /// @param _message The call message to be invoked. - /// @param _msgHash The hash of the message. - /// @param _shouldCheckForwardedGas True to check gasleft is sufficient for target function - /// invocation. - /// @return success_ A boolean value indicating whether the message call was successful. - /// @dev This function updates the context in the state before and after the - /// message call. - function _invokeMessageCall( - Message calldata _message, - bytes32 _msgHash, - uint256 _gasLimit, - bool _shouldCheckForwardedGas - ) - private - returns (bool success_) - { - assert(_message.from != address(this)); - - if (_gasLimit == 0) return false; - - _storeContext(_msgHash, _message.from, _message.srcChainId); - - address to = _message.to; - uint256 value = _message.value; - bytes memory data = _message.data; - uint256 gasLeft; - - assembly { - success_ := call(_gasLimit, to, value, add(data, 0x20), mload(data), 0, 0) - gasLeft := gas() - } - - if (_shouldCheckForwardedGas) { - _checkForwardedGas(gasLeft, _gasLimit); - } - _resetContext(); - } - - /// @notice Updates the status of a bridge message. - /// @dev If the new status is different from the current status in the - /// mapping, the status is updated and an event is emitted. - /// @param _msgHash The hash of the message. - /// @param _status The new status of the message. - function _updateMessageStatus(bytes32 _msgHash, Status _status) private { - if (messageStatus[_msgHash] == _status) revert B_INVALID_STATUS(); - messageStatus[_msgHash] = _status; - emit MessageStatusChanged(_msgHash, _status); - } - - /// @notice Resets the call context - function _resetContext() private { - if (LibNetwork.isDencunSupported(block.chainid)) { - _storeContext(bytes32(0), address(0), uint64(0)); - } else { - _storeContext( - bytes32(_PLACEHOLDER), address(uint160(_PLACEHOLDER)), uint64(_PLACEHOLDER) - ); - } - } - - /// @notice Stores the call context - /// @param _msgHash The message hash. - /// @param _from The sender's address. - /// @param _srcChainId The source chain ID. 
- function _storeContext(bytes32 _msgHash, address _from, uint64 _srcChainId) private { - if (LibNetwork.isDencunSupported(block.chainid)) { - assembly { - tstore(_CTX_SLOT, _msgHash) - tstore(add(_CTX_SLOT, 1), _from) - tstore(add(_CTX_SLOT, 2), _srcChainId) - } - } else { - __ctx = Context(_msgHash, _from, _srcChainId); - } - } - - /// @notice Checks if the signal was received and caches cross-chain data if requested. - /// @param _signalService The signal service address. - /// @param _signal The signal. - /// @param _chainId The ID of the chain the signal is stored on. - /// @param _proof The merkle inclusion proof. - /// @return numCacheOps_ Num of cached items - function _proveSignalReceived( - address _signalService, - bytes32 _signal, - uint64 _chainId, - bytes calldata _proof - ) - private - returns (uint32 numCacheOps_) - { - try ISignalService(_signalService).proveSignalReceived( - _chainId, resolve(_chainId, LibStrings.B_BRIDGE, false), _signal, _proof - ) returns (uint256 numCacheOps) { - numCacheOps_ = uint32(numCacheOps); - } catch { - revert B_SIGNAL_NOT_RECEIVED(); - } - } - - /// @notice Consumes a given amount of Ether from quota manager. - /// @param _amount The amount of Ether to consume. - /// @return true if quota manager has unlimited quota for Ether or the given amount of Ether is - /// consumed already. - function _consumeEtherQuota(uint256 _amount) private returns (bool) { - address quotaManager = resolve(LibStrings.B_QUOTA_MANAGER, true); - if (quotaManager == address(0)) return true; - - try IQuotaManager(quotaManager).consumeQuota(address(0), _amount) { - return true; - } catch { - return false; - } - } - - /// @notice Loads and returns the call context. - /// @return ctx_ The call context. - function _loadContext() private view returns (Context memory) { - if (LibNetwork.isDencunSupported(block.chainid)) { - bytes32 msgHash; - address from; - uint64 srcChainId; - assembly { - msgHash := tload(_CTX_SLOT) - from := tload(add(_CTX_SLOT, 1)) - srcChainId := tload(add(_CTX_SLOT, 2)) - } - return Context(msgHash, from, srcChainId); - } else { - return __ctx; - } - } - - /// @notice Checks if the signal was received. - /// This is the 'readonly' version of _proveSignalReceived. - /// @param _signalService The signal service address. - /// @param _signal The signal. - /// @param _chainId The ID of the chain the signal is stored on. - /// @param _proof The merkle inclusion proof. - /// @return true if the message was received. 
- function _isSignalReceived( - address _signalService, - bytes32 _signal, - uint64 _chainId, - bytes calldata _proof - ) - private - view - returns (bool) - { - try ISignalService(_signalService).verifySignalReceived( - _chainId, resolve(_chainId, LibStrings.B_BRIDGE, false), _signal, _proof - ) { - return true; - } catch { - return false; - } - } - - function _checkStatus(bytes32 _msgHash, Status _expectedStatus) private view { - if (messageStatus[_msgHash] != _expectedStatus) revert B_INVALID_STATUS(); - } - - function _unableToInvokeMessageCall( - Message calldata _message, - address _signalService - ) - private - view - returns (bool) - { - if (_message.to == address(0)) return true; - if (_message.to == address(this)) return true; - if (_message.to == _signalService) return true; - - return _message.data.length >= 4 - && bytes4(_message.data) != IMessageInvocable.onMessageInvocation.selector - && _message.to.isContract(); - } - - function _invocationGasLimit(Message calldata _message) private pure returns (uint256) { - uint256 minGasRequired = getMessageMinGasLimit(_message.data.length); - unchecked { - return minGasRequired.max(_message.gasLimit) - minGasRequired; - } - } - - function _messageCalldataCost(uint256 dataLength) private pure returns (uint32) { - // The abi encoding of A = (Message calldata msg) is 10 * 32 bytes - // + 32 bytes (A is a dynamic tuple, offset to first elements) - // + 32 bytes (offset to last bytes element of Message) - // + 32 bytes (padded encoding of length of Message.data + dataLength - // (padded to 32 // bytes) = 13 * 32 + ((dataLength + 31) / 32 * 32). - // Non-zero calldata cost per byte is 16. - unchecked { - return uint32(((dataLength + 31) / 32 * 32 + 416) << 4); - } - } - - /// @dev Suggested by OpenZeppelin and copied from - /// https://github.com/OpenZeppelin/openzeppelin-contracts/ - /// blob/83c7e45092dac350b070c421cd2bf7105616cf1a/contracts/ - /// metatx/ERC2771Forwarder.sol#L327C1-L370C6 - /// - /// @dev Checks if the requested gas was correctly forwarded to the callee. - /// As a consequence of https://eips.ethereum.org/EIPS/eip-150[EIP-150]: - /// - At most `gasleft() - floor(gasleft() / 64)` is forwarded to the callee. - /// - At least `floor(gasleft() / 64)` is kept in the caller. - /// - /// It reverts consuming all the available gas if the forwarded gas is not the requested gas. - /// - /// IMPORTANT: The `gasLeft` parameter should be measured exactly at the end of the forwarded - /// call. - /// Any gas consumed in between will make room for bypassing this check. - function _checkForwardedGas(uint256 _gasLeft, uint256 _gasRequested) private pure { - // To avoid insufficient gas griefing attacks, as referenced in - // https://ronan.eth.limo/blog/ethereum-gas-dangers/ - // - // A malicious relayer can attempt to shrink the gas forwarded so that the underlying call - // reverts out-of-gas - // but the forwarding itself still succeeds. In order to make sure that the subcall received - // sufficient gas, - // we will inspect gasleft() after the forwarding. - // - // Let X be the gas available before the subcall, such that the subcall gets at most X * 63 - // / 64. - // We can't know X after CALL dynamic costs, but we want it to be such that X * 63 / 64 >= - // req.gas. - // Let Y be the gas used in the subcall. gasleft() measured immediately after the subcall - // will be gasleft() = X - Y. - // If the subcall ran out of gas, then Y = X * 63 / 64 and gasleft() = X - Y = X / 64. 
- // Under this assumption req.gas / 63 > gasleft() is true is true if and only if - // req.gas / 63 > X / 64, or equivalently req.gas > X * 63 / 64. - // This means that if the subcall runs out of gas we are able to detect that insufficient - // gas was passed. - // - // We will now also see that req.gas / 63 > gasleft() implies that req.gas >= X * 63 / 64. - // The contract guarantees Y <= req.gas, thus gasleft() = X - Y >= X - req.gas. - // - req.gas / 63 > gasleft() - // - req.gas / 63 >= X - req.gas - // - req.gas >= X * 63 / 64 - // In other words if req.gas < X * 63 / 64 then req.gas / 63 <= gasleft(), thus if the - // relayer behaves honestly - // the forwarding does not revert. - if (_gasLeft < _gasRequested / 63) { - // We explicitly trigger invalid opcode to consume all gas and bubble-up the effects, - // since - // neither revert or assert consume all gas since Solidity 0.8.20 - // https://docs.soliditylang.org/en/v0.8.20/control-structures.html#panic-via-assert-and-error-via-require - /// @solidity memory-safe-assembly - assembly { - invalid() - } - } - } -} diff --git a/packages/protocol/contracts/bridge/IBridge.sol b/packages/protocol/contracts/bridge/IBridge.sol deleted file mode 100644 index 99af78b249fd..000000000000 --- a/packages/protocol/contracts/bridge/IBridge.sol +++ /dev/null @@ -1,161 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title IBridge -/// @notice The bridge used in conjunction with the {ISignalService}. -/// @dev Ether is held by Bridges on L1 and L2s. -/// @custom:security-contact security@taiko.xyz -interface IBridge { - enum Status { - NEW, - RETRIABLE, - DONE, - FAILED, - RECALLED - } - - enum StatusReason { - INVOCATION_OK, - INVOCATION_PROHIBITED, - INVOCATION_FAILED, - OUT_OF_ETH_QUOTA - } - - struct Message { - // Message ID whose value is automatically assigned. - uint64 id; - // The max processing fee for the relayer. This fee has 3 parts: - // - the fee for message calldata. - // - the minimal fee reserve for general processing, excluding function call. - // - the invocation fee for the function call. - // Any unpaid fee will be refunded to the destOwner on the destination chain. - // Note that fee must be 0 if gasLimit is 0, or large enough to make the invocation fee - // non-zero. - uint64 fee; - // gasLimit that the processMessage call must have. - uint32 gasLimit; - // The address, EOA or contract, that interacts with this bridge. - // The value is automatically assigned. - address from; - // Source chain ID whose value is automatically assigned. - uint64 srcChainId; - // The owner of the message on the source chain. - address srcOwner; - // Destination chain ID where the `to` address lives. - uint64 destChainId; - // The owner of the message on the destination chain. - address destOwner; - // The destination address on the destination chain. - address to; - // value to invoke on the destination chain. - uint256 value; - // callData to invoke on the destination chain. - bytes data; - } - - // Struct representing the context of a bridge operation. - // 2 slots - struct Context { - bytes32 msgHash; // Message hash. - address from; // Sender's address. - uint64 srcChainId; // Source chain ID. - } - - /// @notice Emitted when a message is sent. - /// @param msgHash The hash of the message. - /// @param message The message. - event MessageSent(bytes32 indexed msgHash, Message message); - - /// @notice Emitted when the status of a message changes. - /// @param msgHash The hash of the message. 
- /// @param status The new status of the message. - event MessageStatusChanged(bytes32 indexed msgHash, Status status); - - /// @notice Sends a message to the destination chain and takes custody - /// of Ether required in this contract. - /// @param _message The message to be sent. - /// @return msgHash_ The hash of the sent message. - /// @return message_ The updated message sent. - function sendMessage(Message calldata _message) - external - payable - returns (bytes32 msgHash_, Message memory message_); - - /// @notice Recalls a failed message on its source chain, releasing - /// associated assets. - /// @dev This function checks if the message failed on the source chain and - /// releases associated Ether or tokens. - /// @param _message The message whose associated Ether should be released. - /// @param _proof The merkle inclusion proof. - function recallMessage(Message calldata _message, bytes calldata _proof) external; - - /// @notice Processes a bridge message on the destination chain. This - /// function is callable by any address, including the `message.destOwner`. - /// @dev The process begins by hashing the message and checking the message - /// status in the bridge If the status is "NEW", the message is invoked. The - /// status is updated accordingly, and processing fees are refunded as - /// needed. - /// @param _message The message to be processed. - /// @param _proof The merkle inclusion proof. - /// @return The message's status after processing and the reason for the change. - function processMessage( - Message calldata _message, - bytes calldata _proof - ) - external - returns (Status, StatusReason); - - /// @notice Retries to invoke the messageCall after releasing associated - /// Ether and tokens. - /// @dev This function can be called by any address, including the - /// `message.destOwner`. - /// It attempts to invoke the messageCall and updates the message status - /// accordingly. - /// @param _message The message to retry. - /// @param _isLastAttempt Specifies if this is the last attempt to retry the - /// message. - function retryMessage(Message calldata _message, bool _isLastAttempt) external; - - /// @notice Mark a message as failed if the message is currently retriable. - /// @dev This function can only be called by `message.destOwner`. - /// @param _message The message to fail. - /// message. - function failMessage(Message calldata _message) external; - - /// @notice Returns the bridge state context. - /// @return ctx_ The context of the current bridge operation. - function context() external view returns (Context memory ctx_); - - /// @notice Checks if the message was sent. - /// @param _message The message. - /// @return true if the message was sent. - function isMessageSent(Message calldata _message) external view returns (bool); - - /// @notice Hash the message - /// @param _message The message struct variable to be hashed. - /// @return The message's hash. - function hashMessage(Message memory _message) external pure returns (bytes32); -} - -/// @title IRecallableSender -/// @notice An interface that all recallable message senders shall implement. -interface IRecallableSender { - /// @notice Called when a message is recalled. - /// @param _message The recalled message. - /// @param _msgHash The hash of the recalled message. 
- function onMessageRecalled( - IBridge.Message calldata _message, - bytes32 _msgHash - ) - external - payable; -} - -/// @title IMessageInvocable -/// @notice An interface that all bridge message receiver shall implement -interface IMessageInvocable { - /// @notice Called when this contract is the bridge target. - /// @param _data The data for this contract to interpret. - /// @dev This method should be guarded with `onlyFromNamed("bridge")`. - function onMessageInvocation(bytes calldata _data) external payable; -} diff --git a/packages/protocol/contracts/bridge/IQuotaManager.sol b/packages/protocol/contracts/bridge/IQuotaManager.sol deleted file mode 100644 index 3091e0193151..000000000000 --- a/packages/protocol/contracts/bridge/IQuotaManager.sol +++ /dev/null @@ -1,19 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title IQuotaManager -/// @custom:security-contact security@taiko.xyz -interface IQuotaManager { - /// @notice Consumes a specific amount of quota for a given address. - /// This function must revert if available quota is smaller than the given amount of quota. - /// - /// @dev Note that IQuotaManager is used by vaults and bridge, and should be registered in a - /// shared address manager on the L1, therefore, a registered IQuotaManager and its per-token - /// quota settings will be shared by all Taiko L2s. To enable a per-L2 quota, we need to modify - /// this function to: - /// `function consumeQuota(uint256 _srcChainId, address _token, uint256 _amount) ` - /// - /// @param _token The token address. Ether is represented with address(0). - /// @param _amount The amount of quota to consume. - function consumeQuota(address _token, uint256 _amount) external; -} diff --git a/packages/protocol/contracts/bridge/README.md b/packages/protocol/contracts/bridge/README.md deleted file mode 100644 index 803d93d49495..000000000000 --- a/packages/protocol/contracts/bridge/README.md +++ /dev/null @@ -1,105 +0,0 @@ -# Bridging contract flow - -## High level overview - -There are two parties at play which will interact with the `Bridge` contract, which is deployed on **both** the **source chain (srcChain)** and the **destination chain (destChain)**: - -- The initiator of the bridge request (calls `Bridge.sendMessage`). -- The relayer (calls `Bridge.processMessage`). - -The initiator will start the request, making it known on the Bridge contract via a signal. The relayer will pick this request up and process it. - -## Diving deeper - -Let's go deeper into the steps that occur when bridging ETH from srcChain to destChain: - -### Send message / Send token - -The bridge distinguishes 4 different token types: `Ether`, `ERC20`, `ERC1155`, `ERC721`. Ether is kept in the Bridge contract, and token vaults for ERC20, ERC1155, and ERC721 tokens must be deployed to the source and destination chain, - -#### Bridging Ether - -If user wants to bridge ether, he/she will initiate a bridge transaction with `sendMessage` on the source chain which includes: - -``` - struct Message { - // Message ID. - uint256 id; - // Message sender address. - address from; - // Source chain ID. - uint64 srcChainId; - // Destination chain ID where the `to` address lives. - uint64 destChainId; - // User address of the bridged asset. - address user; - // Destination user address. - address to; - // Alternate address to send any refund. If blank, defaults to user. - address refundAddress; - // value to invoke on the destination chain. - uint256 value; - // Processing fee for the relayer. 
Zero if user will process themselves.
-        uint256 fee;
-        // gasLimit to invoke on the destination chain.
-        uint256 gasLimit;
-        // callData to invoke on the destination chain.
-        bytes data;
-        // Optional memo.
-        string memo;
-    }
-```
-
-- `value` and `fee` must sum to `msg.value`.
-- The destination chain's ID (must be enabled via setting `addressResolver` for `${chainID}.bridge`).
-
-Inside the `sendMessage` call, the `msg.value` amount of Ether is kept in the Bridge contract, then a `signal` is created from the message, and a `key` is stored on the srcChain bridge contract address. The `key` is a hash of the `signal` and the srcChain bridge contract address. The `key` is stored on the `Bridge` contract with a value of `1`, and a `MessageSent` event is emitted for the relayer to pick up.
-
-#### Bridging other tokens
-
-If a user wants to bridge other tokens (`ERC20`, `ERC1155`, or `ERC721`), they initiate the bridge transaction (`sendMessage`) indirectly by interacting with the corresponding token vault contract.
-
-In the case of ERC20, the transaction can be initiated by populating the struct below and calling `sendToken`:
-
-```
-    struct BridgeTransferOp {
-        uint256 destChainId;
-        address to;
-        address token;
-        uint256 amount;
-        uint256 gasLimit;
-        uint256 fee;
-        address refundTo;
-        string memo;
-    }
-```
-
-In the case of `ERC1155` or `ERC721`, the mechanism is the same, but the struct looks like this:
-
-```
-struct BridgeTransferOp {
-        uint256 destChainId;
-        address to;
-        address token;
-        uint256[] tokenIds;
-        uint256[] amounts;
-        uint256 gasLimit;
-        uint256 fee;
-        address refundTo;
-        string memo;
-    }
-```
-
-### Process message
-
-If the `processingFee` is set to 0, only the user can call `processMessage`. Otherwise, either the user or an off-chain relayer can process the message. The steps below are described for a relayer -- the user would have to perform the same steps anyway. The relayer picks up the event and **generates a proof from srcChain** -- this can be obtained with `eth_getProof` on the srcChain bridge contract. This proof is sent along with the signal to `processMessage` on the destChain bridge contract.
-
-The `processMessage` call first checks that the message has not been processed yet; this status is stored in the destination chain's bridge contract state as `statuses`. Next, the proof (that the message is indeed sent to the SignalService on the source chain) is checked inside `proveSignalReceived`. The proof demonstrates that the storage on the `Bridge` contract on srcChain contains the `key` with a value of `1`. `LibSecureMerkleTrie` takes the proof, the signal, and the message sender address to check that the `key` is set on the srcChain bridge contract state. This verifies that the message was sent on srcChain. Next, `proveSignalReceived` gets the header hash on destChain of the header height specified in the proof. It then checks that this hash is equal to the hash specified in the proof. This verifies that the message is received on destChain.
-
-The `processMessage` call will then proceed to invoke the message call, which will actually take the Ether from the vault and send it to the specified address. If it succeeds, it will mark the message as "DONE" on the srcChain bridge state. If it fails, it will mark the message as "RETRIABLE" and send the Ether back to the vault. Later, `retryMessage` can be called **only** by the user (`processMessage` cannot be called again for this message by the relayer). A sketch of a typical receiving contract is shown below.
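For illustration, here is a minimal sketch of a destination-chain receiver. It is not part of the protocol: the contract name (`EtherReceiver`), the payload layout (`abi.encode(recipient)`), the import path, and the constructor-pinned bridge address are assumptions made for the example (the repo's own contracts would typically use an `onlyFromNamed("bridge")`-style guard instead). What it demonstrates is taken from the contracts above: when `message.data` carries a function call, the Bridge only invokes contract targets whose calldata starts with the `IMessageInvocable.onMessageInvocation` selector, and `message.value` is forwarded with that call.

```
// Hypothetical example, not part of the contracts in this package.
// SPDX-License-Identifier: MIT
pragma solidity 0.8.24;

import "./IBridge.sol"; // for IMessageInvocable (assumed same-directory import)

contract EtherReceiver is IMessageInvocable {
    address public immutable bridge;
    mapping(address account => uint256 amount) public credits;

    constructor(address _bridge) {
        bridge = _bridge;
    }

    /// @dev Invoked by Bridge.processMessage / retryMessage with message.value attached.
    ///      On the source chain, message.data would be built as:
    ///      abi.encodeCall(IMessageInvocable.onMessageInvocation, (abi.encode(recipient)))
    function onMessageInvocation(bytes calldata _data) external payable {
        require(msg.sender == bridge, "not bridge");
        address recipient = abi.decode(_data, (address));
        credits[recipient] += msg.value;
    }
}
```

On the source chain, such a message would be sent with `sendMessage`, with `message.to` set to this contract, `message.value` set to the Ether to forward, and `msg.value` equal to `message.value + message.fee`.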
- -Finally, any unused funds are sent back to the user as a refund. - -### Failed bridging - -If the `statuses` is "RETRIABLE" and - for whatever reason - the second try also cannot successfully initiate releasing the funds/tokens to the recipient on the destination chain, the `statuses` will be set to "FAILED". In this case the `recallMessage` shall be called on the source chain's Bridge contract (with `message` and `proof` input params), which will send the assets back to the user. diff --git a/packages/protocol/contracts/common/AddressManager.sol b/packages/protocol/contracts/common/AddressManager.sol deleted file mode 100644 index 91ab17e86daf..000000000000 --- a/packages/protocol/contracts/common/AddressManager.sol +++ /dev/null @@ -1,68 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "./EssentialContract.sol"; - -/// @title AddressManager -/// @notice See the documentation in {IAddressManager}. -/// @custom:security-contact security@taiko.xyz -contract AddressManager is EssentialContract, IAddressManager { - /// @dev Mapping of chainId to mapping of name to address. - mapping(uint256 chainId => mapping(bytes32 name => address addr)) private __addresses; - - uint256[49] private __gap; - - /// @notice Emitted when an address is set. - /// @param chainId The chainId for the address mapping. - /// @param name The name for the address mapping. - /// @param newAddress The new address. - /// @param oldAddress The old address. - event AddressSet( - uint64 indexed chainId, bytes32 indexed name, address newAddress, address oldAddress - ); - - error AM_ADDRESS_ALREADY_SET(); - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - function init(address _owner) external initializer { - __Essential_init(_owner); - addressManager = address(this); - } - - function init2() external onlyOwner reinitializer(2) { - addressManager = address(this); - } - - /// @notice Sets the address for a specific chainId-name pair. - /// @param _chainId The chainId to which the address will be mapped. - /// @param _name The name to which the address will be mapped. - /// @param _newAddress The Ethereum address to be mapped. - function setAddress( - uint64 _chainId, - bytes32 _name, - address _newAddress - ) - external - virtual - onlyOwner - { - address oldAddress = __addresses[_chainId][_name]; - if (_newAddress == oldAddress) revert AM_ADDRESS_ALREADY_SET(); - __addresses[_chainId][_name] = _newAddress; - emit AddressSet(_chainId, _name, _newAddress, oldAddress); - } - - /// @inheritdoc IAddressManager - function getAddress(uint64 _chainId, bytes32 _name) external view override returns (address) { - address addr = _getOverride(_chainId, _name); - if (addr != address(0)) return addr; - else return __addresses[_chainId][_name]; - } - - /// @notice Gets the address mapped to a specific chainId-name pair. - /// @dev Sub-contracts can override this method to avoid reading from storage. 
- function _getOverride(uint64 _chainId, bytes32 _name) internal pure virtual returns (address) { } - - function _authorizePause(address, bool) internal pure override notImplemented { } -} diff --git a/packages/protocol/contracts/common/AddressResolver.sol b/packages/protocol/contracts/common/AddressResolver.sol deleted file mode 100644 index 27f88b2a2216..000000000000 --- a/packages/protocol/contracts/common/AddressResolver.sol +++ /dev/null @@ -1,106 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; -import "./IAddressManager.sol"; -import "./IAddressResolver.sol"; - -/// @title AddressResolver -/// @notice See the documentation in {IAddressResolver}. -/// @custom:security-contact security@taiko.xyz -abstract contract AddressResolver is IAddressResolver, Initializable { - /// @notice Address of the AddressManager. - address public addressManager; - uint256[49] private __gap; - - error RESOLVER_DENIED(); - error RESOLVER_INVALID_MANAGER(); - error RESOLVER_UNEXPECTED_CHAINID(); - error RESOLVER_ZERO_ADDR(uint64 chainId, bytes32 name); - - /// @dev Modifier that ensures the caller is the resolved address of a given - /// name. - /// @param _name The name to check against. - modifier onlyFromNamed(bytes32 _name) { - if (msg.sender != resolve(_name, true)) revert RESOLVER_DENIED(); - _; - } - - /// @dev Modifier that ensures the caller is a resolved address to either _name1 or _name2 - /// name. - /// @param _name1 The first name to check against. - /// @param _name2 The second name to check against. - modifier onlyFromNamedEither(bytes32 _name1, bytes32 _name2) { - if (msg.sender != resolve(_name1, true) && msg.sender != resolve(_name2, true)) { - revert RESOLVER_DENIED(); - } - _; - } - - /// @custom:oz-upgrades-unsafe-allow constructor - constructor() { - _disableInitializers(); - } - - /// @inheritdoc IAddressResolver - function resolve( - bytes32 _name, - bool _allowZeroAddress - ) - public - view - virtual - returns (address payable) - { - return _resolve(uint64(block.chainid), _name, _allowZeroAddress); - } - - /// @inheritdoc IAddressResolver - function resolve( - uint64 _chainId, - bytes32 _name, - bool _allowZeroAddress - ) - public - view - virtual - returns (address payable) - { - return _resolve(_chainId, _name, _allowZeroAddress); - } - - /// @dev Initialization method for setting up AddressManager reference. - /// @param _addressManager Address of the AddressManager. - function __AddressResolver_init(address _addressManager) internal virtual onlyInitializing { - if (block.chainid > type(uint64).max) { - revert RESOLVER_UNEXPECTED_CHAINID(); - } - addressManager = _addressManager; - } - - /// @dev Helper method to resolve name-to-address. - /// @param _chainId The chainId of interest. - /// @param _name Name whose address is to be resolved. - /// @param _allowZeroAddress If set to true, does not throw if the resolved - /// address is `address(0)`. - /// @return addr_ Address associated with the given name on the specified - /// chain. 
- function _resolve( - uint64 _chainId, - bytes32 _name, - bool _allowZeroAddress - ) - private - view - returns (address payable addr_) - { - address _addressManager = addressManager; - if (_addressManager == address(0)) revert RESOLVER_INVALID_MANAGER(); - - addr_ = payable(IAddressManager(_addressManager).getAddress(_chainId, _name)); - - if (!_allowZeroAddress && addr_ == address(0)) { - revert RESOLVER_ZERO_ADDR(_chainId, _name); - } - } -} diff --git a/packages/protocol/contracts/common/AuthorizableContract.sol b/packages/protocol/contracts/common/AuthorizableContract.sol deleted file mode 100644 index 82e57a24be9f..000000000000 --- a/packages/protocol/contracts/common/AuthorizableContract.sol +++ /dev/null @@ -1,34 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../common/EssentialContract.sol"; - -/// @title AuthorizableContract -abstract contract AuthorizableContract is EssentialContract { - mapping(address => bytes32 label) public authorizedAddresses; - uint256[49] private __gap; - - event Authorized(address indexed addr, bytes32 oldLabel, bytes32 newLabel); - - error INVALID_ADDRESS(); - error INVALID_LABEL(); - - function authorize(address addr, bytes32 label) external onlyOwner { - if (addr == address(0)) revert INVALID_ADDRESS(); - - bytes32 oldLabel = authorizedAddresses[addr]; - if (oldLabel == label) revert INVALID_LABEL(); - authorizedAddresses[addr] = label; - - emit Authorized(addr, oldLabel, label); - } - - function isAuthorizedAs(address addr, bytes32 label) public view returns (bool) { - return label != 0 && authorizedAddresses[addr] == label; - } -} diff --git a/packages/protocol/contracts/common/EssentialContract.sol b/packages/protocol/contracts/common/EssentialContract.sol deleted file mode 100644 index 3bb208173661..000000000000 --- a/packages/protocol/contracts/common/EssentialContract.sol +++ /dev/null @@ -1,175 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/proxy/utils/UUPSUpgradeable.sol"; -import "@openzeppelin/contracts-upgradeable/access/Ownable2StepUpgradeable.sol"; -import "./AddressResolver.sol"; -import "../libs/LibNetwork.sol"; - -/// @title EssentialContract -/// @custom:security-contact security@taiko.xyz -abstract contract EssentialContract is UUPSUpgradeable, Ownable2StepUpgradeable, AddressResolver { - uint8 private constant _FALSE = 1; - - uint8 private constant _TRUE = 2; - - /// @dev The slot in transient storage of the reentry lock. - /// This is the result of keccak256("ownerUUPS.reentry_slot") plus 1. The addition aims to - /// prevent hash collisions with slots defined in EIP-1967, where slots are derived by - /// keccak256("something") - 1, and with slots in SignalService, calculated directly with - /// keccak256("something"). - bytes32 private constant _REENTRY_SLOT = - 0xa5054f728453d3dbe953bdc43e4d0cb97e662ea32d7958190f3dc2da31d9721b; - - /// @dev Slot 1. - uint8 private __reentry; - uint8 private __paused; - uint64 public lastUnpausedAt; - - uint256[49] private __gap; - - /// @notice Emitted when the contract is paused. - /// @param account The account that paused the contract. - event Paused(address account); - - /// @notice Emitted when the contract is unpaused. - /// @param account The account that unpaused the contract. 
- event Unpaused(address account); - - error INVALID_PAUSE_STATUS(); - error FUNC_NOT_IMPLEMENTED(); - error REENTRANT_CALL(); - error ZERO_ADDRESS(); - error ZERO_VALUE(); - - /// @dev Modifier that ensures the caller is the owner or resolved address of a given name. - /// @param _name The name to check against. - modifier onlyFromOwnerOrNamed(bytes32 _name) { - if (msg.sender != owner() && msg.sender != resolve(_name, true)) revert RESOLVER_DENIED(); - _; - } - - modifier notImplemented() { - revert FUNC_NOT_IMPLEMENTED(); - _; - } - - modifier nonReentrant() { - if (_loadReentryLock() == _TRUE) revert REENTRANT_CALL(); - _storeReentryLock(_TRUE); - _; - _storeReentryLock(_FALSE); - } - - modifier whenPaused() { - if (!paused()) revert INVALID_PAUSE_STATUS(); - _; - } - - modifier whenNotPaused() { - if (paused()) revert INVALID_PAUSE_STATUS(); - _; - } - - modifier nonZeroAddr(address _addr) { - if (_addr == address(0)) revert ZERO_ADDRESS(); - _; - } - - modifier nonZeroValue(bytes32 _value) { - if (_value == 0) revert ZERO_VALUE(); - _; - } - - /// @custom:oz-upgrades-unsafe-allow constructor - constructor() { - _disableInitializers(); - } - - /// @notice Pauses the contract. - function pause() public virtual { - _pause(); - // We call the authorize function here to avoid: - // Warning (5740): Unreachable code. - _authorizePause(msg.sender, true); - } - - /// @notice Unpauses the contract. - function unpause() public virtual { - _unpause(); - // We call the authorize function here to avoid: - // Warning (5740): Unreachable code. - _authorizePause(msg.sender, false); - } - - function impl() public view returns (address) { - return _getImplementation(); - } - - /// @notice Returns true if the contract is paused, and false otherwise. - /// @return true if paused, false otherwise. - function paused() public view returns (bool) { - return __paused == _TRUE; - } - - function inNonReentrant() public view returns (bool) { - return _loadReentryLock() == _TRUE; - } - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function __Essential_init( - address _owner, - address _addressManager - ) - internal - nonZeroAddr(_addressManager) - { - __Essential_init(_owner); - __AddressResolver_init(_addressManager); - } - - function __Essential_init(address _owner) internal virtual onlyInitializing { - __Context_init(); - _transferOwnership(_owner == address(0) ? 
msg.sender : _owner); - __paused = _FALSE; - } - - function _pause() internal whenNotPaused { - __paused = _TRUE; - emit Paused(msg.sender); - } - - function _unpause() internal whenPaused { - __paused = _FALSE; - lastUnpausedAt = uint64(block.timestamp); - emit Unpaused(msg.sender); - } - - function _authorizeUpgrade(address) internal virtual override onlyOwner { } - - function _authorizePause(address, bool) internal virtual onlyOwner { } - - // Stores the reentry lock - function _storeReentryLock(uint8 _reentry) internal virtual { - if (LibNetwork.isDencunSupported(block.chainid)) { - assembly { - tstore(_REENTRY_SLOT, _reentry) - } - } else { - __reentry = _reentry; - } - } - - // Loads the reentry lock - function _loadReentryLock() internal view virtual returns (uint8 reentry_) { - if (LibNetwork.isDencunSupported(block.chainid)) { - assembly { - reentry_ := tload(_REENTRY_SLOT) - } - } else { - reentry_ = __reentry; - } - } -} diff --git a/packages/protocol/contracts/common/IAddressManager.sol b/packages/protocol/contracts/common/IAddressManager.sol deleted file mode 100644 index f88933462fe3..000000000000 --- a/packages/protocol/contracts/common/IAddressManager.sol +++ /dev/null @@ -1,15 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title IAddressManager -/// @notice Manages a mapping of (chainId, name) pairs to Ethereum addresses. -/// @custom:security-contact security@taiko.xyz -interface IAddressManager { - /// @notice Gets the address mapped to a specific chainId-name pair. - /// @dev Note that in production, this method shall be a pure function - /// without any storage access. - /// @param _chainId The chainId for which the address needs to be fetched. - /// @param _name The name for which the address needs to be fetched. - /// @return Address associated with the chainId-name pair. - function getAddress(uint64 _chainId, bytes32 _name) external view returns (address); -} diff --git a/packages/protocol/contracts/common/IAddressResolver.sol b/packages/protocol/contracts/common/IAddressResolver.sol deleted file mode 100644 index 886e123e1ba9..000000000000 --- a/packages/protocol/contracts/common/IAddressResolver.sol +++ /dev/null @@ -1,42 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title IAddressResolver -/// @notice This contract acts as a bridge for name-to-address resolution. -/// It delegates the resolution to the AddressManager. By separating the logic, -/// we can maintain flexibility in address management without affecting the -/// resolving process. -/// @dev Note that the address manager should be changed using upgradability, there -/// is no setAddressManager() function to guarantee atomicity across all -/// contracts that are resolvers. -/// @custom:security-contact security@taiko.xyz -interface IAddressResolver { - /// @notice Resolves a name to its address deployed on this chain. - /// @param _name Name whose address is to be resolved. - /// @param _allowZeroAddress If set to true, does not throw if the resolved - /// address is `address(0)`. - /// @return Address associated with the given name. - function resolve( - bytes32 _name, - bool _allowZeroAddress - ) - external - view - returns (address payable); - - /// @notice Resolves a name to its address deployed on a specified chain. - /// @param _chainId The chainId of interest. - /// @param _name Name whose address is to be resolved. - /// @param _allowZeroAddress If set to true, does not throw if the resolved - /// address is `address(0)`. 
- /// @return Address associated with the given name on the specified - /// chain. - function resolve( - uint64 _chainId, - bytes32 _name, - bool _allowZeroAddress - ) - external - view - returns (address payable); -} diff --git a/packages/protocol/contracts/common/ICrossChainSync.sol b/packages/protocol/contracts/common/ICrossChainSync.sol deleted file mode 100644 index 87fdf59cf709..000000000000 --- a/packages/protocol/contracts/common/ICrossChainSync.sol +++ /dev/null @@ -1,39 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -/// @title ICrossChainSync -/// @dev This interface is implemented by both the TaikoL1 and TaikoL2 -/// contracts. -/// It outlines the essential methods required for synchronizing and accessing -/// block hashes across chains. The core idea is to ensure that data between -/// both chains remain consistent and can be cross-referenced with integrity. -interface ICrossChainSync { - struct Snippet { - uint64 remoteBlockId; - uint64 syncedInBlock; - bytes32 blockHash; - bytes32 signalRoot; - } - - /// @dev Emitted when a block has been synced across chains. - /// @param syncedInBlock The ID of this chain's block where the sync - /// happened. - /// @param blockId The ID of the remote block whose block hash and - /// signal root are synced. - /// @param blockHash The hash of the synced block. - /// @param signalRoot The root hash representing cross-chain signals. - event CrossChainSynced( - uint64 indexed syncedInBlock, uint64 indexed blockId, bytes32 blockHash, bytes32 signalRoot - ); - - /// @notice Fetches the hash of a block from the opposite chain. - /// @param blockId The target block id. Specifying 0 retrieves the hash - /// of the latest block. - /// @return snippet The block hash and signal root synced. 
- function getSyncedSnippet(uint64 blockId) external view returns (Snippet memory snippet); -} diff --git a/packages/protocol/contracts/common/LibStrings.sol b/packages/protocol/contracts/common/LibStrings.sol deleted file mode 100644 index 3403c683a16f..000000000000 --- a/packages/protocol/contracts/common/LibStrings.sol +++ /dev/null @@ -1,33 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title LibStrings -/// @custom:security-contact security@taiko.xyz -library LibStrings { - bytes32 internal constant B_AUTOMATA_DCAP_ATTESTATION = bytes32("automata_dcap_attestation"); - bytes32 internal constant B_BRIDGE = bytes32("bridge"); - bytes32 internal constant B_BRIDGE_WATCHDOG = bytes32("bridge_watchdog"); - bytes32 internal constant B_BRIDGED_ERC1155 = bytes32("bridged_erc1155"); - bytes32 internal constant B_BRIDGED_ERC20 = bytes32("bridged_erc20"); - bytes32 internal constant B_BRIDGED_ERC721 = bytes32("bridged_erc721"); - bytes32 internal constant B_CHAIN_WATCHDOG = bytes32("chain_watchdog"); - bytes32 internal constant B_ERC1155_VAULT = bytes32("erc1155_vault"); - bytes32 internal constant B_ERC20_VAULT = bytes32("erc20_vault"); - bytes32 internal constant B_ERC721_VAULT = bytes32("erc721_vault"); - bytes32 internal constant B_PROVER_ASSIGNMENT = bytes32("PROVER_ASSIGNMENT"); - bytes32 internal constant B_PROVER_SET = bytes32("prover_set"); - bytes32 internal constant B_QUOTA_MANAGER = bytes32("quota_manager"); - bytes32 internal constant B_SGX_WATCHDOG = bytes32("sgx_watchdog"); - bytes32 internal constant B_SIGNAL_SERVICE = bytes32("signal_service"); - bytes32 internal constant B_TAIKO = bytes32("taiko"); - bytes32 internal constant B_TAIKO_TOKEN = bytes32("taiko_token"); - bytes32 internal constant B_TIER_GUARDIAN = bytes32("tier_guardian"); - bytes32 internal constant B_TIER_GUARDIAN_MINORITY = bytes32("tier_guardian_minority"); - bytes32 internal constant B_TIER_ROUTER = bytes32("tier_router"); - bytes32 internal constant B_TIER_SGX = bytes32("tier_sgx"); - bytes32 internal constant B_TIER_SGX_ZKVM = bytes32("tier_sgx_zkvm"); - bytes32 internal constant B_WITHDRAWER = bytes32("withdrawer"); - bytes32 internal constant H_RETURN_LIVENESS_BOND = keccak256("RETURN_LIVENESS_BOND"); - bytes32 internal constant H_SIGNAL_ROOT = keccak256("SIGNAL_ROOT"); - bytes32 internal constant H_STATE_ROOT = keccak256("STATE_ROOT"); -} diff --git a/packages/protocol/contracts/examples/xErc20Example.sol b/packages/protocol/contracts/examples/xErc20Example.sol deleted file mode 100644 index be1cf562b5d7..000000000000 --- a/packages/protocol/contracts/examples/xErc20Example.sol +++ /dev/null @@ -1,8 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -import "../gwyneth/XChainERC20Token.sol"; - -contract xERC20Example is XChainERC20Token { - constructor(string memory name_, string memory symbol_, address premintAddress_, uint256 premintAmount_ ) XChainERC20Token(name_, symbol_, premintAddress_, premintAmount_ ) {} -} \ No newline at end of file diff --git a/packages/protocol/contracts/gwyneth/Bus.sol b/packages/protocol/contracts/gwyneth/Bus.sol deleted file mode 100644 index 231949750ff7..000000000000 --- a/packages/protocol/contracts/gwyneth/Bus.sol +++ /dev/null @@ -1,44 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity >=0.8.12 <0.9.0; - -import "./XChain.sol"; - -contract Bus is XChain { - // Stored only on the target chain - mapping (bytes32 => bool) public consumed; - - function isMessageSent(bytes32 messageHash, uint busID) external view returns 
(bool) { - return messages[busID] == messageHash; - } - - function write(bytes memory message) public override returns (uint) { - messages.push(calcMessageHash(message)); - return messages.length - 1; - } - - function consume(uint fromChainId, bytes memory message, bytes calldata proof) public override { - ProofType proofType = ProofType(uint16(bytes2(proof[:2]))); - if (proofType == ProofType.ASYNC) { - // Decode the proof - AsyncBusProof memory busProof = abi.decode(proof[2:], (AsyncBusProof)); - - // Calculate the message hash - bytes32 messageHash = calcMessageHash(message); - - // Do the call on the source chain to see if the message was sent there - xCallOptions(fromChainId, true, busProof.boosterCallProof); - bool isSent = this.isMessageSent(messageHash, busProof.busID); - require(isSent == true); - - // Make sure this is the first and last time this message is consumed - require(consumed[messageHash] == false); - consumed[messageHash] = true; - } else if (proofType == ProofType.SYNC) { - // Sync system with shared validity (e.g.: like a SignalService shared validity thing) - write(message); - } else { - revert("INVALID BUS PROOF"); - } - } -} \ No newline at end of file diff --git a/packages/protocol/contracts/gwyneth/EVM.sol b/packages/protocol/contracts/gwyneth/EVM.sol deleted file mode 100644 index b77a3e6137d8..000000000000 --- a/packages/protocol/contracts/gwyneth/EVM.sol +++ /dev/null @@ -1,68 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity >=0.8.12 <0.9.0; - -// EVM library -library EVM { - // precompile addresses - address constant xCallOptionsAddress = address(0x1100); - - uint constant l1ChainId = 1; - uint constant version = 1; - - function xCallOnL1() - public - view - { - xCallOptions(l1ChainId); - } - - function xCallOptions(uint chainID) - public - view - { - xCallOptions(chainID, true); - } - - function xCallOptions(uint chainID, bool sandbox) - public - view - { - xCallOptions(chainID, sandbox, address(0), address(0)); - } - - function xCallOptions(uint chainID, bool sandbox, address txOrigin, address msgSender) - public - view - { - xCallOptions(chainID, sandbox, txOrigin, msgSender, 0x0, ""); - } - - function xCallOptions(uint chainID, bool sandbox, bytes32 blockHash, bytes memory proof) - public - view - { - xCallOptions(chainID, sandbox, address(0), address(0), blockHash, proof); - } - - function xCallOptions(uint chainID, bool sandbox, address txOrigin, address msgSender, bytes32 blockHash, bytes memory proof) - public - view - { - // This precompile is not supported on L1 - require(chainID != l1ChainId); - - // Call the custom precompile - bytes memory input = abi.encodePacked(version, chainID, sandbox, txOrigin, msgSender, blockHash, proof); - (bool success, ) = xCallOptionsAddress.staticcall(input); - require(success); - } - - function isOnL1() public view returns (bool) { - return chainId() == l1ChainId; - } - - function chainId() public view returns (uint256) { - return block.chainid; - } -} \ No newline at end of file diff --git a/packages/protocol/contracts/gwyneth/XChain.sol b/packages/protocol/contracts/gwyneth/XChain.sol deleted file mode 100644 index 5544d6382bb7..000000000000 --- a/packages/protocol/contracts/gwyneth/XChain.sol +++ /dev/null @@ -1,150 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity >=0.8.12 <0.9.0; - -import "./EVM.sol"; - -contract XChain { - struct XChainCallProof { - uint chainID; - uint blockID; - bytes callProof; - } - - struct AsyncBusProof { - uint busID; - bytes boosterCallProof; - } - - struct 
AsyncBusProofV2 { - uint blockNumber; - uint busID; - } - - enum ProofType { - INVALID, - ASYNC, - SYNC - } - - // Messages are stored only on the source chain for ASYNC messages. - // In SYNC mode, the message is stored on both the source and the target chain. - bytes32[] public messages; - - // Only stored on L1 - // Currently getBlockHash() is not supported via the new Taiko Gwyneth - //ITaiko public taiko; - // todo (@Brecht): XChain has a bus property but Bus is an XChain (inherits). It does not make too much sense to me, or maybe i'm missing the point ? - //Bus public bus; - - // Event that is logged when a transaction on a chain also needs to be executed on another chain - event ExecuteNextOn(uint chainID, address from, address target, bytes callData); - - error FUNC_NOT_IMPLEMENTED(); - error NO_NEED_BUS_PROOF_ALL_ASYNC(); - - function init(/*ITaiko _taiko*/) - internal - { - //taiko = _taiko; - } - - modifier notImplemented() { - revert FUNC_NOT_IMPLEMENTED(); - _; - } - - // xExecuteOn functions need - // - to be external - modifier xExecuteOn(uint chainID) { - if (EVM.chainId() == chainID) { - _; - } else { - EVM.xCallOptions(chainID, true); - (bool success, bytes memory data) = address(this).staticcall(msg.data); - require(success); - // Just pass through the return data - assembly { - return(add(data, 32), mload(data)) - } - } - } - - // xFunctions functions need - // - to be external - // - to have `bytes proof` as the last function argument - modifier xFunction(uint fromChainId, uint toChainId, bytes calldata proof) { - // Current code is written with async case ! (This is outdated there, no need to run if running in sync. comp mode) - if (fromChainId != toChainId) { - // Remove the proof data from the message data - // Bytes arays are padded to 32 bytes and start with a 32 byte length value - uint messageLength = msg.data.length - ((proof.length + 31) / 32 + 1) * 32; - bytes memory message = msg.data; - assembly { - mstore(message, messageLength) - } - - // Use the bus to communicate between chains - if (EVM.chainId() == fromChainId) { - uint busID = write(message); - - // Always suggest doing an async proof for now on the target chain - AsyncBusProofV2 memory asyncProof = AsyncBusProofV2({ - busID: busID, - blockNumber: block.number - }); - bytes memory encodedProof = abi.encode(asyncProof); - bytes memory callData = bytes(string.concat(string(new bytes(0x0001)), string(message), string(encodedProof))); - emit ExecuteNextOn(toChainId, address(0), address(this), callData); - } else if (EVM.chainId() == toChainId) { - consume(fromChainId, message, proof); - } else { - revert(); - } - } - _; - } - - // These could also be exposed using a precompile because we could get them from public input, - // but that requires extra work so let's just fetch them from L1 for now - function getBlockHash(uint chainID, uint blockID) external view xExecuteOn(EVM.l1ChainId) returns (bytes32) { - // todo(@Brecht): Currently not supported or well, at least TaikoL1 does not have it with the current design. - //return taiko.getBlockHash(chainID, blockID); - } - - function calcMessageHash(bytes memory message) internal view returns (bytes32) { - return keccak256(abi.encode(EVM.chainId(), msg.sender, message)); - } - - // Supports setting the call options using any L2 in the booster network. - // This is done by first checking the validity of the blockhash of the specified L2. 
- function xCallOptions(uint chainID, bool sandbox, bytes memory proof) internal view { - // Decode the proof - XChainCallProof memory chainCallProof = abi.decode(proof, (XChainCallProof)); - require(chainID == chainCallProof.chainID); - - // If the source chain isn't L1, go fetch the block header of the L2 stored on L1 - bytes32 blockHash = 0x0; - if (chainID != EVM.l1ChainId) { - - blockHash = this.getBlockHash(chainID, chainCallProof.blockID); - } - - // Do the call on the specified chain - EVM.xCallOptions(chainID, sandbox, blockHash, chainCallProof.callProof); - } - - // todo (@Brecht): - // There was a circular reference (XBus inherits from XChain, while also XChain has an XBus property, so I made these to compile) - // They will be inherited in XBus, but basically XBus can be incorporated into XChain, no? - - // Question (Brecht): - //- Shall we put these functionalities back into the bus? - //- Shall we remove (as I did here) the ownership of the bus, then use the previous implementation (notImplemented modifier) and override it in the child "bus"? - - // Currently, supposedly there is "synchronous composability", so let's assume a synchronous world - function write(bytes memory message) public virtual notImplemented returns (uint) {} - - // Even though the function just passes through to write(), it is needed for bus compatibility, where the consume function will differ - function consume(uint256 /*fromChainId*/, bytes memory message, bytes calldata proof) public notImplemented virtual {} -} \ No newline at end of file diff --git a/packages/protocol/contracts/gwyneth/XChainERC20Token.sol b/packages/protocol/contracts/gwyneth/XChainERC20Token.sol deleted file mode 100644 index 38536bb49b47..000000000000 --- a/packages/protocol/contracts/gwyneth/XChainERC20Token.sol +++ /dev/null @@ -1,167 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity >=0.8.12 <0.9.0; - -import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; - -import "./XChain.sol"; - -// The reason we need this is that I realized we need to somehow 'override' some of the functions we have in ERC20, and since the balances need to be affected in ERC20 and XChainToken, it is not possible with the current standard, except if we linearize the inheritance (ERC20 -> XChainToken -> TokenImplementation) -contract XChainERC20Token is XChain, ERC20 { - // Only stored on L1 - // @Brecht -> Shall we override the totalSupply() of ERC20 in our xERC20Example? And use this var instead of the ERC20's _totalSupply - // Not sure about this because I guess it shall serve the same purpose as totalSupply(), also it is a completely different interaction (on xChain) than on the canonical chain, but the totalSupply shall be the same IMO. - // meeting minutes: We can get rid of this. - uint private _totalBalance; - // Stored on all chains - // This led me to realize we need this inheritance: - // Somehow this has to be overridden (or rather be used) in the ERC20 contract, right? Like with the balanceOf(addr), otherwise the ERC20 is not 'notified'. - // What if we have a function in the child ERC20 which needs to be implemented, like modifyERC20Balance(); - // Example: - // Bob does an xTransfer to Alice from chain A to chain B. It is OK, but it shall translate into an ERC20 balance change too, not only in this contract but also in the ERC20 contract, which with the prev. inheritance was not possible. 
- /*New variables - overridden from ERC20 since we want them to be modifiable*/ - mapping(address => uint) private _balances; // -> Need to redefine and override functions - uint256 private _totalSupply; // -> Need to redefine and override functions - - constructor(string memory name_, string memory symbol_, address premintAddress_, uint256 premintAmount_ ) ERC20(name_, symbol_) { - _mint(premintAddress_, premintAmount_); - } - - // xtransfer for async case (proof needed) - function xtransfer(address to, uint amount, uint256 fromChainId, uint256 toChainId, bytes calldata proof) - xFunction(fromChainId, toChainId, proof) - external - { - if (EVM.chainId() == fromChainId) { - _balances[msg.sender] -= amount; - } - if (EVM.chainId() == toChainId) { - _balances[to] += amount; - } - } - - // xtransfer for async case - function xtransfer(address to, uint amount, uint256 fromChainId, uint256 toChainId) - external - { - require(EVM.chainId() == fromChainId, "ASYNC_CASE, only call it on source chain"); - - _balances[msg.sender] -= amount; - // We need to do xCallOptions (incorporate the minting on the dest chain) - // We check that we are on the correct source chain and then we do the EVM cross-call. - EVM.xCallOptions(toChainId); - this.xmint(to, amount); - } - - // Do a mind-puzzle with Brecht to check if this is really solving the problem of Alice sending Bob some tokens from chainA to chainB!! - // Mint function -> Should only be called by the SC itself. - function xmint(address to, uint amount) - external - { - // Can only be called by the contract itself (internal bookkeeping) - require(msg.sender == address(this), "NOT_ALLOWED"); - _balances[to] += amount; - } - - /* Overrides of ERC20 */ - // Change totalSupply and apply xExecuteOn modifier - function totalSupply() // Is it the same as totalSupply()? If so, I think that shall be fine! - xExecuteOn(EVM.l1ChainId) // Why does it have an xExecuteOn modifier? And why is it applied only here? - public - view - override - returns (uint256) - { - return _totalSupply; - } - - function balanceOf(address account) public view virtual override returns (uint256) { - return _balances[account]; - } - - /** - * @dev Moves `amount` of tokens from `from` to `to`. - * - * This internal function is equivalent to {transfer}, and can be used to - * e.g. implement automatic token fees, slashing mechanisms, etc. - * - * Emits a {Transfer} event. - * - * Requirements: - * - * - `from` cannot be the zero address. - * - `to` cannot be the zero address. - * - `from` must have a balance of at least `amount`. - */ - function _transfer(address from, address to, uint256 amount) internal virtual override { - require(from != address(0), "ERC20: transfer from the zero address"); - require(to != address(0), "ERC20: transfer to the zero address"); - - _beforeTokenTransfer(from, to, amount); - - uint256 fromBalance = _balances[from]; - require(fromBalance >= amount, "ERC20: transfer amount exceeds balance"); - unchecked { - _balances[from] = fromBalance - amount; - // Overflow not possible: the sum of all balances is capped by totalSupply, and the sum is preserved by - // decrementing then incrementing. - _balances[to] += amount; - } - - emit Transfer(from, to, amount); - - _afterTokenTransfer(from, to, amount); - } - - /** @dev Creates `amount` tokens and assigns them to `account`, increasing - * the total supply. - * - * Emits a {Transfer} event with `from` set to the zero address. - * - * Requirements: - * - * - `account` cannot be the zero address. 
- */ - function _mint(address account, uint256 amount) internal virtual override { - require(account != address(0), "ERC20: mint to the zero address"); - - _beforeTokenTransfer(address(0), account, amount); - - _totalSupply += amount; - unchecked { - // Overflow not possible: balance + amount is at most totalSupply + amount, which is checked above. - _balances[account] += amount; - } - emit Transfer(address(0), account, amount); - - _afterTokenTransfer(address(0), account, amount); - } - - /** - * @dev Destroys `amount` tokens from `account`, reducing the - * total supply. - * - * Emits a {Transfer} event with `to` set to the zero address. - * - * Requirements: - * - * - `account` cannot be the zero address. - * - `account` must have at least `amount` tokens. - */ - function _burn(address account, uint256 amount) internal virtual override { - require(account != address(0), "ERC20: burn from the zero address"); - - _beforeTokenTransfer(account, address(0), amount); - - uint256 accountBalance = _balances[account]; - require(accountBalance >= amount, "ERC20: burn amount exceeds balance"); - unchecked { - _balances[account] = accountBalance - amount; - // Overflow not possible: amount <= accountBalance <= totalSupply. - _totalSupply -= amount; - } - - emit Transfer(account, address(0), amount); - - _afterTokenTransfer(account, address(0), amount); - } -} \ No newline at end of file diff --git a/packages/protocol/contracts/libs/LibAddress.sol b/packages/protocol/contracts/libs/LibAddress.sol deleted file mode 100644 index bf9f9b8107d0..000000000000 --- a/packages/protocol/contracts/libs/LibAddress.sol +++ /dev/null @@ -1,102 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/utils/Address.sol"; -import "@openzeppelin/contracts/utils/cryptography/ECDSA.sol"; -import "@openzeppelin/contracts/utils/introspection/IERC165.sol"; -import "@openzeppelin/contracts/interfaces/IERC1271.sol"; - -/// @title LibAddress -/// @dev Provides utilities for address-related operations. -/// @custom:security-contact security@taiko.xyz -library LibAddress { - bytes4 private constant EIP1271_MAGICVALUE = 0x1626ba7e; - - error ETH_TRANSFER_FAILED(); - - /// @dev Sends Ether to the specified address. This method will not revert even if sending ether - /// fails. - /// This function is inspired by - /// https://github.com/nomad-xyz/ExcessivelySafeCall/blob/main/src/ExcessivelySafeCall.sol - /// @param _to The recipient address. - /// @param _amount The amount of Ether to send in wei. - /// @param _gasLimit The max amount gas to pay for this transaction. - /// @return success_ true if the call is successful, false otherwise. - function sendEther( - address _to, - uint256 _amount, - uint256 _gasLimit, - bytes memory _calldata - ) - internal - returns (bool success_) - { - // Check for zero-address transactions - if (_to == address(0)) revert ETH_TRANSFER_FAILED(); - // dispatch message to recipient - // by assembly calling "handle" function - // we call via assembly to avoid memcopying a very large returndata - // returned by a malicious contract - assembly { - success_ := - call( - _gasLimit, // gas - _to, // recipient - _amount, // ether value - add(_calldata, 0x20), // inloc - mload(_calldata), // inlen - 0, // outloc - 0 // outlen - ) - } - } - - /// @dev Sends Ether to the specified address. This method will revert if sending ether fails. - /// @param _to The recipient address. - /// @param _amount The amount of Ether to send in wei. 
- /// @param _gasLimit The max amount gas to pay for this transaction. - function sendEtherAndVerify(address _to, uint256 _amount, uint256 _gasLimit) internal { - if (_amount == 0) return; - if (!sendEther(_to, _amount, _gasLimit, "")) { - revert ETH_TRANSFER_FAILED(); - } - } - - /// @dev Sends Ether to the specified address. This method will revert if sending ether fails. - /// @param _to The recipient address. - /// @param _amount The amount of Ether to send in wei. - function sendEtherAndVerify(address _to, uint256 _amount) internal { - sendEtherAndVerify(_to, _amount, gasleft()); - } - - function supportsInterface( - address _addr, - bytes4 _interfaceId - ) - internal - view - returns (bool result_) - { - if (!Address.isContract(_addr)) return false; - - try IERC165(_addr).supportsInterface(_interfaceId) returns (bool _result) { - result_ = _result; - } catch { } - } - - function isValidSignature( - address addr, - bytes32 hash, - bytes memory sig - ) - internal - view - returns (bool valid) - { - if (Address.isContract(addr)) { - return IERC1271(addr).isValidSignature(hash, sig) == EIP1271_MAGICVALUE; - } else { - return ECDSA.recover(hash, sig) == addr; - } - } -} diff --git a/packages/protocol/contracts/libs/LibBytes.sol b/packages/protocol/contracts/libs/LibBytes.sol deleted file mode 100644 index 3ddddcf2b6af..000000000000 --- a/packages/protocol/contracts/libs/LibBytes.sol +++ /dev/null @@ -1,45 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -library LibBytes { - error INNER_ERROR(bytes innerError); - - // Function body taken from: - // https://github.com/clober-dex/core/blob/main/contracts/utils/BoringERC20.sol#L17-L33 - /// @notice Function to convert returned data to string - /// returns '' as fallback value. - function toString(bytes memory _data) internal pure returns (string memory) { - if (_data.length >= 64) { - return abi.decode(_data, (string)); - } else if (_data.length == 32) { - uint8 i = 0; - while (i < 32 && _data[i] != 0) { - i++; - } - bytes memory bytesArray = new bytes(i); - for (i = 0; i < 32 && _data[i] != 0; i++) { - bytesArray[i] = _data[i]; - } - return string(bytesArray); - } else { - return ""; - } - } - - // Taken from: - // https://github.com/boringcrypto/BoringSolidity/blob/master/contracts/BoringBatchable.sol - /// @dev Helper function to extract a useful revert message from a failed call. - /// If the returned data is malformed or not correctly abi encoded then this call can fail - /// itself. - function revertWithExtractedError(bytes memory _returnData) internal pure { - // If the _res length is less than 68, then - // the transaction failed with custom error or silently (without a revert message) - if (_returnData.length < 68) revert INNER_ERROR(_returnData); - - assembly { - // Slice the sighash. 
- _returnData := add(_returnData, 0x04) - } - revert(abi.decode(_returnData, (string))); // All that remains is the revert string - } -} diff --git a/packages/protocol/contracts/libs/LibDeploy.sol b/packages/protocol/contracts/libs/LibDeploy.sol deleted file mode 100644 index 9a3fafd174d8..000000000000 --- a/packages/protocol/contracts/libs/LibDeploy.sol +++ /dev/null @@ -1,32 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol"; -import "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol"; - -/// @title LibDeploy -/// @dev Provides utilities for deploying contracts -library LibDeploy { - error NULL_IMPL_ADDR(); - - function deployERC1967Proxy( - address impl, - address owner, - bytes memory data - ) - internal - returns (address proxy) - { - if (impl == address(0)) revert NULL_IMPL_ADDR(); - proxy = address(new ERC1967Proxy(impl, data)); - - if (owner != address(0) && owner != OwnableUpgradeable(proxy).owner()) { - OwnableUpgradeable(proxy).transferOwnership(owner); - } - } -} diff --git a/packages/protocol/contracts/libs/LibMath.sol b/packages/protocol/contracts/libs/LibMath.sol deleted file mode 100644 index 1de714ba3310..000000000000 --- a/packages/protocol/contracts/libs/LibMath.sol +++ /dev/null @@ -1,27 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -/// @title LibMath -/// @dev This library offers additional math functions for uint256. -library LibMath { - /// @dev Returns the smaller of the two given values. - /// @param a The first number to compare. - /// @param b The second number to compare. - /// @return The smaller of the two numbers. - function min(uint256 a, uint256 b) internal pure returns (uint256) { - return a > b ? b : a; - } - - /// @dev Returns the larger of the two given values. - /// @param a The first number to compare. - /// @param b The second number to compare. - /// @return The larger of the two numbers. - function max(uint256 a, uint256 b) internal pure returns (uint256) { - return a > b ? a : b; - } -} diff --git a/packages/protocol/contracts/libs/LibNetwork.sol b/packages/protocol/contracts/libs/LibNetwork.sol deleted file mode 100644 index e5ddd51bccdc..000000000000 --- a/packages/protocol/contracts/libs/LibNetwork.sol +++ /dev/null @@ -1,57 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title LibNetwork -library LibNetwork { - uint256 internal constant MAINNET = 1; - uint256 internal constant ROPSTEN = 2; - uint256 internal constant RINKEBY = 4; - uint256 internal constant GOERLI = 5; - uint256 internal constant KOVAN = 42; - uint256 internal constant HOLESKY = 17_000; - uint256 internal constant SEPOLIA = 11_155_111; - - uint64 internal constant TAIKO_MAINNET = 167_000; - uint64 internal constant TAIKO_HEKLA = 167_009; - - /// @dev Checks if the chain ID represents an Ethereum testnet. - /// @param _chainId The chain ID. - /// @return true if the chain ID represents an Ethereum testnet, false otherwise. 
- function isEthereumTestnet(uint256 _chainId) internal pure returns (bool) { - return _chainId == LibNetwork.ROPSTEN || _chainId == LibNetwork.RINKEBY - || _chainId == LibNetwork.GOERLI || _chainId == LibNetwork.KOVAN - || _chainId == LibNetwork.HOLESKY || _chainId == LibNetwork.SEPOLIA; - } - - /// @dev Checks if the chain ID represents an Ethereum testnet or the Ethereum mainnet. - /// @param _chainId The chain ID. - /// @return true if the chain ID represents an Ethereum testnet or the Ethereum mainnet, false - /// otherwise. - function isEthereumMainnetOrTestnet(uint256 _chainId) internal pure returns (bool) { - return _chainId == LibNetwork.MAINNET || isEthereumTestnet(_chainId); - } - - /// @dev Checks if the chain ID represents the Taiko L2 mainnet. - /// @param _chainId The chain ID. - /// @return true if the chain ID represents the Taiko L2 mainnet. - function isTaikoMainnet(uint256 _chainId) internal pure returns (bool) { - return _chainId == TAIKO_MAINNET; - } - - /// @dev Checks if the chain ID represents an internal Taiko devnet's base layer. - /// @param _chainId The chain ID. - /// @return true if the chain ID represents an internal Taiko devnet's base layer, false - /// otherwise. - function isTaikoDevnet(uint256 _chainId) internal pure returns (bool) { - return _chainId >= 32_300 && _chainId <= 32_400; - } - - /// @dev Checks if the chain supports the Dencun hardfork. Note that this check doesn't need to be - /// exhaustive. - /// @param _chainId The chain ID. - /// @return true if the chain supports the Dencun hardfork, false otherwise. - function isDencunSupported(uint256 _chainId) internal pure returns (bool) { - return _chainId == LibNetwork.MAINNET || _chainId == LibNetwork.HOLESKY - || _chainId == LibNetwork.SEPOLIA || isTaikoDevnet(_chainId); - } -} diff --git a/packages/protocol/contracts/libs/LibTrieProof.sol b/packages/protocol/contracts/libs/LibTrieProof.sol deleted file mode 100644 index f28476318c89..000000000000 --- a/packages/protocol/contracts/libs/LibTrieProof.sol +++ /dev/null @@ -1,66 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity 0.8.24; - -import "../thirdparty/optimism/rlp/RLPReader.sol"; -import "../thirdparty/optimism/rlp/RLPWriter.sol"; -import "../thirdparty/optimism/trie/SecureMerkleTrie.sol"; - -/// @title LibTrieProof -/// @custom:security-contact security@taiko.xyz -library LibTrieProof { - // The consensus format representing an account is RLP encoded in the - // following order: nonce, balance, storageHash, codeHash. - uint256 private constant _ACCOUNT_FIELD_INDEX_STORAGE_HASH = 2; - - error LTP_INVALID_ACCOUNT_PROOF(); - error LTP_INVALID_INCLUSION_PROOF(); - - /// @notice Verifies that the value of a slot in the storage of an account matches the given value. - /// - /// @param _rootHash The merkle root of the state tree or the account tree. If accountProof's length - /// is zero, it is used as the account's storage root, otherwise it will be used as the state - /// root. - /// @param _addr The address of the contract. - /// @param _slot The slot in the contract. - /// @param _value The value to be verified. 
- /// @param _accountProof The account proof - /// @param _storageProof The storage proof - /// @return storageRoot_ The account's storage root - function verifyMerkleProof( - bytes32 _rootHash, - address _addr, - bytes32 _slot, - bytes32 _value, - bytes[] memory _accountProof, - bytes[] memory _storageProof - ) - internal - pure - returns (bytes32 storageRoot_) - { - if (_accountProof.length != 0) { - bytes memory rlpAccount = - SecureMerkleTrie.get(abi.encodePacked(_addr), _accountProof, _rootHash); - - if (rlpAccount.length == 0) revert LTP_INVALID_ACCOUNT_PROOF(); - - RLPReader.RLPItem[] memory accountState = RLPReader.readList(rlpAccount); - - storageRoot_ = - bytes32(RLPReader.readBytes(accountState[_ACCOUNT_FIELD_INDEX_STORAGE_HASH])); - } else { - storageRoot_ = _rootHash; - } - - bool verified = SecureMerkleTrie.verifyInclusionProof( - bytes.concat(_slot), RLPWriter.writeUint(uint256(_value)), _storageProof, storageRoot_ - ); - - if (!verified) revert LTP_INVALID_INCLUSION_PROOF(); - } -} diff --git a/packages/protocol/contracts/signal/ISignalService.sol b/packages/protocol/contracts/signal/ISignalService.sol deleted file mode 100644 index 804cfdde6e19..000000000000 --- a/packages/protocol/contracts/signal/ISignalService.sol +++ /dev/null @@ -1,181 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title ISignalService -/// @notice The SignalService contract serves as a secure cross-chain message -/// passing system. It defines methods for sending and verifying signals with -/// merkle proofs. The trust assumption is that the target chain has secure -/// access to the merkle root (such as Taiko injects it in the anchor -/// transaction). With this, verifying a signal is reduced to simply verifying -/// a merkle proof. -/// @custom:security-contact security@taiko.xyz -interface ISignalService { - enum CacheOption { - CACHE_NOTHING, - CACHE_SIGNAL_ROOT, - CACHE_STATE_ROOT, - CACHE_BOTH - } - - struct HopProof { - /// @notice This hop's destination chain ID. If there is a next hop, this ID is the next - /// hop's source chain ID. - uint64 chainId; - /// @notice The ID of a source chain block whose state root has been synced to the hop's - /// destination chain. - /// Note that this block ID must be greater than or equal to the block ID where the signal - /// was sent on the source chain. - uint64 blockId; - /// @notice The state root or signal root of the source chain at the above blockId. This - /// value has been synced to the destination chain. - /// @dev To get both the blockId and the rootHash, apps should subscribe to the - /// ChainDataSynced event or query `topBlockId` first using the source chain's ID and - /// LibStrings.H_STATE_ROOT to get the most recent block ID synced, then call - /// `getSyncedChainData` to read the synchronized data. - bytes32 rootHash; - /// @notice Options to cache either the state roots or signal roots of middle-hops to the - /// current chain. - CacheOption cacheOption; - /// @notice The signal service's account proof. If this value is empty, then `rootHash` will - /// be used as the signal root, otherwise, `rootHash` will be used as the state root. - bytes[] accountProof; - /// @notice The signal service's storage proof. - bytes[] storageProof; - } - - /// @notice Emitted when a remote chain's state root or signal root is - /// synced locally as a signal. - /// @param chainId The remote chainId. - /// @param blockId The chain data's corresponding blockId. - /// @param kind A value to mark the data type. 
- /// @param data The remote data. - /// @param signal The signal for this chain data. - event ChainDataSynced( - uint64 indexed chainId, - uint64 indexed blockId, - bytes32 indexed kind, - bytes32 data, - bytes32 signal - ); - - /// @notice Emitted when a signal is sent. - /// @param app The address that initiated the signal. - /// @param signal The signal (message) that was sent. - /// @param slot The location in storage where this signal is stored. - /// @param value The value of the signal. - event SignalSent(address app, bytes32 signal, bytes32 slot, bytes32 value); - - /// @notice Emitted when an address is authorized or deauthorized. - /// @param addr The address to be authorized or deauthorized. - /// @param authorized True if authorized, false otherwise. - event Authorized(address indexed addr, bool authorized); - - /// @notice Send a signal (message) by setting the storage slot to the same value as the signal - /// itself. - /// @param _signal The signal (message) to send. - /// @return slot_ The location in storage where this signal is stored. - function sendSignal(bytes32 _signal) external returns (bytes32 slot_); - - /// @notice Sync a data from a remote chain locally as a signal. The signal is calculated - /// uniquely from chainId, kind, and data. - /// @param _chainId The remote chainId. - /// @param _kind A value to mark the data type. - /// @param _blockId The chain data's corresponding blockId - /// @param _chainData The remote data. - /// @return signal_ The signal for this chain data. - function syncChainData( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId, - bytes32 _chainData - ) - external - returns (bytes32 signal_); - - /// @notice Verifies if a signal has been received on the target chain. - /// @param _chainId The identifier for the source chain from which the - /// signal originated. - /// @param _app The address that initiated the signal. - /// @param _signal The signal (message) to send. - /// @param _proof Merkle proof that the signal was persisted on the - /// source chain. - /// @return numCacheOps_ The number of newly cached items. - function proveSignalReceived( - uint64 _chainId, - address _app, - bytes32 _signal, - bytes calldata _proof - ) - external - returns (uint256 numCacheOps_); - - /// @notice Verifies if a signal has been received on the target chain. - /// This is the "readonly" version of proveSignalReceived. - /// @param _chainId The identifier for the source chain from which the - /// signal originated. - /// @param _app The address that initiated the signal. - /// @param _signal The signal (message) to send. - /// @param _proof Merkle proof that the signal was persisted on the - /// source chain. - function verifySignalReceived( - uint64 _chainId, - address _app, - bytes32 _signal, - bytes calldata _proof - ) - external - view; - - /// @notice Verifies if a particular signal has already been sent. - /// @param _app The address that initiated the signal. - /// @param _signal The signal (message) that was sent. - /// @return true if the signal has been sent, otherwise false. - function isSignalSent(address _app, bytes32 _signal) external view returns (bool); - - /// @notice Checks if a chain data has been synced. - /// @param _chainId The remote chainId. - /// @param _kind A value to mark the data type. - /// @param _blockId The chain data's corresponding blockId - /// @param _chainData The remote data. - /// @return true if the data has been synced, otherwise false. 
- function isChainDataSynced( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId, - bytes32 _chainData - ) - external - view - returns (bool); - - /// @notice Returns the given block's chain data. - /// @param _chainId Identifier of the chainId. - /// @param _kind A value to mark the data type. - /// @param _blockId The chain data's corresponding block id. If this value is 0, use the top - /// block id. - /// @return blockId_ The actual block id. - /// @return chainData_ The synced chain data. - function getSyncedChainData( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId - ) - external - view - returns (uint64 blockId_, bytes32 chainData_); - - /// @notice Returns the data to be used for caching slot generation. - /// @param _chainId Identifier of the chainId. - /// @param _kind A value to mark the data type. - /// @param _blockId The chain data's corresponding block id. If this value is 0, use the top - /// block id. - /// @return signal_ The signal used for caching slot creation. - function signalForChainData( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId - ) - external - pure - returns (bytes32 signal_); -} diff --git a/packages/protocol/contracts/signal/SignalService.sol b/packages/protocol/contracts/signal/SignalService.sol deleted file mode 100644 index 0448330d9d91..000000000000 --- a/packages/protocol/contracts/signal/SignalService.sol +++ /dev/null @@ -1,372 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../common/EssentialContract.sol"; -import "../common/LibStrings.sol"; -import "../libs/LibTrieProof.sol"; -import "./ISignalService.sol"; - -/// @title SignalService -/// @notice See the documentation in {ISignalService} for more details. -/// @dev Labeled in AddressResolver as "signal_service". -/// @custom:security-contact security@taiko.xyz -contract SignalService is EssentialContract, ISignalService { - /// @notice Mapping to store the top blockId. - /// @dev Slot 1. - mapping(uint64 chainId => mapping(bytes32 kind => uint64 blockId)) public topBlockId; - - /// @notice Mapping to store the authorized addresses. - /// @dev Slot 2. - mapping(address addr => bool authorized) public isAuthorized; - - uint256[48] private __gap; - - struct CacheAction { - bytes32 rootHash; - bytes32 signalRoot; - uint64 chainId; - uint64 blockId; - bool isFullProof; - bool isLastHop; - CacheOption option; - } - - error SS_EMPTY_PROOF(); - error SS_INVALID_HOPS_WITH_LOOP(); - error SS_INVALID_LAST_HOP_CHAINID(); - error SS_INVALID_MID_HOP_CHAINID(); - error SS_INVALID_STATE(); - error SS_SIGNAL_NOT_FOUND(); - error SS_UNAUTHORIZED(); - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - } - - /// @dev Authorize or deauthorize an address for calling syncChainData. - /// @dev Note that addr is supposed to be TaikoL1 and TaikoL1 contracts deployed locally. - /// @param _addr The address to be authorized or deauthorized. - /// @param _authorize True if authorize, false otherwise. 
- function authorize(address _addr, bool _authorize) external onlyOwner { - if (isAuthorized[_addr] == _authorize) revert SS_INVALID_STATE(); - isAuthorized[_addr] = _authorize; - emit Authorized(_addr, _authorize); - } - - /// @inheritdoc ISignalService - function sendSignal(bytes32 _signal) external returns (bytes32) { - return _sendSignal(msg.sender, _signal, _signal); - } - - /// @inheritdoc ISignalService - function syncChainData( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId, - bytes32 _chainData - ) - external - returns (bytes32) - { - if (!isAuthorized[msg.sender]) revert SS_UNAUTHORIZED(); - return _syncChainData(_chainId, _kind, _blockId, _chainData); - } - - /// @inheritdoc ISignalService - /// @dev This function may revert. - function proveSignalReceived( - uint64 _chainId, - address _app, - bytes32 _signal, - bytes calldata _proof - ) - external - virtual - whenNotPaused - nonReentrant - returns (uint256 numCacheOps_) - { - CacheAction[] memory actions = // actions for caching - _verifySignalReceived(_chainId, _app, _signal, _proof, true); - - for (uint256 i; i < actions.length; ++i) { - numCacheOps_ += _cache(actions[i]); - } - } - - /// @inheritdoc ISignalService - /// @dev This function may revert. - function verifySignalReceived( - uint64 _chainId, - address _app, - bytes32 _signal, - bytes calldata _proof - ) - external - view - { - _verifySignalReceived(_chainId, _app, _signal, _proof, false); - } - - /// @inheritdoc ISignalService - function isChainDataSynced( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId, - bytes32 _chainData - ) - public - view - nonZeroValue(_chainData) - returns (bool) - { - bytes32 signal = signalForChainData(_chainId, _kind, _blockId); - return _loadSignalValue(address(this), signal) == _chainData; - } - - /// @inheritdoc ISignalService - function isSignalSent(address _app, bytes32 _signal) public view returns (bool) { - return _loadSignalValue(_app, _signal) != 0; - } - - /// @inheritdoc ISignalService - function getSyncedChainData( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId - ) - public - view - returns (uint64 blockId_, bytes32 chainData_) - { - blockId_ = _blockId != 0 ? _blockId : topBlockId[_chainId][_kind]; - - if (blockId_ != 0) { - bytes32 signal = signalForChainData(_chainId, _kind, blockId_); - chainData_ = _loadSignalValue(address(this), signal); - if (chainData_ == 0) revert SS_SIGNAL_NOT_FOUND(); - } - } - - /// @inheritdoc ISignalService - function signalForChainData( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId - ) - public - pure - returns (bytes32) - { - return keccak256(abi.encode(_chainId, _kind, _blockId)); - } - - /// @notice Returns the slot for a signal. - /// @param _chainId The chainId of the signal. - /// @param _app The address that initiated the signal. - /// @param _signal The signal (message) that was sent. - /// @return The slot for the signal. 
- function getSignalSlot( - uint64 _chainId, - address _app, - bytes32 _signal - ) - public - pure - returns (bytes32) - { - return keccak256(abi.encodePacked("SIGNAL", _chainId, _app, _signal)); - } - - function _verifyHopProof( - uint64 _chainId, - address _app, - bytes32 _signal, - bytes32 _value, - HopProof memory _hop, - address _signalService - ) - internal - view - virtual - nonZeroAddr(_app) - nonZeroValue(_signal) - nonZeroValue(_value) - returns (bytes32) - { - return LibTrieProof.verifyMerkleProof( - _hop.rootHash, - _signalService, - getSignalSlot(_chainId, _app, _signal), - _value, - _hop.accountProof, - _hop.storageProof - ); - } - - function _authorizePause(address, bool) internal pure override notImplemented { } - - function _syncChainData( - uint64 _chainId, - bytes32 _kind, - uint64 _blockId, - bytes32 _chainData - ) - private - returns (bytes32 signal_) - { - signal_ = signalForChainData(_chainId, _kind, _blockId); - _sendSignal(address(this), signal_, _chainData); - - if (topBlockId[_chainId][_kind] < _blockId) { - topBlockId[_chainId][_kind] = _blockId; - } - emit ChainDataSynced(_chainId, _blockId, _kind, _chainData, signal_); - } - - function _sendSignal( - address _app, - bytes32 _signal, - bytes32 _value - ) - private - nonZeroAddr(_app) - nonZeroValue(_signal) - nonZeroValue(_value) - returns (bytes32 slot_) - { - slot_ = getSignalSlot(uint64(block.chainid), _app, _signal); - assembly { - sstore(slot_, _value) - } - emit SignalSent(_app, _signal, slot_, _value); - } - - function _cache(CacheAction memory _action) private returns (uint256 numCacheOps_) { - // cache state root - bool cacheStateRoot = _action.option == CacheOption.CACHE_BOTH - || _action.option == CacheOption.CACHE_STATE_ROOT; - - if (cacheStateRoot && _action.isFullProof && !_action.isLastHop) { - numCacheOps_ = 1; - _syncChainData( - _action.chainId, LibStrings.H_STATE_ROOT, _action.blockId, _action.rootHash - ); - } - - // cache signal root - bool cacheSignalRoot = _action.option == CacheOption.CACHE_BOTH - || _action.option == CacheOption.CACHE_SIGNAL_ROOT; - - if (cacheSignalRoot && (_action.isFullProof || !_action.isLastHop)) { - numCacheOps_ += 1; - _syncChainData( - _action.chainId, LibStrings.H_SIGNAL_ROOT, _action.blockId, _action.signalRoot - ); - } - } - - function _loadSignalValue( - address _app, - bytes32 _signal - ) - private - view - nonZeroAddr(_app) - nonZeroValue(_signal) - returns (bytes32 value_) - { - bytes32 slot = getSignalSlot(uint64(block.chainid), _app, _signal); - assembly { - value_ := sload(slot) - } - } - - function _verifySignalReceived( - uint64 _chainId, - address _app, - bytes32 _signal, - bytes calldata _proof, - bool _prepareCaching - ) - private - view - nonZeroAddr(_app) - nonZeroValue(_signal) - returns (CacheAction[] memory actions) - { - HopProof[] memory hopProofs = abi.decode(_proof, (HopProof[])); - if (hopProofs.length == 0) revert SS_EMPTY_PROOF(); - - uint64[] memory trace = new uint64[](hopProofs.length - 1); - - if (_prepareCaching) { - actions = new CacheAction[](hopProofs.length); - } - - uint64 chainId = _chainId; - address app = _app; - bytes32 signal = _signal; - bytes32 value = _signal; - address signalService = resolve(chainId, LibStrings.B_SIGNAL_SERVICE, false); - if (signalService == address(this)) revert SS_INVALID_MID_HOP_CHAINID(); - - HopProof memory hop; - bytes32 signalRoot; - bool isFullProof; - bool isLastHop; - - for (uint256 i; i < hopProofs.length; ++i) { - hop = hopProofs[i]; - - for (uint256 j; j < i; ++j) { - if (trace[j] == 
hop.chainId) revert SS_INVALID_HOPS_WITH_LOOP(); - } - - signalRoot = _verifyHopProof(chainId, app, signal, value, hop, signalService); - isLastHop = i == trace.length; - if (isLastHop) { - if (hop.chainId != block.chainid) revert SS_INVALID_LAST_HOP_CHAINID(); - signalService = address(this); - } else { - trace[i] = hop.chainId; - - if (hop.chainId == 0 || hop.chainId == block.chainid) { - revert SS_INVALID_MID_HOP_CHAINID(); - } - signalService = resolve(hop.chainId, LibStrings.B_SIGNAL_SERVICE, false); - if (signalService == address(this)) revert SS_INVALID_MID_HOP_CHAINID(); - } - - isFullProof = hop.accountProof.length != 0; - - if (_prepareCaching) { - actions[i] = CacheAction( - hop.rootHash, - signalRoot, - chainId, - hop.blockId, - isFullProof, - isLastHop, - hop.cacheOption - ); - } - - signal = signalForChainData( - chainId, - isFullProof ? LibStrings.H_STATE_ROOT : LibStrings.H_SIGNAL_ROOT, - hop.blockId - ); - value = hop.rootHash; - chainId = hop.chainId; - app = signalService; - } - - if (value == 0 || value != _loadSignalValue(address(this), signal)) { - revert SS_SIGNAL_NOT_FOUND(); - } - } -} diff --git a/packages/protocol/contracts/test/erc20/FreeMintERC20.sol b/packages/protocol/contracts/test/erc20/FreeMintERC20.sol deleted file mode 100644 index 72f18bf3bda9..000000000000 --- a/packages/protocol/contracts/test/erc20/FreeMintERC20.sol +++ /dev/null @@ -1,28 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; - -// An ERC20 Token with a mint function anyone can call, for free, to receive -// 5 tokens. -contract FreeMintERC20 is ERC20 { - mapping(address minter => bool hasMinted) public minters; - - error HasMinted(); - - constructor(string memory name, string memory symbol) ERC20(name, symbol) { } - - function mint(address to) public { - if (minters[to]) { - revert HasMinted(); - } - - minters[to] = true; - _mint(to, 50 * (10 ** decimals())); - } -} diff --git a/packages/protocol/contracts/test/erc20/MayFailFreeMintERC20.sol b/packages/protocol/contracts/test/erc20/MayFailFreeMintERC20.sol deleted file mode 100644 index fd4c75794205..000000000000 --- a/packages/protocol/contracts/test/erc20/MayFailFreeMintERC20.sol +++ /dev/null @@ -1,55 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; - -// An ERC20 token for testing the Taiko Bridge on testnets. -// This token has 50% of failure on transfers so we can -// test the bridge's error handling. 
-contract MayFailFreeMintERC20 is ERC20 { - mapping(address minter => bool hasMinted) public minters; - - error HasMinted(); - error Failed(); - - constructor(string memory name, string memory symbol) ERC20(name, symbol) { } - - function mint(address to) public { - if (minters[msg.sender]) { - revert HasMinted(); - } - - minters[msg.sender] = true; - _mint(to, 50 * (10 ** decimals())); - } - - function transfer(address to, uint256 amount) public override returns (bool) { - _mayFail(); - return ERC20.transfer(to, amount); - } - - function transferFrom( - address from, - address to, - uint256 amount - ) - public - override - returns (bool) - { - _mayFail(); - return ERC20.transferFrom(from, to, amount); - } - - // Have a 50% change of failure. - function _mayFail() private view { - if (block.number % 2 == 0) { - revert Failed(); - } - } -} diff --git a/packages/protocol/contracts/test/erc20/RegularERC20.sol b/packages/protocol/contracts/test/erc20/RegularERC20.sol deleted file mode 100644 index ff3be495bd55..000000000000 --- a/packages/protocol/contracts/test/erc20/RegularERC20.sol +++ /dev/null @@ -1,11 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; - -contract RegularERC20 is ERC20 { - constructor(uint256 initialSupply) ERC20("RegularERC20", "RGL") { - _mint(msg.sender, initialSupply); - } -} diff --git a/packages/protocol/contracts/thirdparty/LibBytesUtils.sol b/packages/protocol/contracts/thirdparty/LibBytesUtils.sol deleted file mode 100644 index 24210d7c8fbc..000000000000 --- a/packages/protocol/contracts/thirdparty/LibBytesUtils.sol +++ /dev/null @@ -1,141 +0,0 @@ -// SPDX-License-Identifier: MIT -// Taken from -// https://github.com/ethereum-optimism/optimism/blob/develop/packages/contracts/contracts/libraries/utils/LibBytesUtils.sol -// (The MIT License) -// -// Copyright 2020-2021 Optimism -// Copyright 2022-2023 Taiko Labs -// -// Permission is hereby granted, free of charge, to any person obtaining -// a copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to -// permit persons to whom the Software is furnished to do so, subject to -// the following conditions: -// -// The above copyright notice and this permission notice shall be -// included in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -pragma solidity ^0.8.20; - -/** - * @title LibBytesUtils - */ -library LibBytesUtils { - function slice( - bytes memory _bytes, - uint256 _start, - uint256 _length - ) - internal - pure - returns (bytes memory) - { - require(_length + 31 >= _length, "slice_overflow"); - require(_start + _length >= _start, "slice_overflow"); - require(_bytes.length >= _start + _length, "slice_outOfBounds"); - - bytes memory tempBytes; - - assembly { - switch iszero(_length) - case 0 { - // Get a location of some free memory and store it in tempBytes - // as - // Solidity does for memory variables. - tempBytes := mload(0x40) - - // The first word of the slice result is potentially a partial - // word read from the original array. To read it, we calculate - // the length of that partial word and start copying that many - // bytes into the array. The first word we copy will start with - // data we don't care about, but the last `lengthmod` bytes will - // land at the beginning of the contents of the new array. When - // we're done copying, we overwrite the full first word with - // the actual length of the slice. - let lengthmod := and(_length, 31) - - // The multiplication in the next line is necessary - // because when slicing multiples of 32 bytes (lengthmod == 0) - // the following copy loop was copying the origin's length - // and then ending prematurely not copying everything it should. - let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) - let end := add(mc, _length) - - for { - // The multiplication in the next line has the same exact - // purpose - // as the one above. - let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) - } lt(mc, end) { - mc := add(mc, 0x20) - cc := add(cc, 0x20) - } { mstore(mc, mload(cc)) } - - mstore(tempBytes, _length) - - // update free-memory pointer allocating the array padded to 32 - // bytes like the compiler does now - mstore(0x40, and(add(mc, 31), not(31))) - } - // if we want a zero-length slice let's just return a zero-length - // array - default { - tempBytes := mload(0x40) - - // zero out the 32 bytes slice we are about to return - // we need to do it because Solidity does not garbage collect - mstore(tempBytes, 0) - - mstore(0x40, add(tempBytes, 0x20)) - } - } - - return tempBytes; - } - - function slice(bytes memory _bytes, uint256 _start) internal pure returns (bytes memory) { - if (_start >= _bytes.length) { - return bytes(""); - } - - return slice(_bytes, _start, _bytes.length - _start); - } - - function toBytes32(bytes memory _bytes) internal pure returns (bytes32) { - if (_bytes.length < 32) { - bytes32 ret; - assembly { - ret := mload(add(_bytes, 32)) - } - return ret; - } - - return abi.decode(_bytes, (bytes32)); // will truncate if input length > - // 32 bytes - } - - function toNibbles(bytes memory _bytes) internal pure returns (bytes memory) { - bytes memory nibbles = new bytes(_bytes.length * 2); - - for (uint256 i; i < _bytes.length; ++i) { - nibbles[i * 2] = _bytes[i] >> 4; - nibbles[i * 2 + 1] = bytes1(uint8(_bytes[i]) % 16); - } - - return nibbles; - } - - function equal(bytes memory _bytes, bytes memory _other) internal pure returns (bool) { - return keccak256(_bytes) == keccak256(_other); - } -} diff --git a/packages/protocol/contracts/thirdparty/LibFixedPointMath.sol b/packages/protocol/contracts/thirdparty/LibFixedPointMath.sol deleted file mode 100644 index f9862f5151b6..000000000000 --- a/packages/protocol/contracts/thirdparty/LibFixedPointMath.sol +++ /dev/null @@ -1,83 +0,0 @@ -// 
SPDX-License-Identifier: MIT -// Taken from the contract below, expWad() function tailored to Taiko's need -// https://github.com/transmissions11/solmate/blob/v7/src/utils/FixedPointMathLib.sol -pragma solidity ^0.8.20; - -library LibFixedPointMath { - uint128 public constant MAX_EXP_INPUT = 135_305_999_368_893_231_588; - uint256 public constant SCALING_FACTOR = 1e18; // For fixed point - // representation factor - - error Overflow(); - - // Computes e^x in 1e18 fixed point. - function exp(int256 x) internal pure returns (int256 r) { - unchecked { - // Input x is in fixed point format, with scale factor 1/1e18. - - // When the result is < 0.5 we return zero. This happens when - // x <= floor(log(0.5e18) * 1e18) ~ -42e18 - if (x <= -42_139_678_854_452_767_551) { - return 0; - } - - // When the result is > (2**255 - 1) / 1e18 we can not represent it - // as an int256. This happens when x >= floor(log((2**255 -1) / - // 1e18) * 1e18) ~ 135. - if (x >= 135_305_999_368_893_231_589) revert Overflow(); - - // x is now in the range (-42, 136) * 1e18. Convert to (-42, 136) * - // 2**96 - // for more intermediate precision and a binary basis. This base - // conversion - // is a multiplication by 1e18 / 2**96 = 5**18 / 2**78. - x = (x << 78) / 5 ** 18; - - // Reduce range of x to (-½ ln 2, ½ ln 2) * 2**96 by factoring out - // powers of two - // such that exp(x) = exp(x') * 2**k, where k is an integer. - // Solving this gives k = round(x / log(2)) and x' = x - k * log(2). - int256 k = ((x << 96) / 54_916_777_467_707_473_351_141_471_128 + 2 ** 95) >> 96; - x = x - k * 54_916_777_467_707_473_351_141_471_128; - // k is in the range [-61, 195]. - - // Evaluate using a (6, 7)-term rational approximation. - // p is made monic, we'll multiply by a scale factor later. - int256 y = x + 1_346_386_616_545_796_478_920_950_773_328; - y = ((y * x) >> 96) + 57_155_421_227_552_351_082_224_309_758_442; - int256 p = y + x - 94_201_549_194_550_492_254_356_042_504_812; - p = ((p * y) >> 96) + 28_719_021_644_029_726_153_956_944_680_412_240; - p = p * x + (4_385_272_521_454_847_904_659_076_985_693_276 << 96); - - // We leave p in 2**192 basis so we don't need to scale it back up - // for the division. - int256 q = x - 2_855_989_394_907_223_263_936_484_059_900; - q = ((q * x) >> 96) + 50_020_603_652_535_783_019_961_831_881_945; - q = ((q * x) >> 96) - 533_845_033_583_426_703_283_633_433_725_380; - q = ((q * x) >> 96) + 3_604_857_256_930_695_427_073_651_918_091_429; - q = ((q * x) >> 96) - 14_423_608_567_350_463_180_887_372_962_807_573; - q = ((q * x) >> 96) + 26_449_188_498_355_588_339_934_803_723_976_023; - assembly { - // Div in assembly because solidity adds a zero check despite - // the `unchecked`. - // The q polynomial is known not to have zeros in the domain. - // (All roots are complex) - // No scaling required because p is already 2**96 too large. - r := sdiv(p, q) - } - // r should be in the range (0.09, 0.25) * 2**96. - - // We now need to multiply r by - // * the scale factor s = ~6.031367120..., - // * the 2**k factor from the range reduction, and - // * the 1e18 / 2**96 factor for base converison. - // We do all of this at once, with an intermediate result in 2**213 - // basis - // so the final right shift is always by a positive amount. 
- r = int256( - (uint256(r) * 3_822_833_074_963_236_453_042_738_258_902_158_003_155_416_615_667) - >> uint256(195 - k) - ); - } - } -} diff --git a/packages/protocol/contracts/thirdparty/README.md b/packages/protocol/contracts/thirdparty/README.md deleted file mode 100644 index 0102fe8cd679..000000000000 --- a/packages/protocol/contracts/thirdparty/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# ABOUT THIRDPARTY CODE - -- /optimism: code copied from `packages/contracts-bedrock/src/libraries` in https://github.com/ethereum-optimism/optimism/releases/tag/op-batcher%2Fv1.4.3 as-is with only solidity pragma changed. - -- /solmate: code copied from https://github.com/transmissions11/solmate/blob/v7/src/utils/FixedPointMathLib.sol as-is with only solidity pragma changed. - -- /nomad-xyz: code copied from https://github.com/nomad-xyz/ExcessivelySafeCall/blob/main/src/ExcessivelySafeCall.sol with unused coded removed and solidity pragma changed. - -- /risczero: interface copied from https://sepolia.etherscan.io/address/0x83c2e9cd64b2a16d3908e94c7654f3864212e2f8#code as per: https://dev.risczero.com/api/blockchain-integration/contracts/verifier diff --git a/packages/protocol/contracts/thirdparty/optimism/Bytes.sol b/packages/protocol/contracts/thirdparty/optimism/Bytes.sol deleted file mode 100644 index 6a6fba86e7d1..000000000000 --- a/packages/protocol/contracts/thirdparty/optimism/Bytes.sol +++ /dev/null @@ -1,152 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title Bytes -/// @notice Bytes is a library for manipulating byte arrays. -library Bytes { - /// @custom:attribution https://github.com/GNSPS/solidity-bytes-utils - /// @notice Slices a byte array with a given starting index and length. Returns a new byte array - /// as opposed to a pointer to the original array. Will throw if trying to slice more - /// bytes than exist in the array. - /// @param _bytes Byte array to slice. - /// @param _start Starting index of the slice. - /// @param _length Length of the slice. - /// @return Slice of the input byte array. - function slice( - bytes memory _bytes, - uint256 _start, - uint256 _length - ) - internal - pure - returns (bytes memory) - { - unchecked { - require(_length + 31 >= _length, "slice_overflow"); - require(_start + _length >= _start, "slice_overflow"); - require(_bytes.length >= _start + _length, "slice_outOfBounds"); - } - - bytes memory tempBytes; - - assembly { - switch iszero(_length) - case 0 { - // Get a location of some free memory and store it in tempBytes as - // Solidity does for memory variables. - tempBytes := mload(0x40) - - // The first word of the slice result is potentially a partial - // word read from the original array. To read it, we calculate - // the length of that partial word and start copying that many - // bytes into the array. The first word we copy will start with - // data we don't care about, but the last `lengthmod` bytes will - // land at the beginning of the contents of the new array. When - // we're done copying, we overwrite the full first word with - // the actual length of the slice. - let lengthmod := and(_length, 31) - - // The multiplication in the next line is necessary - // because when slicing multiples of 32 bytes (lengthmod == 0) - // the following copy loop was copying the origin's length - // and then ending prematurely not copying everything it should. 
- let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) - let end := add(mc, _length) - - for { - // The multiplication in the next line has the same exact purpose - // as the one above. - let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) - } lt(mc, end) { - mc := add(mc, 0x20) - cc := add(cc, 0x20) - } { mstore(mc, mload(cc)) } - - mstore(tempBytes, _length) - - //update free-memory pointer - //allocating the array padded to 32 bytes like the compiler does now - mstore(0x40, and(add(mc, 31), not(31))) - } - //if we want a zero-length slice let's just return a zero-length array - default { - tempBytes := mload(0x40) - - //zero out the 32 bytes slice we are about to return - //we need to do it because Solidity does not garbage collect - mstore(tempBytes, 0) - - mstore(0x40, add(tempBytes, 0x20)) - } - } - - return tempBytes; - } - - /// @notice Slices a byte array with a given starting index up to the end of the original byte - /// array. Returns a new array rather than a pointer to the original. - /// @param _bytes Byte array to slice. - /// @param _start Starting index of the slice. - /// @return Slice of the input byte array. - function slice(bytes memory _bytes, uint256 _start) internal pure returns (bytes memory) { - if (_start >= _bytes.length) { - return bytes(""); - } - return slice(_bytes, _start, _bytes.length - _start); - } - - /// @notice Converts a byte array into a nibble array by splitting each byte into two nibbles. - /// Resulting nibble array will be exactly twice as long as the input byte array. - /// @param _bytes Input byte array to convert. - /// @return Resulting nibble array. - function toNibbles(bytes memory _bytes) internal pure returns (bytes memory) { - bytes memory _nibbles; - assembly { - // Grab a free memory offset for the new array - _nibbles := mload(0x40) - - // Load the length of the passed bytes array from memory - let bytesLength := mload(_bytes) - - // Calculate the length of the new nibble array - // This is the length of the input array times 2 - let nibblesLength := shl(0x01, bytesLength) - - // Update the free memory pointer to allocate memory for the new array. - // To do this, we add the length of the new array + 32 bytes for the array length - // rounded up to the nearest 32 byte boundary to the current free memory pointer. - mstore(0x40, add(_nibbles, and(not(0x1F), add(nibblesLength, 0x3F)))) - - // Store the length of the new array in memory - mstore(_nibbles, nibblesLength) - - // Store the memory offset of the _bytes array's contents on the stack - let bytesStart := add(_bytes, 0x20) - - // Store the memory offset of the nibbles array's contents on the stack - let nibblesStart := add(_nibbles, 0x20) - - // Loop through each byte in the input array - for { let i := 0x00 } lt(i, bytesLength) { i := add(i, 0x01) } { - // Get the starting offset of the next 2 bytes in the nibbles array - let offset := add(nibblesStart, shl(0x01, i)) - // Load the byte at the current index within the `_bytes` array - let b := byte(0x00, mload(add(bytesStart, i))) - - // Pull out the first nibble and store it in the new array - mstore8(offset, shr(0x04, b)) - // Pull out the second nibble and store it in the new array - mstore8(add(offset, 0x01), and(b, 0x0F)) - } - } - return _nibbles; - } - - /// @notice Compares two byte arrays by comparing their keccak256 hashes. - /// @param _bytes First byte array to compare. - /// @param _other Second byte array to compare. 
- /// @return true if the two byte arrays are equal, false otherwise. - function equal(bytes memory _bytes, bytes memory _other) internal pure returns (bool) { - return keccak256(_bytes) == keccak256(_other); - } -} diff --git a/packages/protocol/contracts/thirdparty/optimism/rlp/RLPReader.sol b/packages/protocol/contracts/thirdparty/optimism/rlp/RLPReader.sol deleted file mode 100644 index 9164b7490edd..000000000000 --- a/packages/protocol/contracts/thirdparty/optimism/rlp/RLPReader.sol +++ /dev/null @@ -1,303 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @custom:attribution https://github.com/hamdiallam/Solidity-RLP -/// @title RLPReader -/// @notice RLPReader is a library for parsing RLP-encoded byte arrays into Solidity types. Adapted -/// from Solidity-RLP (https://github.com/hamdiallam/Solidity-RLP) by Hamdi Allam with -/// various tweaks to improve readability. (A shout-out to Optimism !) -library RLPReader { - /// @notice Custom pointer type to avoid confusion between pointers and uint256s. - type MemoryPointer is uint256; - - /// @notice RLP item types. - /// @custom:value DATA_ITEM Represents an RLP data item (NOT a list). - /// @custom:value LIST_ITEM Represents an RLP list item. - enum RLPItemType { - DATA_ITEM, - LIST_ITEM - } - - /// @notice Struct representing an RLP item. - /// @custom:field length Length of the RLP item. - /// @custom:field ptr Pointer to the RLP item in memory. - struct RLPItem { - uint256 length; - MemoryPointer ptr; - } - - /// @notice Max list length that this library will accept. - uint256 internal constant MAX_LIST_LENGTH = 32; - - /// @notice Converts bytes to a reference to memory position and length. - /// @param _in Input bytes to convert. - /// @return out_ Output memory reference. - function toRLPItem(bytes memory _in) internal pure returns (RLPItem memory out_) { - // Empty arrays are not RLP items. - require( - _in.length > 0, - "RLPReader: length of an RLP item must be greater than zero to be decodable" - ); - - MemoryPointer ptr; - assembly { - ptr := add(_in, 32) - } - - out_ = RLPItem({ length: _in.length, ptr: ptr }); - } - - /// @notice Reads an RLP list value into a list of RLP items. - /// @param _in RLP list value. - /// @return out_ Decoded RLP list items. - function readList(RLPItem memory _in) internal pure returns (RLPItem[] memory out_) { - (uint256 listOffset, uint256 listLength, RLPItemType itemType) = _decodeLength(_in); - - require( - itemType == RLPItemType.LIST_ITEM, - "RLPReader: decoded item type for list is not a list item" - ); - - require( - listOffset + listLength == _in.length, - "RLPReader: list item has an invalid data remainder" - ); - - // Solidity in-memory arrays can't be increased in size, but *can* be decreased in size by - // writing to the length. Since we can't know the number of RLP items without looping over - // the entire input, we'd have to loop twice to accurately size this array. It's easier to - // simply set a reasonable maximum list length and decrease the size before we finish. - out_ = new RLPItem[](MAX_LIST_LENGTH); - - uint256 itemCount = 0; - uint256 offset = listOffset; - while (offset < _in.length) { - (uint256 itemOffset, uint256 itemLength,) = _decodeLength( - RLPItem({ - length: _in.length - offset, - ptr: MemoryPointer.wrap(MemoryPointer.unwrap(_in.ptr) + offset) - }) - ); - - // We don't need to check itemCount < out.length explicitly because Solidity already - // handles this check on our behalf, we'd just be wasting gas. 
- out_[itemCount] = RLPItem({ - length: itemLength + itemOffset, - ptr: MemoryPointer.wrap(MemoryPointer.unwrap(_in.ptr) + offset) - }); - - itemCount += 1; - offset += itemOffset + itemLength; - } - - // Decrease the array size to match the actual item count. - assembly { - mstore(out_, itemCount) - } - } - - /// @notice Reads an RLP list value into a list of RLP items. - /// @param _in RLP list value. - /// @return out_ Decoded RLP list items. - function readList(bytes memory _in) internal pure returns (RLPItem[] memory out_) { - out_ = readList(toRLPItem(_in)); - } - - /// @notice Reads an RLP bytes value into bytes. - /// @param _in RLP bytes value. - /// @return out_ Decoded bytes. - function readBytes(RLPItem memory _in) internal pure returns (bytes memory out_) { - (uint256 itemOffset, uint256 itemLength, RLPItemType itemType) = _decodeLength(_in); - - require( - itemType == RLPItemType.DATA_ITEM, - "RLPReader: decoded item type for bytes is not a data item" - ); - - require( - _in.length == itemOffset + itemLength, - "RLPReader: bytes value contains an invalid remainder" - ); - - out_ = _copy(_in.ptr, itemOffset, itemLength); - } - - /// @notice Reads an RLP bytes value into bytes. - /// @param _in RLP bytes value. - /// @return out_ Decoded bytes. - function readBytes(bytes memory _in) internal pure returns (bytes memory out_) { - out_ = readBytes(toRLPItem(_in)); - } - - /// @notice Reads the raw bytes of an RLP item. - /// @param _in RLP item to read. - /// @return out_ Raw RLP bytes. - function readRawBytes(RLPItem memory _in) internal pure returns (bytes memory out_) { - out_ = _copy(_in.ptr, 0, _in.length); - } - - /// @notice Decodes the length of an RLP item. - /// @param _in RLP item to decode. - /// @return offset_ Offset of the encoded data. - /// @return length_ Length of the encoded data. - /// @return type_ RLP item type (LIST_ITEM or DATA_ITEM). - function _decodeLength(RLPItem memory _in) - private - pure - returns (uint256 offset_, uint256 length_, RLPItemType type_) - { - // Short-circuit if there's nothing to decode, note that we perform this check when - // the user creates an RLP item via toRLPItem, but it's always possible for them to bypass - // that function and create an RLP item directly. So we need to check this anyway. - require( - _in.length > 0, - "RLPReader: length of an RLP item must be greater than zero to be decodable" - ); - - MemoryPointer ptr = _in.ptr; - uint256 prefix; - assembly { - prefix := byte(0, mload(ptr)) - } - - if (prefix <= 0x7f) { - // Single byte. - return (0, 1, RLPItemType.DATA_ITEM); - } else if (prefix <= 0xb7) { - // Short string. - - // slither-disable-next-line variable-scope - uint256 strLen = prefix - 0x80; - - require( - _in.length > strLen, - "RLPReader: length of content must be greater than string length (short string)" - ); - - bytes1 firstByteOfContent; - assembly { - firstByteOfContent := and(mload(add(ptr, 1)), shl(248, 0xff)) - } - - require( - strLen != 1 || firstByteOfContent >= 0x80, - "RLPReader: invalid prefix, single byte < 0x80 are not prefixed (short string)" - ); - - return (1, strLen, RLPItemType.DATA_ITEM); - } else if (prefix <= 0xbf) { - // Long string. 
- uint256 lenOfStrLen = prefix - 0xb7; - - require( - _in.length > lenOfStrLen, - "RLPReader: length of content must be > than length of string length (long string)" - ); - - bytes1 firstByteOfContent; - assembly { - firstByteOfContent := and(mload(add(ptr, 1)), shl(248, 0xff)) - } - - require( - firstByteOfContent != 0x00, - "RLPReader: length of content must not have any leading zeros (long string)" - ); - - uint256 strLen; - assembly { - strLen := shr(sub(256, mul(8, lenOfStrLen)), mload(add(ptr, 1))) - } - - require( - strLen > 55, - "RLPReader: length of content must be greater than 55 bytes (long string)" - ); - - require( - _in.length > lenOfStrLen + strLen, - "RLPReader: length of content must be greater than total length (long string)" - ); - - return (1 + lenOfStrLen, strLen, RLPItemType.DATA_ITEM); - } else if (prefix <= 0xf7) { - // Short list. - // slither-disable-next-line variable-scope - uint256 listLen = prefix - 0xc0; - - require( - _in.length > listLen, - "RLPReader: length of content must be greater than list length (short list)" - ); - - return (1, listLen, RLPItemType.LIST_ITEM); - } else { - // Long list. - uint256 lenOfListLen = prefix - 0xf7; - - require( - _in.length > lenOfListLen, - "RLPReader: length of content must be > than length of list length (long list)" - ); - - bytes1 firstByteOfContent; - assembly { - firstByteOfContent := and(mload(add(ptr, 1)), shl(248, 0xff)) - } - - require( - firstByteOfContent != 0x00, - "RLPReader: length of content must not have any leading zeros (long list)" - ); - - uint256 listLen; - assembly { - listLen := shr(sub(256, mul(8, lenOfListLen)), mload(add(ptr, 1))) - } - - require( - listLen > 55, - "RLPReader: length of content must be greater than 55 bytes (long list)" - ); - - require( - _in.length > lenOfListLen + listLen, - "RLPReader: length of content must be greater than total length (long list)" - ); - - return (1 + lenOfListLen, listLen, RLPItemType.LIST_ITEM); - } - } - - /// @notice Copies the bytes from a memory location. - /// @param _src Pointer to the location to read from. - /// @param _offset Offset to start reading from. - /// @param _length Number of bytes to read. - /// @return out_ Copied bytes. - function _copy( - MemoryPointer _src, - uint256 _offset, - uint256 _length - ) - private - pure - returns (bytes memory out_) - { - out_ = new bytes(_length); - if (_length == 0) { - return out_; - } - - // Mostly based on Solidity's copy_memory_to_memory: - // solhint-disable max-line-length - // https://github.com/ethereum/solidity/blob/34dd30d71b4da730488be72ff6af7083cf2a91f6/libsolidity/codegen/YulUtilFunctions.cpp#L102-L114 - uint256 src = MemoryPointer.unwrap(_src) + _offset; - assembly { - let dest := add(out_, 32) - let i := 0 - for { } lt(i, _length) { i := add(i, 32) } { mstore(add(dest, i), mload(add(src, i))) } - - if gt(i, _length) { mstore(add(dest, _length), 0) } - } - } -} diff --git a/packages/protocol/contracts/thirdparty/optimism/rlp/RLPWriter.sol b/packages/protocol/contracts/thirdparty/optimism/rlp/RLPWriter.sol deleted file mode 100644 index f6eb0bf54a1a..000000000000 --- a/packages/protocol/contracts/thirdparty/optimism/rlp/RLPWriter.sol +++ /dev/null @@ -1,70 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @custom:attribution https://github.com/bakaoh/solidity-rlp-encode -/// @title RLPWriter -/// @author RLPWriter is a library for encoding Solidity types to RLP bytes. 
Adapted from Bakaoh's -/// RLPEncode library (https://github.com/bakaoh/solidity-rlp-encode) with minor -/// modifications to improve legibility. (A shout-out to Optimism !) -library RLPWriter { - /// @notice RLP encodes a byte string. - /// @param _in The byte string to encode. - /// @return out_ The RLP encoded string in bytes. - function writeBytes(bytes memory _in) internal pure returns (bytes memory out_) { - if (_in.length == 1 && uint8(_in[0]) < 128) { - out_ = _in; - } else { - out_ = abi.encodePacked(_writeLength(_in.length, 128), _in); - } - } - - /// @notice RLP encodes a uint. - /// @param _in The uint256 to encode. - /// @return out_ The RLP encoded uint256 in bytes. - function writeUint(uint256 _in) internal pure returns (bytes memory out_) { - out_ = writeBytes(_toBinary(_in)); - } - - /// @notice Encode the first byte and then the `len` in binary form if `length` is more than 55. - /// @param _len The length of the string or the payload. - /// @param _offset 128 if item is string, 192 if item is list. - /// @return out_ RLP encoded bytes. - function _writeLength(uint256 _len, uint256 _offset) private pure returns (bytes memory out_) { - if (_len < 56) { - out_ = new bytes(1); - out_[0] = bytes1(uint8(_len) + uint8(_offset)); - } else { - uint256 lenLen; - uint256 i = 1; - while (_len / i != 0) { - lenLen++; - i *= 256; - } - - out_ = new bytes(lenLen + 1); - out_[0] = bytes1(uint8(lenLen) + uint8(_offset) + 55); - for (i = 1; i <= lenLen; i++) { - out_[i] = bytes1(uint8((_len / (256 ** (lenLen - i))) % 256)); - } - } - } - - /// @notice Encode integer in big endian binary form with no leading zeroes. - /// @param _x The integer to encode. - /// @return out_ RLP encoded bytes. - function _toBinary(uint256 _x) private pure returns (bytes memory out_) { - bytes memory b = abi.encodePacked(_x); - - uint256 i = 0; - for (; i < 32; i++) { - if (b[i] != 0) { - break; - } - } - - out_ = new bytes(32 - i); - for (uint256 j = 0; j < out_.length; j++) { - out_[j] = b[i++]; - } - } -} diff --git a/packages/protocol/contracts/thirdparty/optimism/trie/MerkleTrie.sol b/packages/protocol/contracts/thirdparty/optimism/trie/MerkleTrie.sol deleted file mode 100644 index 3b883d0185ec..000000000000 --- a/packages/protocol/contracts/thirdparty/optimism/trie/MerkleTrie.sol +++ /dev/null @@ -1,247 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import { Bytes } from "../Bytes.sol"; -import { RLPReader } from "../rlp/RLPReader.sol"; - -/// @title MerkleTrie -/// @notice MerkleTrie is a small library for verifying standard Ethereum Merkle-Patricia trie -/// inclusion proofs. By default, this library assumes a hexary trie. One can change the -/// trie radix constant to support other trie radixes. -library MerkleTrie { - /// @notice Struct representing a node in the trie. - /// @custom:field encoded The RLP-encoded node. - /// @custom:field decoded The RLP-decoded node. - struct TrieNode { - bytes encoded; - RLPReader.RLPItem[] decoded; - } - - /// @notice Determines the number of elements per branch node. - uint256 internal constant TREE_RADIX = 16; - - /// @notice Branch nodes have TREE_RADIX elements and one value element. - uint256 internal constant BRANCH_NODE_LENGTH = TREE_RADIX + 1; - - /// @notice Leaf nodes and extension nodes have two elements, a `path` and a `value`. - uint256 internal constant LEAF_OR_EXTENSION_NODE_LENGTH = 2; - - /// @notice Prefix for even-nibbled extension node paths. 
- uint8 internal constant PREFIX_EXTENSION_EVEN = 0; - - /// @notice Prefix for odd-nibbled extension node paths. - uint8 internal constant PREFIX_EXTENSION_ODD = 1; - - /// @notice Prefix for even-nibbled leaf node paths. - uint8 internal constant PREFIX_LEAF_EVEN = 2; - - /// @notice Prefix for odd-nibbled leaf node paths. - uint8 internal constant PREFIX_LEAF_ODD = 3; - - /// @notice Verifies a proof that a given key/value pair is present in the trie. - /// @param _key Key of the node to search for, as a hex string. - /// @param _value Value of the node to search for, as a hex string. - /// @param _proof Merkle trie inclusion proof for the desired node. Unlike traditional Merkle - /// trees, this proof is executed top-down and consists of a list of RLP-encoded - /// nodes that make a path down to the target node. - /// @param _root Known root of the Merkle trie. Used to verify that the included proof is - /// correctly constructed. - /// @return valid_ Whether or not the proof is valid. - function verifyInclusionProof( - bytes memory _key, - bytes memory _value, - bytes[] memory _proof, - bytes32 _root - ) - internal - pure - returns (bool valid_) - { - valid_ = Bytes.equal(_value, get(_key, _proof, _root)); - } - - /// @notice Retrieves the value associated with a given key. - /// @param _key Key to search for, as hex bytes. - /// @param _proof Merkle trie inclusion proof for the key. - /// @param _root Known root of the Merkle trie. - /// @return value_ Value of the key if it exists. - function get( - bytes memory _key, - bytes[] memory _proof, - bytes32 _root - ) - internal - pure - returns (bytes memory value_) - { - require(_key.length > 0, "MerkleTrie: empty key"); - - TrieNode[] memory proof = _parseProof(_proof); - bytes memory key = Bytes.toNibbles(_key); - bytes memory currentNodeID = abi.encodePacked(_root); - uint256 currentKeyIndex = 0; - - // Proof is top-down, so we start at the first element (root). - for (uint256 i = 0; i < proof.length; i++) { - TrieNode memory currentNode = proof[i]; - - // Key index should never exceed total key length or we'll be out of bounds. - require(currentKeyIndex <= key.length, "MerkleTrie: key index exceeds total key length"); - - if (currentKeyIndex == 0) { - // First proof element is always the root node. - require( - Bytes.equal(abi.encodePacked(keccak256(currentNode.encoded)), currentNodeID), - "MerkleTrie: invalid root hash" - ); - } else if (currentNode.encoded.length >= 32) { - // Nodes 32 bytes or larger are hashed inside branch nodes. - require( - Bytes.equal(abi.encodePacked(keccak256(currentNode.encoded)), currentNodeID), - "MerkleTrie: invalid large internal hash" - ); - } else { - // Nodes smaller than 32 bytes aren't hashed. - require( - Bytes.equal(currentNode.encoded, currentNodeID), - "MerkleTrie: invalid internal node hash" - ); - } - - if (currentNode.decoded.length == BRANCH_NODE_LENGTH) { - if (currentKeyIndex == key.length) { - // Value is the last element of the decoded list (for branch nodes). There's - // some ambiguity in the Merkle trie specification because bytes(0) is a - // valid value to place into the trie, but for branch nodes bytes(0) can exist - // even when the value wasn't explicitly placed there. Geth treats a value of - // bytes(0) as "key does not exist" and so we do the same. - value_ = RLPReader.readBytes(currentNode.decoded[TREE_RADIX]); - require( - value_.length > 0, - "MerkleTrie: value length must be greater than zero (branch)" - ); - - // Extra proof elements are not allowed. 
- require( - i == proof.length - 1, - "MerkleTrie: value node must be last node in proof (branch)" - ); - - return value_; - } else { - // We're not at the end of the key yet. - // Figure out what the next node ID should be and continue. - uint8 branchKey = uint8(key[currentKeyIndex]); - RLPReader.RLPItem memory nextNode = currentNode.decoded[branchKey]; - currentNodeID = _getNodeID(nextNode); - currentKeyIndex += 1; - } - } else if (currentNode.decoded.length == LEAF_OR_EXTENSION_NODE_LENGTH) { - bytes memory path = _getNodePath(currentNode); - uint8 prefix = uint8(path[0]); - uint8 offset = 2 - (prefix % 2); - bytes memory pathRemainder = Bytes.slice(path, offset); - bytes memory keyRemainder = Bytes.slice(key, currentKeyIndex); - uint256 sharedNibbleLength = _getSharedNibbleLength(pathRemainder, keyRemainder); - - // Whether this is a leaf node or an extension node, the path remainder MUST be a - // prefix of the key remainder (or be equal to the key remainder) or the proof is - // considered invalid. - require( - pathRemainder.length == sharedNibbleLength, - "MerkleTrie: path remainder must share all nibbles with key" - ); - - if (prefix == PREFIX_LEAF_EVEN || prefix == PREFIX_LEAF_ODD) { - // Prefix of 2 or 3 means this is a leaf node. For the leaf node to be valid, - // the key remainder must be exactly equal to the path remainder. We already - // did the necessary byte comparison, so it's more efficient here to check that - // the key remainder length equals the shared nibble length, which implies - // equality with the path remainder (since we already did the same check with - // the path remainder and the shared nibble length). - require( - keyRemainder.length == sharedNibbleLength, - "MerkleTrie: key remainder must be identical to path remainder" - ); - - // Our Merkle Trie is designed specifically for the purposes of the Ethereum - // state trie. Empty values are not allowed in the state trie, so we can safely - // say that if the value is empty, the key should not exist and the proof is - // invalid. - value_ = RLPReader.readBytes(currentNode.decoded[1]); - require( - value_.length > 0, - "MerkleTrie: value length must be greater than zero (leaf)" - ); - - // Extra proof elements are not allowed. - require( - i == proof.length - 1, - "MerkleTrie: value node must be last node in proof (leaf)" - ); - - return value_; - } else if (prefix == PREFIX_EXTENSION_EVEN || prefix == PREFIX_EXTENSION_ODD) { - // Prefix of 0 or 1 means this is an extension node. We move onto the next node - // in the proof and increment the key index by the length of the path remainder - // which is equal to the shared nibble length. - currentNodeID = _getNodeID(currentNode.decoded[1]); - currentKeyIndex += sharedNibbleLength; - } else { - revert("MerkleTrie: received a node with an unknown prefix"); - } - } else { - revert("MerkleTrie: received an unparseable node"); - } - } - - revert("MerkleTrie: ran out of proof elements"); - } - - /// @notice Parses an array of proof elements into a new array that contains both the original - /// encoded element and the RLP-decoded element. - /// @param _proof Array of proof elements to parse. - /// @return proof_ Proof parsed into easily accessible structs. 
- function _parseProof(bytes[] memory _proof) private pure returns (TrieNode[] memory proof_) { - uint256 length = _proof.length; - proof_ = new TrieNode[](length); - for (uint256 i = 0; i < length; ++i) { - proof_[i] = TrieNode({ encoded: _proof[i], decoded: RLPReader.readList(_proof[i]) }); - } - } - - /// @notice Picks out the ID for a node. Node ID is referred to as the "hash" within the - /// specification, but nodes < 32 bytes are not actually hashed. - /// @param _node Node to pull an ID for. - /// @return id_ ID for the node, depending on the size of its contents. - function _getNodeID(RLPReader.RLPItem memory _node) private pure returns (bytes memory id_) { - id_ = _node.length < 32 ? RLPReader.readRawBytes(_node) : RLPReader.readBytes(_node); - } - - /// @notice Gets the path for a leaf or extension node. - /// @param _node Node to get a path for. - /// @return nibbles_ Node path, converted to an array of nibbles. - function _getNodePath(TrieNode memory _node) private pure returns (bytes memory nibbles_) { - nibbles_ = Bytes.toNibbles(RLPReader.readBytes(_node.decoded[0])); - } - - /// @notice Utility; determines the number of nibbles shared between two nibble arrays. - /// @param _a First nibble array. - /// @param _b Second nibble array. - /// @return shared_ Number of shared nibbles. - function _getSharedNibbleLength( - bytes memory _a, - bytes memory _b - ) - private - pure - returns (uint256 shared_) - { - uint256 max = (_a.length < _b.length) ? _a.length : _b.length; - for (; shared_ < max && _a[shared_] == _b[shared_];) { - unchecked { - ++shared_; - } - } - } -} diff --git a/packages/protocol/contracts/thirdparty/optimism/trie/MerkleTrieProofVerifier.sol b/packages/protocol/contracts/thirdparty/optimism/trie/MerkleTrieProofVerifier.sol deleted file mode 100644 index 4a914e1045d0..000000000000 --- a/packages/protocol/contracts/thirdparty/optimism/trie/MerkleTrieProofVerifier.sol +++ /dev/null @@ -1,58 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import { MerkleTrie } from "./MerkleTrie.sol"; - -/// @title MerkleTrieProofVerifier -/// @notice MerkleTrieProofVerifier is a thin wrapper around the MerkleTrie library that hashes the -/// input -/// keys. Ethereum's state trie hashes input keys before storing them. -contract MerkleTrieProofVerifier { - /// @notice Verifies a proof that a given key/value pair is present in the Merkle trie. - /// @param _key Key of the node to search for, as a hex string. - /// @param _value Value of the node to search for, as a hex string. - /// @param _proof Merkle trie inclusion proof for the desired node. Unlike traditional Merkle - /// trees, this proof is executed top-down and consists of a list of RLP-encoded - /// nodes that make a path down to the target node. - /// @param _root Known root of the Merkle trie. Used to verify that the included proof is - /// correctly constructed. - /// @return valid_ Whether or not the proof is valid. - function verifyInclusionProof( - bytes memory _key, - bytes memory _value, - bytes[] memory _proof, - bytes32 _root - ) - internal - pure - returns (bool valid_) - { - bytes memory key = _getSecureKey(_key); - valid_ = MerkleTrie.verifyInclusionProof(key, _value, _proof, _root); - } - - /// @notice Retrieves the value associated with a given key. - /// @param _key Key to search for, as hex bytes. - /// @param _proof Merkle trie inclusion proof for the key. - /// @param _root Known root of the Merkle trie. - /// @return value_ Value of the key if it exists. 
- function get( - bytes memory _key, - bytes[] memory _proof, - bytes32 _root - ) - internal - pure - returns (bytes memory value_) - { - bytes memory key = _getSecureKey(_key); - value_ = MerkleTrie.get(key, _proof, _root); - } - - /// @notice Computes the hashed version of the input key. - /// @param _key Key to hash. - /// @return hash_ Hashed version of the key. - function _getSecureKey(bytes memory _key) private pure returns (bytes memory hash_) { - hash_ = abi.encodePacked(keccak256(_key)); - } -} diff --git a/packages/protocol/contracts/thirdparty/optimism/trie/SecureMerkleTrie.sol b/packages/protocol/contracts/thirdparty/optimism/trie/SecureMerkleTrie.sol deleted file mode 100644 index 018084369030..000000000000 --- a/packages/protocol/contracts/thirdparty/optimism/trie/SecureMerkleTrie.sol +++ /dev/null @@ -1,57 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import { MerkleTrie } from "./MerkleTrie.sol"; - -/// @title SecureMerkleTrie -/// @notice SecureMerkleTrie is a thin wrapper around the MerkleTrie library that hashes the input -/// keys. Ethereum's state trie hashes input keys before storing them. -library SecureMerkleTrie { - /// @notice Verifies a proof that a given key/value pair is present in the Merkle trie. - /// @param _key Key of the node to search for, as a hex string. - /// @param _value Value of the node to search for, as a hex string. - /// @param _proof Merkle trie inclusion proof for the desired node. Unlike traditional Merkle - /// trees, this proof is executed top-down and consists of a list of RLP-encoded - /// nodes that make a path down to the target node. - /// @param _root Known root of the Merkle trie. Used to verify that the included proof is - /// correctly constructed. - /// @return valid_ Whether or not the proof is valid. - function verifyInclusionProof( - bytes memory _key, - bytes memory _value, - bytes[] memory _proof, - bytes32 _root - ) - internal - pure - returns (bool valid_) - { - bytes memory key = _getSecureKey(_key); - valid_ = MerkleTrie.verifyInclusionProof(key, _value, _proof, _root); - } - - /// @notice Retrieves the value associated with a given key. - /// @param _key Key to search for, as hex bytes. - /// @param _proof Merkle trie inclusion proof for the key. - /// @param _root Known root of the Merkle trie. - /// @return value_ Value of the key if it exists. - function get( - bytes memory _key, - bytes[] memory _proof, - bytes32 _root - ) - internal - pure - returns (bytes memory value_) - { - bytes memory key = _getSecureKey(_key); - value_ = MerkleTrie.get(key, _proof, _root); - } - - /// @notice Computes the hashed version of the input key. - /// @param _key Key to hash. - /// @return hash_ Hashed version of the key. - function _getSecureKey(bytes memory _key) private pure returns (bytes memory hash_) { - hash_ = abi.encodePacked(keccak256(_key)); - } -} diff --git a/packages/protocol/contracts/thirdparty/risczero/IRiscZeroReceiptVerifier.sol b/packages/protocol/contracts/thirdparty/risczero/IRiscZeroReceiptVerifier.sol deleted file mode 100644 index 841b1c994239..000000000000 --- a/packages/protocol/contracts/thirdparty/risczero/IRiscZeroReceiptVerifier.sol +++ /dev/null @@ -1,27 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @notice Verifier interface for RISC Zero receipts of execution. 
-/// https://github.com/risc0/risc0-ethereum/blob/release-0.7/contracts/src/IRiscZeroVerifier.sol -interface IRiscZeroReceiptVerifier { - /// @notice Verify that the given seal is a valid RISC Zero proof of execution with the - /// given image ID, post-state digest, and journal digest. - /// @dev This method additionally ensures that the input hash is all-zeros (i.e. no - /// committed input), the exit code is (Halted, 0), and there are no assumptions (i.e. the - /// receipt is unconditional). - /// @param seal The encoded cryptographic proof (i.e. SNARK). - /// @param imageId The identifier for the guest program. - /// @param postStateDigest A hash of the final memory state. Required to run the verifier, but - /// otherwise can be left unconstrained for most use cases. - /// @param journalDigest The SHA-256 digest of the journal bytes. - /// @return true if the receipt passes the verification checks. The return code must be checked. - function verify( - bytes calldata seal, - bytes32 imageId, - bytes32 postStateDigest, - bytes32 journalDigest - ) - external - view - returns (bool); -} diff --git a/packages/protocol/contracts/thirdparty/solmate/LibFixedPointMath.sol b/packages/protocol/contracts/thirdparty/solmate/LibFixedPointMath.sol deleted file mode 100644 index 2ad599363c2a..000000000000 --- a/packages/protocol/contracts/thirdparty/solmate/LibFixedPointMath.sol +++ /dev/null @@ -1,82 +0,0 @@ -// SPDX-License-Identifier: MIT -// Taken from the contract below, expWad() function tailored to Taiko's need -// https://github.com/transmissions11/solmate/blob/v7/src/utils/FixedPointMathLib.sol -pragma solidity 0.8.24; - -library LibFixedPointMath { - uint128 public constant MAX_EXP_INPUT = 135_305_999_368_893_231_588; - uint256 public constant SCALING_FACTOR = 1e18; // For fixed point representation factor - - error Overflow(); - - // Computes e^x in 1e18 fixed point. - function exp(int256 x) internal pure returns (int256 r) { - unchecked { - // Input x is in fixed point format, with scale factor 1/1e18. - - // When the result is < 0.5 we return zero. This happens when - // x <= floor(log(0.5e18) * 1e18) ~ -42e18 - if (x <= -42_139_678_854_452_767_551) { - return 0; - } - - // When the result is > (2**255 - 1) / 1e18 we can not represent it - // as an int256. This happens when x >= floor(log((2**255 -1) / - // 1e18) * 1e18) ~ 135. - if (x >= 135_305_999_368_893_231_589) revert Overflow(); - - // x is now in the range (-42, 136) * 1e18. Convert to (-42, 136) * - // 2**96 - // for more intermediate precision and a binary basis. This base - // conversion - // is a multiplication by 1e18 / 2**96 = 5**18 / 2**78. - x = (x << 78) / 5 ** 18; - - // Reduce range of x to (-½ ln 2, ½ ln 2) * 2**96 by factoring out - // powers of two - // such that exp(x) = exp(x') * 2**k, where k is an integer. - // Solving this gives k = round(x / log(2)) and x' = x - k * log(2). - int256 k = ((x << 96) / 54_916_777_467_707_473_351_141_471_128 + 2 ** 95) >> 96; - x = x - k * 54_916_777_467_707_473_351_141_471_128; - // k is in the range [-61, 195]. - - // Evaluate using a (6, 7)-term rational approximation. - // p is made monic, we'll multiply by a scale factor later. 
- int256 y = x + 1_346_386_616_545_796_478_920_950_773_328; - y = ((y * x) >> 96) + 57_155_421_227_552_351_082_224_309_758_442; - int256 p = y + x - 94_201_549_194_550_492_254_356_042_504_812; - p = ((p * y) >> 96) + 28_719_021_644_029_726_153_956_944_680_412_240; - p = p * x + (4_385_272_521_454_847_904_659_076_985_693_276 << 96); - - // We leave p in 2**192 basis so we don't need to scale it back up - // for the division. - int256 q = x - 2_855_989_394_907_223_263_936_484_059_900; - q = ((q * x) >> 96) + 50_020_603_652_535_783_019_961_831_881_945; - q = ((q * x) >> 96) - 533_845_033_583_426_703_283_633_433_725_380; - q = ((q * x) >> 96) + 3_604_857_256_930_695_427_073_651_918_091_429; - q = ((q * x) >> 96) - 14_423_608_567_350_463_180_887_372_962_807_573; - q = ((q * x) >> 96) + 26_449_188_498_355_588_339_934_803_723_976_023; - assembly { - // Div in assembly because solidity adds a zero check despite - // the `unchecked`. - // The q polynomial is known not to have zeros in the domain. - // (All roots are complex) - // No scaling required because p is already 2**96 too large. - r := sdiv(p, q) - } - // r should be in the range (0.09, 0.25) * 2**96. - - // We now need to multiply r by - // * the scale factor s = ~6.031367120..., - // * the 2**k factor from the range reduction, and - // * the 1e18 / 2**96 factor for base conversion. - // We do all of this at once, with an intermediate result in 2**213 - // basis - // so the final right shift is always by a positive amount. - r = int256( - (uint256(r) * 3_822_833_074_963_236_453_042_738_258_902_158_003_155_416_615_667) - >> uint256(195 - k) - ); - } - } -} diff --git a/packages/protocol/contracts/tko/BridgedTaikoToken.sol b/packages/protocol/contracts/tko/BridgedTaikoToken.sol deleted file mode 100644 index c847dda66653..000000000000 --- a/packages/protocol/contracts/tko/BridgedTaikoToken.sol +++ /dev/null @@ -1,55 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../tokenvault/IBridgedERC20.sol"; -import "./TaikoTokenBase.sol"; - -/// @title BridgedTaikoToken -/// @notice The TaikoToken on L2 to support checkpoints and voting. For testnets, we do not need to -/// use this contract. -/// @custom:security-contact security@taiko.xyz -contract BridgedTaikoToken is TaikoTokenBase, IBridgedERC20 { - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address manager address. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - __ERC20_init("Taiko Token", "TKO"); - __ERC20Votes_init(); - __ERC20Permit_init("Taiko Token"); - } - - function mint( - address _account, - uint256 _amount - ) - external - override - whenNotPaused - onlyFromOwnerOrNamed(LibStrings.B_ERC20_VAULT) - nonReentrant - { - _mint(_account, _amount); - } - - function burn(uint256 _amount) - external - override - whenNotPaused - onlyFromOwnerOrNamed(LibStrings.B_ERC20_VAULT) - nonReentrant - { - _burn(msg.sender, _amount); - } - - /// @notice Gets the canonical token's address and chain ID. - /// @return The canonical token's address. - /// @return The canonical token's chain ID. - function canonical() public pure returns (address, uint256) { - // 0x10dea67478c5F8C5E2D90e5E9B26dBe60c54d800 is the TKO's mainnet address, - // 1 is the Ethereum's network id. 
- return (0x10dea67478c5F8C5E2D90e5E9B26dBe60c54d800, 1); - } - - function changeMigrationStatus(address, bool) public pure notImplemented { } -} diff --git a/packages/protocol/contracts/tko/TaikoToken.sol b/packages/protocol/contracts/tko/TaikoToken.sol deleted file mode 100644 index 7bb9d380865a..000000000000 --- a/packages/protocol/contracts/tko/TaikoToken.sol +++ /dev/null @@ -1,30 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "./TaikoTokenBase.sol"; - -/// @title TaikoToken -/// @notice The TaikoToken (TKO), in the protocol is used for prover collateral -/// in the form of bonds. It is an ERC20 token with 18 decimal places of precision. -/// @dev Labeled in AddressResolver as "taiko_token" -/// @dev On Ethereum, this contract is deployed behind a proxy at -/// 0x10dea67478c5F8C5E2D90e5E9B26dBe60c54d800 (token.taiko.eth) -/// @custom:security-contact security@taiko.xyz -contract TaikoToken is TaikoTokenBase { - address private constant _TAIKO_L1 = 0x06a9Ab27c7e2255df1815E6CC0168d7755Feb19a; - address private constant _ERC20_VAULT = 0x996282cA11E5DEb6B5D122CC3B9A1FcAAD4415Ab; - - error TT_INVALID_PARAM(); - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _recipient The address to receive initial token minting. - function init(address _owner, address _recipient) public initializer { - __Essential_init(_owner); - __ERC20_init("Taiko Token", "TKO"); - __ERC20Votes_init(); - __ERC20Permit_init("Taiko Token"); - // Mint 1 billion tokens - _mint(_recipient, 1_000_000_000 ether); - } -} diff --git a/packages/protocol/contracts/tko/TaikoTokenBase.sol b/packages/protocol/contracts/tko/TaikoTokenBase.sol deleted file mode 100644 index 2e30a8b9547e..000000000000 --- a/packages/protocol/contracts/tko/TaikoTokenBase.sol +++ /dev/null @@ -1,36 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts-upgradeable/token/ERC20/extensions/ERC20VotesUpgradeable.sol"; -import "../common/EssentialContract.sol"; -import "../common/LibStrings.sol"; - -/// @notice TaikoToken was `EssentialContract, ERC20SnapshotUpgradeable, ERC20VotesUpgradeable`. -/// We use this contract to take 50 more slots to remove `ERC20SnapshotUpgradeable` from the parent -/// contract list. -/// We can simplify the code since we no longer need to maintain upgradability with Hekla. -abstract contract TaikoTokenBase0 is EssentialContract { - // solhint-disable var-name-mixedcase - uint256[50] private __slots_previously_used_by_ERC20SnapshotUpgradeable; -} - -/// @title TaikoTokenBase -/// @notice The base contract for both the canonical and the bridged Taiko token. 
-/// @custom:security-contact security@taiko.xyz -abstract contract TaikoTokenBase is TaikoTokenBase0, ERC20VotesUpgradeable { - uint256[50] private __gap; - - function clock() public view override returns (uint48) { - return SafeCastUpgradeable.toUint48(block.timestamp); - } - - // solhint-disable-next-line func-name-mixedcase - function CLOCK_MODE() public pure override returns (string memory) { - // See https://eips.ethereum.org/EIPS/eip-6372 - return "mode=timestamp"; - } - - function symbol() public pure override returns (string memory) { - return "TAIKO"; - } -} diff --git a/packages/protocol/contracts/tokenvault/BaseNFTVault.sol b/packages/protocol/contracts/tokenvault/BaseNFTVault.sol deleted file mode 100644 index 8b4e35595330..000000000000 --- a/packages/protocol/contracts/tokenvault/BaseNFTVault.sol +++ /dev/null @@ -1,134 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "./BaseVault.sol"; - -/// @title BaseNFTVault -/// @notice Abstract contract for bridging NFTs across different chains. -/// @custom:security-contact security@taiko.xyz -abstract contract BaseNFTVault is BaseVault { - // Struct representing the canonical NFT on another chain. - struct CanonicalNFT { - // Chain ID of the NFT. - uint64 chainId; - // Address of the NFT contract. - address addr; - // Symbol of the NFT. - string symbol; - // Name of the NFT. - string name; - } - - /// @devStruct representing the details of a bridged token transfer operation. - /// 5 slots - struct BridgeTransferOp { - // Destination chain ID. - uint64 destChainId; - // The owner of the bridge message on the destination chain. - address destOwner; - // Recipient address. - address to; - // Processing fee for the relayer. - uint64 fee; - // Address of the token. - address token; - // Gas limit for the operation. - uint32 gasLimit; - // Token Id array - uint256[] tokenIds; - // Respective amounts per given token Ids. - uint256[] amounts; - } - - /// @notice Mapping to store bridged NFTs and their canonical counterparts. - mapping(address btoken => CanonicalNFT canonical) public bridgedToCanonical; - - /// @notice Mapping to store canonical NFTs and their bridged counterparts. - mapping(uint256 chainId => mapping(address ctoken => address btoken)) public canonicalToBridged; - - uint256[48] private __gap; - - /// @notice Emitted when a new bridged token is deployed. - /// @param chainId The chain ID of the bridged token. - /// @param ctoken The address of the canonical token. - /// @param btoken The address of the bridged token. - /// @param ctokenSymbol The symbol of the canonical token. - /// @param ctokenName The name of the canonical token. - event BridgedTokenDeployed( - uint64 indexed chainId, - address indexed ctoken, - address indexed btoken, - string ctokenSymbol, - string ctokenName - ); - - /// @notice Emitted when a token is sent to another chain. - /// @param msgHash The hash of the message. - /// @param from The sender of the message. - /// @param to The recipient of the message. - /// @param destChainId The destination chain ID. - /// @param ctoken The address of the canonical token. - /// @param token The address of the bridged token. - /// @param tokenIds The IDs of the tokens. - /// @param amounts The amounts of the tokens. - event TokenSent( - bytes32 indexed msgHash, - address indexed from, - address indexed to, - uint64 destChainId, - address ctoken, - address token, - uint256[] tokenIds, - uint256[] amounts - ); - - /// @notice Emitted when a token is released on the current chain. 
- /// @param msgHash The hash of the message. - /// @param from The sender of the message. - /// @param ctoken The address of the canonical token. - /// @param token The address of the bridged token. - /// @param tokenIds The IDs of the tokens. - /// @param amounts The amounts of the tokens. - event TokenReleased( - bytes32 indexed msgHash, - address indexed from, - address ctoken, - address token, - uint256[] tokenIds, - uint256[] amounts - ); - - /// @notice Emitted when a token is received from another chain. - /// @param msgHash The hash of the message. - /// @param from The sender of the message. - /// @param to The recipient of the message. - /// @param srcChainId The source chain ID. - /// @param ctoken The address of the canonical token. - /// @param token The address of the bridged token. - /// @param tokenIds The IDs of the tokens. - /// @param amounts The amounts of the tokens. - event TokenReceived( - bytes32 indexed msgHash, - address indexed from, - address indexed to, - uint64 srcChainId, - address ctoken, - address token, - uint256[] tokenIds, - uint256[] amounts - ); - - error VAULT_INVALID_TOKEN(); - error VAULT_INVALID_AMOUNT(); - error VAULT_INTERFACE_NOT_SUPPORTED(); - error VAULT_TOKEN_ARRAY_MISMATCH(); - - modifier withValidOperation(BridgeTransferOp memory _op) { - if (_op.tokenIds.length != _op.amounts.length) { - revert VAULT_TOKEN_ARRAY_MISMATCH(); - } - - if (_op.token == address(0)) revert VAULT_INVALID_TOKEN(); - _; - } -} diff --git a/packages/protocol/contracts/tokenvault/BaseVault.sol b/packages/protocol/contracts/tokenvault/BaseVault.sol deleted file mode 100644 index 18d17edb5515..000000000000 --- a/packages/protocol/contracts/tokenvault/BaseVault.sol +++ /dev/null @@ -1,87 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts-upgradeable/utils/introspection/IERC165Upgradeable.sol"; -import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol"; -import "../bridge/IBridge.sol"; -import "../common/EssentialContract.sol"; -import "../common/LibStrings.sol"; -import "../libs/LibBytes.sol"; - -/// @title INameSymbol -/// @notice Interface for contracts that provide name() and symbol() -/// functions. These functions may not be part of the official interface but are -/// used by some contracts. -/// @custom:security-contact security@taiko.xyz -interface INameSymbol { - function name() external view returns (string memory); - function symbol() external view returns (string memory); -} - -/// @title BaseVault -/// @notice This abstract contract provides a base implementation for vaults. -/// @custom:security-contact security@taiko.xyz -abstract contract BaseVault is - EssentialContract, - IRecallableSender, - IMessageInvocable, - IERC165Upgradeable -{ - using LibBytes for bytes; - - uint256[50] private __gap; - - error VAULT_INSUFFICIENT_FEE(); - error VAULT_INVALID_TO_ADDR(); - error VAULT_PERMISSION_DENIED(); - - /// @notice Checks if the contract supports the given interface. - /// @param _interfaceId The interface identifier. - /// @return true if the contract supports the interface, false otherwise. - function supportsInterface(bytes4 _interfaceId) public view virtual override returns (bool) { - return _interfaceId == type(IRecallableSender).interfaceId - || _interfaceId == type(IMessageInvocable).interfaceId - || _interfaceId == type(IERC165Upgradeable).interfaceId; - } - - /// @notice Returns the name of the vault. - /// @return The name of the vault. 
- function name() public pure virtual returns (bytes32); - - function checkProcessMessageContext() - internal - view - onlyFromNamed(LibStrings.B_BRIDGE) - returns (IBridge.Context memory ctx_) - { - ctx_ = IBridge(msg.sender).context(); - address selfOnSourceChain = resolve(ctx_.srcChainId, name(), false); - if (ctx_.from != selfOnSourceChain) revert VAULT_PERMISSION_DENIED(); - } - - function checkRecallMessageContext() - internal - view - onlyFromNamed(LibStrings.B_BRIDGE) - returns (IBridge.Context memory ctx_) - { - ctx_ = IBridge(msg.sender).context(); - if (ctx_.from != msg.sender) revert VAULT_PERMISSION_DENIED(); - } - - function checkToAddress(address _to) internal view { - if (_to == address(0) || _to == address(this)) revert VAULT_INVALID_TO_ADDR(); - } - - function safeSymbol(address _token) internal view returns (string memory symbol_) { - (bool success, bytes memory data) = - address(_token).staticcall(abi.encodeCall(INameSymbol.symbol, ())); - return success ? data.toString() : ""; - } - - function safeName(address _token) internal view returns (string memory) { - (bool success, bytes memory data) = - address(_token).staticcall(abi.encodeCall(INameSymbol.name, ())); - return success ? data.toString() : ""; - } -} diff --git a/packages/protocol/contracts/tokenvault/BridgedERC1155.sol b/packages/protocol/contracts/tokenvault/BridgedERC1155.sol deleted file mode 100644 index f846fc4d5c1e..000000000000 --- a/packages/protocol/contracts/tokenvault/BridgedERC1155.sol +++ /dev/null @@ -1,116 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts-upgradeable/token/ERC1155/ERC1155Upgradeable.sol"; -import "../common/EssentialContract.sol"; -import "../common/LibStrings.sol"; -import "./IBridgedERC1155.sol"; -import "./LibBridgedToken.sol"; - -/// @title BridgedERC1155 -/// @notice Contract for bridging ERC1155 tokens across different chains. -/// @custom:security-contact security@taiko.xyz -contract BridgedERC1155 is - EssentialContract, - IBridgedERC1155, - IBridgedERC1155Initializable, - ERC1155Upgradeable -{ - /// @notice Address of the source token contract. - address public srcToken; - - /// @notice Source chain ID where the token originates. - uint256 public srcChainId; - - /// @dev Symbol of the bridged token. - string public symbol; - - /// @dev Name of the bridged token. - string public name; - - uint256[46] private __gap; - - error BTOKEN_INVALID_PARAMS(); - - /// @inheritdoc IBridgedERC1155Initializable - function init( - address _owner, - address _addressManager, - address _srcToken, - uint256 _srcChainId, - string calldata _symbol, - string calldata _name - ) - external - initializer - { - // Check if provided parameters are valid. - // The symbol and the name can be empty for ERC1155 tokens so we use some placeholder data - // for them instead. - LibBridgedToken.validateInputs(_srcToken, _srcChainId); - __Essential_init(_owner, _addressManager); - - // The token URI here is not important as the client will have to read the URI from the - // canonical contract to fetch meta data. 
- __ERC1155_init(LibBridgedToken.buildURI(_srcToken, _srcChainId, "")); - - srcToken = _srcToken; - srcChainId = _srcChainId; - symbol = _symbol; - name = _name; - } - - /// @inheritdoc IBridgedERC1155 - function mintBatch( - address _to, - uint256[] calldata _tokenIds, - uint256[] calldata _amounts - ) - external - whenNotPaused - onlyFromNamed(LibStrings.B_ERC1155_VAULT) - nonReentrant - { - _mintBatch(_to, _tokenIds, _amounts, ""); - } - - /// @inheritdoc IBridgedERC1155 - function burn( - uint256 _id, - uint256 _amount - ) - external - whenNotPaused - onlyFromNamed(LibStrings.B_ERC1155_VAULT) - nonReentrant - { - _burn(msg.sender, _id, _amount); - } - - /// @inheritdoc IBridgedERC1155 - function canonical() external view returns (address, uint256) { - return (srcToken, srcChainId); - } - - function supportsInterface(bytes4 _interfaceId) public view override returns (bool) { - return _interfaceId == type(IBridgedERC1155).interfaceId - || _interfaceId == type(IBridgedERC1155Initializable).interfaceId - || super.supportsInterface(_interfaceId); - } - - function _beforeTokenTransfer( - address _operator, - address _from, - address _to, - uint256[] memory _ids, - uint256[] memory _amounts, - bytes memory _data - ) - internal - override - whenNotPaused - { - LibBridgedToken.checkToAddress(_to); - super._beforeTokenTransfer(_operator, _from, _to, _ids, _amounts, _data); - } -} diff --git a/packages/protocol/contracts/tokenvault/BridgedERC20.sol b/packages/protocol/contracts/tokenvault/BridgedERC20.sol deleted file mode 100644 index e8a75b8cbe05..000000000000 --- a/packages/protocol/contracts/tokenvault/BridgedERC20.sol +++ /dev/null @@ -1,178 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts-upgradeable/token/ERC20/ERC20Upgradeable.sol"; -import "@openzeppelin/contracts-upgradeable/utils/introspection/IERC165Upgradeable.sol"; -import "../common/EssentialContract.sol"; -import "../common/LibStrings.sol"; -import "./IBridgedERC20.sol"; -import "./LibBridgedToken.sol"; - -/// @title BridgedERC20 -/// @notice An upgradeable ERC20 contract that represents tokens bridged from -/// another chain. -/// @custom:security-contact security@taiko.xyz -contract BridgedERC20 is - EssentialContract, - IBridgedERC20, - IBridgedERC20Initializable, - IBridgedERC20Migratable, - IERC165Upgradeable, - ERC20Upgradeable -{ - /// @dev Slot 1. - address public srcToken; - - uint8 public __srcDecimals; - - /// @dev Slot 2. - uint256 public srcChainId; - - /// @dev Slot 3. - /// @notice The address of the contract to migrate tokens to or from. - address public migratingAddress; - - /// @notice If true, signals migrating 'to', false if migrating 'from'. - bool public migratingInbound; - - uint256[47] private __gap; - - /// @notice Emitted when the migration status is changed. - /// @param addr The address migrating 'to' or 'from'. - /// @param inbound If false then signals migrating 'from', true if migrating 'into'. - event MigrationStatusChanged(address addr, bool inbound); - - /// @notice Emitted when tokens are migrated to the new bridged token. - /// @param migratedTo The address of the bridged token. - /// @param account The address of the account. - /// @param amount The amount of tokens migrated. - event MigratedTo(address indexed migratedTo, address indexed account, uint256 amount); - - /// @notice Emitted when tokens are migrated from the old bridged token. - /// @param migratedFrom The address of the bridged token. 
- /// @param account The address of the account. - /// @param amount The amount of tokens migrated. - event MigratedFrom(address indexed migratedFrom, address indexed account, uint256 amount); - - error BTOKEN_INVALID_PARAMS(); - error BTOKEN_MINT_DISALLOWED(); - - /// @inheritdoc IBridgedERC20Initializable - function init( - address _owner, - address _addressManager, - address _srcToken, - uint256 _srcChainId, - uint8 _decimals, - string calldata _symbol, - string calldata _name - ) - external - initializer - { - // Check if provided parameters are valid - LibBridgedToken.validateInputs(_srcToken, _srcChainId); - __Essential_init(_owner, _addressManager); - __ERC20_init(_name, _symbol); - - // Set contract properties - srcToken = _srcToken; - srcChainId = _srcChainId; - __srcDecimals = _decimals; - } - - /// @inheritdoc IBridgedERC20Migratable - function changeMigrationStatus( - address _migratingAddress, - bool _migratingInbound - ) - external - whenNotPaused - onlyFromNamed(LibStrings.B_ERC20_VAULT) - nonReentrant - { - if (_migratingAddress == migratingAddress && _migratingInbound == migratingInbound) { - revert BTOKEN_INVALID_PARAMS(); - } - - migratingAddress = _migratingAddress; - migratingInbound = _migratingInbound; - emit MigrationStatusChanged(_migratingAddress, _migratingInbound); - } - - /// @inheritdoc IBridgedERC20 - function mint(address _account, uint256 _amount) external whenNotPaused nonReentrant { - // mint is disabled while migrating outbound. - if (isMigratingOut()) revert BTOKEN_MINT_DISALLOWED(); - - address _migratingAddress = migratingAddress; - if (msg.sender == _migratingAddress) { - // Inbound migration - emit MigratedFrom(_migratingAddress, _account, _amount); - } else { - // Bridging from vault - _authorizedMintBurn(msg.sender); - } - - _mint(_account, _amount); - } - - /// @inheritdoc IBridgedERC20 - function burn(uint256 _amount) external whenNotPaused nonReentrant { - if (isMigratingOut()) { - // Outbound migration - address _migratingAddress = migratingAddress; - emit MigratedTo(_migratingAddress, msg.sender, _amount); - // Ask the new bridged token to mint token for the user. - IBridgedERC20(_migratingAddress).mint(msg.sender, _amount); - } else { - // When user wants to burn tokens only during 'migrating out' phase is possible. If - // ERC20Vault burns the tokens, that will go through the burn(amount) function. - _authorizedMintBurn(msg.sender); - } - - _burn(msg.sender, _amount); - } - - /// @inheritdoc IBridgedERC20 - function canonical() external view returns (address, uint256) { - return (srcToken, srcChainId); - } - - /// @notice Gets the number of decimal places of the token. - /// @return The number of decimal places of the token. 
- function decimals() public view override returns (uint8) { - return __srcDecimals; - } - - function isMigratingOut() public view returns (bool) { - return migratingAddress != address(0) && !migratingInbound; - } - - function supportsInterface(bytes4 _interfaceId) public pure returns (bool) { - return _interfaceId == type(IBridgedERC20).interfaceId - || _interfaceId == type(IBridgedERC20Initializable).interfaceId - || _interfaceId == type(IBridgedERC20Migratable).interfaceId - || _interfaceId == type(IERC20Upgradeable).interfaceId - || _interfaceId == type(IERC20MetadataUpgradeable).interfaceId - || _interfaceId == type(IERC165Upgradeable).interfaceId; - } - - function _beforeTokenTransfer( - address _from, - address _to, - uint256 _amount - ) - internal - override - whenNotPaused - { - LibBridgedToken.checkToAddress(_to); - return super._beforeTokenTransfer(_from, _to, _amount); - } - - function _authorizedMintBurn(address addr) - private - onlyFromOwnerOrNamed(LibStrings.B_ERC20_VAULT) - { } -} diff --git a/packages/protocol/contracts/tokenvault/BridgedERC721.sol b/packages/protocol/contracts/tokenvault/BridgedERC721.sol deleted file mode 100644 index 3315c8bb56cc..000000000000 --- a/packages/protocol/contracts/tokenvault/BridgedERC721.sol +++ /dev/null @@ -1,113 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol"; -import "../common/EssentialContract.sol"; -import "../common/LibStrings.sol"; -import "./IBridgedERC721.sol"; -import "./LibBridgedToken.sol"; - -/// @title BridgedERC721 -/// @notice Contract for bridging ERC721 tokens across different chains. -/// @custom:security-contact security@taiko.xyz -contract BridgedERC721 is - EssentialContract, - IBridgedERC721, - IBridgedERC721Initializable, - ERC721Upgradeable -{ - /// @notice Address of the source token contract. - address public srcToken; - - /// @notice Source chain ID where the token originates. - uint256 public srcChainId; - - uint256[48] private __gap; - - error BTOKEN_INVALID_PARAMS(); - error BTOKEN_INVALID_BURN(); - - /// @inheritdoc IBridgedERC721Initializable - function init( - address _owner, - address _addressManager, - address _srcToken, - uint256 _srcChainId, - string calldata _symbol, - string calldata _name - ) - external - initializer - { - // Check if provided parameters are valid - LibBridgedToken.validateInputs(_srcToken, _srcChainId); - __Essential_init(_owner, _addressManager); - __ERC721_init(_name, _symbol); - - srcToken = _srcToken; - srcChainId = _srcChainId; - } - - /// @inheritdoc IBridgedERC721 - function mint( - address _account, - uint256 _tokenId - ) - external - whenNotPaused - onlyFromNamed(LibStrings.B_ERC721_VAULT) - nonReentrant - { - _safeMint(_account, _tokenId); - } - - /// @inheritdoc IBridgedERC721 - function burn(uint256 _tokenId) - external - whenNotPaused - onlyFromNamed(LibStrings.B_ERC721_VAULT) - nonReentrant - { - // Check if the caller is the owner of the token. Somehow this is not done inside the - // _burn() function below. - if (ownerOf(_tokenId) != msg.sender) { - revert BTOKEN_INVALID_BURN(); - } - _burn(_tokenId); - } - - /// @inheritdoc IBridgedERC721 - function canonical() external view returns (address, uint256) { - return (srcToken, srcChainId); - } - - /// @notice Returns the token URI. - /// @param _tokenId The token id. - /// @return The token URI following EIP-681. 
- function tokenURI(uint256 _tokenId) public view override returns (string memory) { - // https://github.com/crytic/slither/wiki/Detector-Documentation#abi-encodePacked-collision - // The abi.encodePacked() call below takes multiple dynamic arguments. This is known and - // considered acceptable in terms of risk. - return LibBridgedToken.buildURI(srcToken, srcChainId, Strings.toString(_tokenId)); - } - - function supportsInterface(bytes4 _interfaceId) public view override returns (bool) { - return _interfaceId == type(IBridgedERC721).interfaceId - || _interfaceId == type(IBridgedERC721Initializable).interfaceId - || super.supportsInterface(_interfaceId); - } - - function _beforeTokenTransfer( - address _from, - address _to, - uint256 _firstTokenId, - uint256 _batchSize - ) - internal - override - whenNotPaused - { - LibBridgedToken.checkToAddress(_to); - super._beforeTokenTransfer(_from, _to, _firstTokenId, _batchSize); - } -} diff --git a/packages/protocol/contracts/tokenvault/ERC1155Vault.sol b/packages/protocol/contracts/tokenvault/ERC1155Vault.sol deleted file mode 100644 index f6b82fe1c400..000000000000 --- a/packages/protocol/contracts/tokenvault/ERC1155Vault.sol +++ /dev/null @@ -1,306 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/token/ERC1155/IERC1155.sol"; -import "@openzeppelin/contracts-upgradeable/token/ERC1155/utils/ERC1155ReceiverUpgradeable.sol"; -import "../libs/LibAddress.sol"; -import "../common/LibStrings.sol"; -import "./IBridgedERC1155.sol"; -import "./BaseNFTVault.sol"; - -/// @title ERC1155Vault -/// @dev Labeled in AddressResolver as "erc1155_vault" -/// @notice This vault holds all ERC1155 tokens that users have deposited. -/// It also manages the mapping between canonical tokens and their bridged -/// tokens. -contract ERC1155Vault is BaseNFTVault, ERC1155ReceiverUpgradeable { - using LibAddress for address; - - uint256[50] private __gap; - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - __ERC1155Receiver_init(); - } - /// @notice Transfers ERC1155 tokens to this vault and sends a message to - /// the destination chain so the user can receive the same (bridged) tokens - /// by invoking the message call. - /// @param _op Option for sending the ERC1155 token. - /// @return message_ The constructed message. - - function sendToken(BridgeTransferOp calldata _op) - external - payable - whenNotPaused - withValidOperation(_op) - nonReentrant - returns (IBridge.Message memory message_) - { - if (msg.value < _op.fee) revert VAULT_INSUFFICIENT_FEE(); - - for (uint256 i; i < _op.amounts.length; ++i) { - if (_op.amounts[i] == 0) revert VAULT_INVALID_AMOUNT(); - } - // Check token interface support - if (!_op.token.supportsInterface(type(IERC1155).interfaceId)) { - revert VAULT_INTERFACE_NOT_SUPPORTED(); - } - - (bytes memory data, CanonicalNFT memory ctoken) = _handleMessage(_op); - - // Create a message to send to the destination chain - IBridge.Message memory message = IBridge.Message({ - id: 0, // will receive a new value - from: address(0), // will receive a new value - srcChainId: 0, // will receive a new value - destChainId: _op.destChainId, - srcOwner: msg.sender, - destOwner: _op.destOwner != address(0) ? 
_op.destOwner : msg.sender, - to: resolve(_op.destChainId, name(), false), - value: msg.value - _op.fee, - fee: _op.fee, - gasLimit: _op.gasLimit, - data: data - }); - - // Send the message and obtain the message hash - bytes32 msgHash; - (msgHash, message_) = - IBridge(resolve(LibStrings.B_BRIDGE, false)).sendMessage{ value: msg.value }(message); - - // Emit TokenSent event - emit TokenSent({ - msgHash: msgHash, - from: message_.srcOwner, - to: _op.to, - destChainId: message_.destChainId, - ctoken: ctoken.addr, - token: _op.token, - tokenIds: _op.tokenIds, - amounts: _op.amounts - }); - } - - /// @inheritdoc IMessageInvocable - function onMessageInvocation(bytes calldata data) external payable whenNotPaused nonReentrant { - ( - CanonicalNFT memory ctoken, - address from, - address to, - uint256[] memory tokenIds, - uint256[] memory amounts - ) = abi.decode(data, (CanonicalNFT, address, address, uint256[], uint256[])); - - // Check context validity - // `onlyFromBridge` checked in checkProcessMessageContext - IBridge.Context memory ctx = checkProcessMessageContext(); - - // Don't allow sending to disallowed addresses. - // Don't send the tokens back to `from` because `from` is on the source chain. - checkToAddress(to); - - // Transfer the ETH and the tokens to the `to` address - address token = _transferTokens(ctoken, to, tokenIds, amounts); - to.sendEtherAndVerify(msg.value); - - emit TokenReceived({ - msgHash: ctx.msgHash, - from: from, - to: to, - srcChainId: ctx.srcChainId, - ctoken: ctoken.addr, - token: token, - tokenIds: tokenIds, - amounts: amounts - }); - } - - /// @inheritdoc IRecallableSender - function onMessageRecalled( - IBridge.Message calldata message, - bytes32 msgHash - ) - external - payable - override - whenNotPaused - nonReentrant - { - // `onlyFromBridge` checked in checkRecallMessageContext - checkRecallMessageContext(); - - (bytes memory data) = abi.decode(message.data[4:], (bytes)); - (CanonicalNFT memory ctoken,,, uint256[] memory tokenIds, uint256[] memory amounts) = - abi.decode(data, (CanonicalNFT, address, address, uint256[], uint256[])); - - // Transfer the ETH and tokens back to the owner - address token = _transferTokens(ctoken, message.srcOwner, tokenIds, amounts); - message.srcOwner.sendEtherAndVerify(message.value); - - // Emit TokenReleased event - emit TokenReleased({ - msgHash: msgHash, - from: message.srcOwner, - ctoken: ctoken.addr, - token: token, - tokenIds: tokenIds, - amounts: amounts - }); - } - - /// @notice See {ERC1155ReceiverUpgradeable-onERC1155BatchReceived}. - function onERC1155BatchReceived( - address, - address, - uint256[] calldata, - uint256[] calldata, - bytes calldata - ) - external - pure - returns (bytes4) - { - return IERC1155ReceiverUpgradeable.onERC1155BatchReceived.selector; - } - - /// @notice See {ERC1155ReceiverUpgradeable-onERC1155Received}. - function onERC1155Received( - address, - address, - uint256, - uint256, - bytes calldata - ) - external - pure - returns (bytes4) - { - return IERC1155ReceiverUpgradeable.onERC1155Received.selector; - } - - /// @dev See {BaseVault-supportsInterface}. - /// @param _interfaceId The interface identifier. - /// @return true if supports, else otherwise. 
- function supportsInterface(bytes4 _interfaceId) - public - view - override(BaseVault, ERC1155ReceiverUpgradeable) - returns (bool) - { - // Here we cannot user `super.supportsInterface(_interfaceId)` - return BaseVault.supportsInterface(_interfaceId) - || ERC1155ReceiverUpgradeable.supportsInterface(_interfaceId); - } - - /// @inheritdoc BaseVault - function name() public pure override returns (bytes32) { - return LibStrings.B_ERC1155_VAULT; - } - - /// @dev Transfers ERC1155 tokens to the `to` address. - /// @param ctoken CanonicalNFT data. - /// @param to The address to transfer the tokens to. - /// @param tokenIds The token IDs to transfer. - /// @param amounts The amounts to transfer. - /// @return token The address of the token. - function _transferTokens( - CanonicalNFT memory ctoken, - address to, - uint256[] memory tokenIds, - uint256[] memory amounts - ) - private - returns (address token) - { - if (ctoken.chainId == block.chainid) { - // Token lives on this chain - token = ctoken.addr; - IERC1155(token).safeBatchTransferFrom(address(this), to, tokenIds, amounts, ""); - } else { - // Token does not live on this chain - token = _getOrDeployBridgedToken(ctoken); - IBridgedERC1155(token).mintBatch(to, tokenIds, amounts); - } - } - - /// @dev Handles the message on the source chain and returns the encoded - /// call on the destination call. - /// @param _op BridgeTransferOp data. - /// @return msgData_ Encoded message data. - /// @return ctoken_ The canonical token. - function _handleMessage(BridgeTransferOp calldata _op) - private - returns (bytes memory msgData_, CanonicalNFT memory ctoken_) - { - unchecked { - // is a btoken, meaning, it does not live on this chain - CanonicalNFT storage _ctoken = bridgedToCanonical[_op.token]; - if (_ctoken.addr != address(0)) { - ctoken_ = _ctoken; - IERC1155(_op.token).safeBatchTransferFrom( - msg.sender, address(this), _op.tokenIds, _op.amounts, "" - ); - for (uint256 i; i < _op.tokenIds.length; ++i) { - IBridgedERC1155(_op.token).burn(_op.tokenIds[i], _op.amounts[i]); - } - } else { - // is a ctoken token, meaning, it lives on this chain - ctoken_ = CanonicalNFT({ - chainId: uint64(block.chainid), - addr: _op.token, - symbol: safeSymbol(_op.token), - name: safeName(_op.token) - }); - - IERC1155(_op.token).safeBatchTransferFrom( - msg.sender, address(this), _op.tokenIds, _op.amounts, "" - ); - } - } - msgData_ = abi.encodeCall( - this.onMessageInvocation, - abi.encode(ctoken_, msg.sender, _op.to, _op.tokenIds, _op.amounts) - ); - } - - /// @dev Retrieve or deploy a bridged ERC1155 token contract. - /// @param _ctoken CanonicalNFT data. - /// @return btoken_ Address of the bridged token contract. - function _getOrDeployBridgedToken(CanonicalNFT memory _ctoken) - private - returns (address btoken_) - { - btoken_ = canonicalToBridged[_ctoken.chainId][_ctoken.addr]; - if (btoken_ == address(0)) { - btoken_ = _deployBridgedToken(_ctoken); - } - } - - /// @dev Deploy a new BridgedNFT contract and initialize it. - /// This must be called before the first time a bridged token is sent to - /// this chain. - /// @param _ctoken CanonicalNFT data. - /// @return btoken_ Address of the deployed bridged token contract. 
- function _deployBridgedToken(CanonicalNFT memory _ctoken) private returns (address btoken_) { - bytes memory data = abi.encodeCall( - IBridgedERC1155Initializable.init, - (owner(), addressManager, _ctoken.addr, _ctoken.chainId, _ctoken.symbol, _ctoken.name) - ); - - btoken_ = address(new ERC1967Proxy(resolve(LibStrings.B_BRIDGED_ERC1155, false), data)); - - bridgedToCanonical[btoken_] = _ctoken; - canonicalToBridged[_ctoken.chainId][_ctoken.addr] = btoken_; - - emit BridgedTokenDeployed({ - chainId: _ctoken.chainId, - ctoken: _ctoken.addr, - btoken: btoken_, - ctokenSymbol: _ctoken.symbol, - ctokenName: _ctoken.name - }); - } -} diff --git a/packages/protocol/contracts/tokenvault/ERC20Vault.sol b/packages/protocol/contracts/tokenvault/ERC20Vault.sol deleted file mode 100644 index fbe0d82a3b14..000000000000 --- a/packages/protocol/contracts/tokenvault/ERC20Vault.sol +++ /dev/null @@ -1,474 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; -import "@openzeppelin/contracts/token/ERC20/extensions/IERC20Metadata.sol"; -import "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; -import "@openzeppelin/contracts/utils/Address.sol"; -import "../bridge/IQuotaManager.sol"; -import "../common/LibStrings.sol"; -import "../libs/LibAddress.sol"; -import "./IBridgedERC20.sol"; -import "./BaseVault.sol"; - -/// @title ERC20Vault -/// @notice This vault holds all ERC20 tokens (excluding Ether) that users have -/// deposited. It also manages the mapping between canonical ERC20 tokens and -/// their bridged tokens. This vault does not support rebase/elastic tokens. -/// @dev Labeled in AddressResolver as "erc20_vault". -/// @custom:security-contact security@taiko.xyz -contract ERC20Vault is BaseVault { - using Address for address; - using LibAddress for address; - using SafeERC20 for IERC20; - - uint256 public constant MIN_MIGRATION_DELAY = 90 days; - - /// @dev Represents a canonical ERC20 token. - struct CanonicalERC20 { - uint64 chainId; - address addr; - uint8 decimals; - string symbol; - string name; - } - - /// @dev Represents an operation to send tokens to another chain. - /// 4 slots - struct BridgeTransferOp { - // Destination chain ID. - uint64 destChainId; - // The owner of the bridge message on the destination chain. - address destOwner; - // Recipient address. - address to; - // Processing fee for the relayer. - uint64 fee; - // Address of the token. - address token; - // Gas limit for the operation. - uint32 gasLimit; - // Amount to be bridged. - uint256 amount; - } - - /// @notice Mappings from bridged tokens to their canonical tokens. - mapping(address btoken => CanonicalERC20 canonical) public bridgedToCanonical; - - /// @notice Mappings from canonical tokens to their bridged tokens. Also storing - /// the chainId for tokens across other chains aside from Ethereum. - mapping(uint256 chainId => mapping(address ctoken => address btoken)) public canonicalToBridged; - - /// @notice Mappings from bridged tokens to their blacklist status. - mapping(address btoken => bool denied) public btokenDenylist; - - /// @notice Mappings from ctoken to its last migration timestamp. - mapping(uint256 chainId => mapping(address ctoken => uint256 timestamp)) public - lastMigrationStart; - - uint256[46] private __gap; - - /// @notice Emitted when a new bridged token is deployed. - /// @param srcChainId The chain ID of the canonical token. - /// @param ctoken The address of the canonical token. 
- /// @param btoken The address of the bridged token. - /// @param ctokenSymbol The symbol of the canonical token. - /// @param ctokenName The name of the canonical token. - /// @param ctokenDecimal The decimal of the canonical token. - event BridgedTokenDeployed( - uint256 indexed srcChainId, - address indexed ctoken, - address indexed btoken, - string ctokenSymbol, - string ctokenName, - uint8 ctokenDecimal - ); - - /// @notice Emitted when a bridged token is changed. - /// @param srcChainId The chain ID of the canonical token. - /// @param ctoken The address of the canonical token. - /// @param btokenOld The address of the old bridged token. - /// @param btokenNew The address of the new bridged token. - /// @param ctokenSymbol The symbol of the canonical token. - /// @param ctokenName The name of the canonical token. - /// @param ctokenDecimal The decimal of the canonical token. - event BridgedTokenChanged( - uint256 indexed srcChainId, - address indexed ctoken, - address btokenOld, - address btokenNew, - string ctokenSymbol, - string ctokenName, - uint8 ctokenDecimal - ); - - /// @notice Emitted when a token is sent to another chain. - /// @param msgHash The hash of the message. - /// @param from The address of the sender. - /// @param to The address of the recipient. - /// @param canonicalChainId The chain ID of the canonical token. - /// @param destChainId The chain ID of the destination chain. - /// @param ctoken The address of the canonical token. - /// @param token The address of the bridged token. - /// @param amount The amount of tokens sent. - event TokenSent( - bytes32 indexed msgHash, - address indexed from, - address indexed to, - uint64 canonicalChainId, - uint64 destChainId, - address ctoken, - address token, - uint256 amount - ); - - /// @notice Emitted when a token is released from a message. - /// @param msgHash The hash of the message. - /// @param from The address of the sender. - /// @param ctoken The address of the canonical token. - /// @param token The address of the bridged token. - /// @param amount The amount of tokens released. - event TokenReleased( - bytes32 indexed msgHash, address indexed from, address ctoken, address token, uint256 amount - ); - - /// @notice Emitted when a token is received from another chain. - /// @param msgHash The hash of the message. - /// @param from The address of the sender. - /// @param to The address of the recipient. - /// @param srcChainId The chain ID of the source chain. - /// @param ctoken The address of the canonical token. - /// @param token The address of the bridged token. - /// @param amount The amount of tokens received. - event TokenReceived( - bytes32 indexed msgHash, - address indexed from, - address indexed to, - uint64 srcChainId, - address ctoken, - address token, - uint256 amount - ); - - error VAULT_BTOKEN_BLACKLISTED(); - error VAULT_CTOKEN_MISMATCH(); - error VAULT_INVALID_TOKEN(); - error VAULT_INVALID_AMOUNT(); - error VAULT_INVALID_CTOKEN(); - error VAULT_INVALID_NEW_BTOKEN(); - error VAULT_LAST_MIGRATION_TOO_CLOSE(); - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - } - - /// @notice Change bridged token. - /// @param _ctoken The canonical token. - /// @param _btokenNew The new bridged token address. 
- /// @return btokenOld_ The old bridged token address. - function changeBridgedToken( - CanonicalERC20 calldata _ctoken, - address _btokenNew - ) - external - onlyOwner - nonReentrant - returns (address btokenOld_) - { - if ( - _btokenNew == address(0) || bridgedToCanonical[_btokenNew].addr != address(0) - || !_btokenNew.isContract() - ) { - revert VAULT_INVALID_NEW_BTOKEN(); - } - - if (_ctoken.addr == address(0) || _ctoken.chainId == block.chainid) { - revert VAULT_INVALID_CTOKEN(); - } - - if (btokenDenylist[_btokenNew]) revert VAULT_BTOKEN_BLACKLISTED(); - - uint256 _lastMigrationStart = lastMigrationStart[_ctoken.chainId][_ctoken.addr]; - if (block.timestamp < _lastMigrationStart + MIN_MIGRATION_DELAY) { - revert VAULT_LAST_MIGRATION_TOO_CLOSE(); - } - - btokenOld_ = canonicalToBridged[_ctoken.chainId][_ctoken.addr]; - - if (btokenOld_ != address(0)) { - CanonicalERC20 memory ctoken = bridgedToCanonical[btokenOld_]; - - // The ctoken must match the saved one. - if (keccak256(abi.encode(_ctoken)) != keccak256(abi.encode(ctoken))) { - revert VAULT_CTOKEN_MISMATCH(); - } - - delete bridgedToCanonical[btokenOld_]; - btokenDenylist[btokenOld_] = true; - - // Start the migration - if ( - btokenOld_.supportsInterface(type(IBridgedERC20Migratable).interfaceId) - && _btokenNew.supportsInterface(type(IBridgedERC20Migratable).interfaceId) - ) { - IBridgedERC20Migratable(btokenOld_).changeMigrationStatus(_btokenNew, false); - IBridgedERC20Migratable(_btokenNew).changeMigrationStatus(btokenOld_, true); - } - } - - bridgedToCanonical[_btokenNew] = _ctoken; - canonicalToBridged[_ctoken.chainId][_ctoken.addr] = _btokenNew; - lastMigrationStart[_ctoken.chainId][_ctoken.addr] = block.timestamp; - - emit BridgedTokenChanged({ - srcChainId: _ctoken.chainId, - ctoken: _ctoken.addr, - btokenOld: btokenOld_, - btokenNew: _btokenNew, - ctokenSymbol: _ctoken.symbol, - ctokenName: _ctoken.name, - ctokenDecimal: _ctoken.decimals - }); - } - - /// @notice Transfers ERC20 tokens to this vault and sends a message to the - /// destination chain so the user can receive the same amount of tokens by - /// invoking the message call. - /// @param _op Option for sending ERC20 tokens. - /// @return message_ The constructed message. - function sendToken(BridgeTransferOp calldata _op) - external - payable - whenNotPaused - nonReentrant - returns (IBridge.Message memory message_) - { - if (_op.amount == 0) revert VAULT_INVALID_AMOUNT(); - if (_op.token == address(0)) revert VAULT_INVALID_TOKEN(); - if (btokenDenylist[_op.token]) revert VAULT_BTOKEN_BLACKLISTED(); - if (msg.value < _op.fee) revert VAULT_INSUFFICIENT_FEE(); - - (bytes memory data, CanonicalERC20 memory ctoken, uint256 balanceChange) = - _handleMessage(_op); - - IBridge.Message memory message = IBridge.Message({ - id: 0, // will receive a new value - from: address(0), // will receive a new value - srcChainId: 0, // will receive a new value - destChainId: _op.destChainId, - srcOwner: msg.sender, - destOwner: _op.destOwner != address(0) ? 
_op.destOwner : msg.sender, - to: resolve(_op.destChainId, name(), false), - value: msg.value - _op.fee, - fee: _op.fee, - gasLimit: _op.gasLimit, - data: data - }); - - bytes32 msgHash; - (msgHash, message_) = - IBridge(resolve(LibStrings.B_BRIDGE, false)).sendMessage{ value: msg.value }(message); - - emit TokenSent({ - msgHash: msgHash, - from: message_.srcOwner, - to: _op.to, - canonicalChainId: ctoken.chainId, - destChainId: _op.destChainId, - ctoken: ctoken.addr, - token: _op.token, - amount: balanceChange - }); - } - - /// @inheritdoc IMessageInvocable - function onMessageInvocation(bytes calldata _data) public payable whenNotPaused nonReentrant { - (CanonicalERC20 memory ctoken, address from, address to, uint256 amount) = - abi.decode(_data, (CanonicalERC20, address, address, uint256)); - - // `onlyFromBridge` checked in checkProcessMessageContext - IBridge.Context memory ctx = checkProcessMessageContext(); - - // Don't allow sending to disallowed addresses. - // Don't send the tokens back to `from` because `from` is on the source chain. - checkToAddress(to); - - // Transfer the ETH and the tokens to the `to` address - address token = _transferTokens(ctoken, to, amount); - to.sendEtherAndVerify(msg.value); - - emit TokenReceived({ - msgHash: ctx.msgHash, - from: from, - to: to, - srcChainId: ctx.srcChainId, - ctoken: ctoken.addr, - token: token, - amount: amount - }); - } - - /// @inheritdoc IRecallableSender - function onMessageRecalled( - IBridge.Message calldata _message, - bytes32 _msgHash - ) - external - payable - override - whenNotPaused - nonReentrant - { - // `onlyFromBridge` checked in checkRecallMessageContext - checkRecallMessageContext(); - - (bytes memory data) = abi.decode(_message.data[4:], (bytes)); - (CanonicalERC20 memory ctoken,,, uint256 amount) = - abi.decode(data, (CanonicalERC20, address, address, uint256)); - - // Transfer the ETH and tokens back to the owner - address token = _transferTokens(ctoken, _message.srcOwner, amount); - _message.srcOwner.sendEtherAndVerify(_message.value); - - emit TokenReleased({ - msgHash: _msgHash, - from: _message.srcOwner, - ctoken: ctoken.addr, - token: token, - amount: amount - }); - } - - /// @inheritdoc BaseVault - function name() public pure override returns (bytes32) { - return LibStrings.B_ERC20_VAULT; - } - - function _transferTokens( - CanonicalERC20 memory _ctoken, - address _to, - uint256 _amount - ) - private - returns (address token_) - { - if (_ctoken.chainId == block.chainid) { - token_ = _ctoken.addr; - IERC20(token_).safeTransfer(_to, _amount); - } else { - token_ = _getOrDeployBridgedToken(_ctoken); - //For native bridged tokens (like USDC), the mint() signature is the same, so no need to - // check. - IBridgedERC20(token_).mint(_to, _amount); - } - _consumeTokenQuota(token_, _amount); - } - - /// @dev Handles the message on the source chain and returns the encoded - /// call on the destination call. - /// @param _op The BridgeTransferOp object. - /// @return msgData_ Encoded message data. - /// @return ctoken_ The canonical token. - /// @return balanceChange_ User token balance actual change after the token - /// transfer. This value is calculated so we do not assume token balance - /// change is the amount of token transferred away. 
- function _handleMessage(BridgeTransferOp calldata _op) - private - returns (bytes memory msgData_, CanonicalERC20 memory ctoken_, uint256 balanceChange_) - { - // If it's a bridged token - CanonicalERC20 storage _ctoken = bridgedToCanonical[_op.token]; - if (_ctoken.addr != address(0)) { - ctoken_ = _ctoken; - // Following the "transfer and burn" pattern, as used by USDC - IERC20(_op.token).safeTransferFrom(msg.sender, address(this), _op.amount); - IBridgedERC20(_op.token).burn(_op.amount); - balanceChange_ = _op.amount; - } else { - // If it's a canonical token - ctoken_ = CanonicalERC20({ - chainId: uint64(block.chainid), - addr: _op.token, - decimals: _safeDecimals(_op.token), - symbol: safeSymbol(_op.token), - name: safeName(_op.token) - }); - - // Query the balance then query it again to get the actual amount of - // token transferred into this address, this is more accurate than - // simply using `amount` -- some contract may deduct a fee from the - // transferred amount. - IERC20 t = IERC20(_op.token); - uint256 _balance = t.balanceOf(address(this)); - t.safeTransferFrom(msg.sender, address(this), _op.amount); - balanceChange_ = t.balanceOf(address(this)) - _balance; - } - - msgData_ = abi.encodeCall( - this.onMessageInvocation, abi.encode(ctoken_, msg.sender, _op.to, balanceChange_) - ); - } - - /// @dev Retrieve or deploy a bridged ERC20 token contract. - /// @param ctoken CanonicalERC20 data. - /// @return btoken Address of the bridged token contract. - function _getOrDeployBridgedToken(CanonicalERC20 memory ctoken) - private - returns (address btoken) - { - btoken = canonicalToBridged[ctoken.chainId][ctoken.addr]; - - if (btoken == address(0)) { - btoken = _deployBridgedToken(ctoken); - } - } - - /// @dev Deploy a new BridgedERC20 contract and initialize it. - /// This must be called before the first time a bridged token is sent to - /// this chain. - /// @param ctoken CanonicalERC20 data. - /// @return btoken Address of the deployed bridged token contract. - function _deployBridgedToken(CanonicalERC20 memory ctoken) private returns (address btoken) { - bytes memory data = abi.encodeCall( - IBridgedERC20Initializable.init, - ( - owner(), - addressManager, - ctoken.addr, - ctoken.chainId, - ctoken.decimals, - ctoken.symbol, - ctoken.name - ) - ); - - btoken = address(new ERC1967Proxy(resolve(LibStrings.B_BRIDGED_ERC20, false), data)); - bridgedToCanonical[btoken] = ctoken; - canonicalToBridged[ctoken.chainId][ctoken.addr] = btoken; - - emit BridgedTokenDeployed({ - srcChainId: ctoken.chainId, - ctoken: ctoken.addr, - btoken: btoken, - ctokenSymbol: ctoken.symbol, - ctokenName: ctoken.name, - ctokenDecimal: ctoken.decimals - }); - } - - function _consumeTokenQuota(address _token, uint256 _amount) private { - address quotaManager = resolve(LibStrings.B_QUOTA_MANAGER, true); - if (quotaManager != address(0)) { - IQuotaManager(quotaManager).consumeQuota(_token, _amount); - } - } - - function _safeDecimals(address _token) private view returns (uint8) { - (bool success, bytes memory data) = - address(_token).staticcall(abi.encodeCall(IERC20Metadata.decimals, ())); - return success && data.length == 32 ? 
abi.decode(data, (uint8)) : 18; - } -} diff --git a/packages/protocol/contracts/tokenvault/ERC721Vault.sol b/packages/protocol/contracts/tokenvault/ERC721Vault.sol deleted file mode 100644 index ac07f53bb2dd..000000000000 --- a/packages/protocol/contracts/tokenvault/ERC721Vault.sol +++ /dev/null @@ -1,261 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/token/ERC721/IERC721.sol"; -import "@openzeppelin/contracts/token/ERC721/IERC721Receiver.sol"; -import "../libs/LibAddress.sol"; -import "../common/LibStrings.sol"; -import "./IBridgedERC721.sol"; -import "./BaseNFTVault.sol"; - -/// @title ERC721Vault -/// @notice This vault holds all ERC721 tokens that users have deposited. It also manages -/// the mapping between canonical tokens and their bridged tokens. -/// @dev Labeled in AddressResolver as "erc721_vault". -/// @custom:security-contact security@taiko.xyz -contract ERC721Vault is BaseNFTVault, IERC721Receiver { - using LibAddress for address; - - uint256[50] private __gap; - - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - function init(address _owner, address _addressManager) external initializer { - __Essential_init(_owner, _addressManager); - } - - /// @notice Transfers ERC721 tokens to this vault and sends a message to the - /// destination chain so the user can receive the same (bridged) tokens - /// by invoking the message call. - /// @param _op Option for sending the ERC721 token. - /// @return message_ The constructed message. - function sendToken(BridgeTransferOp calldata _op) - external - payable - whenNotPaused - withValidOperation(_op) - nonReentrant - returns (IBridge.Message memory message_) - { - if (msg.value < _op.fee) revert VAULT_INSUFFICIENT_FEE(); - - for (uint256 i; i < _op.tokenIds.length; ++i) { - if (_op.amounts[i] != 0) revert VAULT_INVALID_AMOUNT(); - } - - if (!_op.token.supportsInterface(type(IERC721).interfaceId)) { - revert VAULT_INTERFACE_NOT_SUPPORTED(); - } - - (bytes memory data, CanonicalNFT memory ctoken) = _handleMessage(_op); - - IBridge.Message memory message = IBridge.Message({ - id: 0, // will receive a new value - from: address(0), // will receive a new value - srcChainId: 0, // will receive a new value - destChainId: _op.destChainId, - srcOwner: msg.sender, - destOwner: _op.destOwner != address(0) ? _op.destOwner : msg.sender, - to: resolve(_op.destChainId, name(), false), - value: msg.value - _op.fee, - fee: _op.fee, - gasLimit: _op.gasLimit, - data: data - }); - - bytes32 msgHash; - (msgHash, message_) = - IBridge(resolve(LibStrings.B_BRIDGE, false)).sendMessage{ value: msg.value }(message); - - emit TokenSent({ - msgHash: msgHash, - from: message_.srcOwner, - to: _op.to, - destChainId: message_.destChainId, - ctoken: ctoken.addr, - token: _op.token, - tokenIds: _op.tokenIds, - amounts: _op.amounts - }); - } - - /// @inheritdoc IMessageInvocable - function onMessageInvocation(bytes calldata _data) - external - payable - whenNotPaused - nonReentrant - { - (CanonicalNFT memory ctoken, address from, address to, uint256[] memory tokenIds) = - abi.decode(_data, (CanonicalNFT, address, address, uint256[])); - - // `onlyFromBridge` checked in checkProcessMessageContext - IBridge.Context memory ctx = checkProcessMessageContext(); - - // Don't allow sending to disallowed addresses. 
- // Don't send the tokens back to `from` because `from` is on the source chain. - checkToAddress(to); - - // Transfer the ETH and the tokens to the `to` address - address token = _transferTokens(ctoken, to, tokenIds); - to.sendEtherAndVerify(msg.value); - - emit TokenReceived({ - msgHash: ctx.msgHash, - from: from, - to: to, - srcChainId: ctx.srcChainId, - ctoken: ctoken.addr, - token: token, - tokenIds: tokenIds, - amounts: new uint256[](tokenIds.length) - }); - } - - /// @inheritdoc IRecallableSender - function onMessageRecalled( - IBridge.Message calldata _message, - bytes32 _msgHash - ) - external - payable - override - whenNotPaused - nonReentrant - { - // `onlyFromBridge` checked in checkRecallMessageContext - checkRecallMessageContext(); - - (bytes memory data) = abi.decode(_message.data[4:], (bytes)); - (CanonicalNFT memory ctoken,,, uint256[] memory tokenIds) = - abi.decode(data, (CanonicalNFT, address, address, uint256[])); - - // Transfer the ETH and tokens back to the owner - address token = _transferTokens(ctoken, _message.srcOwner, tokenIds); - _message.srcOwner.sendEtherAndVerify(_message.value); - - emit TokenReleased({ - msgHash: _msgHash, - from: _message.srcOwner, - ctoken: ctoken.addr, - token: token, - tokenIds: tokenIds, - amounts: new uint256[](tokenIds.length) - }); - } - - /// @inheritdoc IERC721Receiver - function onERC721Received( - address, - address, - uint256, - bytes calldata - ) - external - pure - returns (bytes4) - { - return IERC721Receiver.onERC721Received.selector; - } - - /// @inheritdoc BaseVault - function name() public pure override returns (bytes32) { - return LibStrings.B_ERC721_VAULT; - } - - function _transferTokens( - CanonicalNFT memory _ctoken, - address _to, - uint256[] memory _tokenIds - ) - private - returns (address token_) - { - if (_ctoken.chainId == block.chainid) { - token_ = _ctoken.addr; - for (uint256 i; i < _tokenIds.length; ++i) { - IERC721(token_).safeTransferFrom(address(this), _to, _tokenIds[i]); - } - } else { - token_ = _getOrDeployBridgedToken(_ctoken); - for (uint256 i; i < _tokenIds.length; ++i) { - IBridgedERC721(token_).mint(_to, _tokenIds[i]); - } - } - } - - /// @dev Handles the message on the source chain and returns the encoded - /// call on the destination call. - /// @param _op BridgeTransferOp data. - /// @return msgData_ Encoded message data. - /// @return ctoken_ The canonical token. - function _handleMessage(BridgeTransferOp calldata _op) - private - returns (bytes memory msgData_, CanonicalNFT memory ctoken_) - { - unchecked { - CanonicalNFT storage _ctoken = bridgedToCanonical[_op.token]; - if (_ctoken.addr != address(0)) { - ctoken_ = _ctoken; - for (uint256 i; i < _op.tokenIds.length; ++i) { - IERC721(_op.token).safeTransferFrom(msg.sender, address(this), _op.tokenIds[i]); - IBridgedERC721(_op.token).burn(_op.tokenIds[i]); - } - } else { - ctoken_ = CanonicalNFT({ - chainId: uint64(block.chainid), - addr: _op.token, - symbol: safeSymbol(_op.token), - name: safeName(_op.token) - }); - - for (uint256 i; i < _op.tokenIds.length; ++i) { - IERC721(_op.token).safeTransferFrom(msg.sender, address(this), _op.tokenIds[i]); - } - } - } - - msgData_ = abi.encodeCall( - this.onMessageInvocation, abi.encode(ctoken_, msg.sender, _op.to, _op.tokenIds) - ); - } - - /// @dev Retrieve or deploy a bridged ERC721 token contract. - /// @param _ctoken CanonicalNFT data. - /// @return btoken_ Address of the bridged token contract. 
- function _getOrDeployBridgedToken(CanonicalNFT memory _ctoken) - private - returns (address btoken_) - { - btoken_ = canonicalToBridged[_ctoken.chainId][_ctoken.addr]; - - if (btoken_ == address(0)) { - btoken_ = _deployBridgedToken(_ctoken); - } - } - - /// @dev Deploy a new BridgedNFT contract and initialize it. - /// This must be called before the first time a bridged token is sent to - /// this chain. - /// @param _ctoken CanonicalNFT data. - /// @return btoken_ Address of the deployed bridged token contract. - function _deployBridgedToken(CanonicalNFT memory _ctoken) private returns (address btoken_) { - bytes memory data = abi.encodeCall( - IBridgedERC721Initializable.init, - (owner(), addressManager, _ctoken.addr, _ctoken.chainId, _ctoken.symbol, _ctoken.name) - ); - - btoken_ = address(new ERC1967Proxy(resolve(LibStrings.B_BRIDGED_ERC721, false), data)); - bridgedToCanonical[btoken_] = _ctoken; - canonicalToBridged[_ctoken.chainId][_ctoken.addr] = btoken_; - - emit BridgedTokenDeployed({ - chainId: _ctoken.chainId, - ctoken: _ctoken.addr, - btoken: btoken_, - ctokenSymbol: _ctoken.symbol, - ctokenName: _ctoken.name - }); - } -} diff --git a/packages/protocol/contracts/tokenvault/IBridgedERC1155.sol b/packages/protocol/contracts/tokenvault/IBridgedERC1155.sol deleted file mode 100644 index 3d0ac0a93de1..000000000000 --- a/packages/protocol/contracts/tokenvault/IBridgedERC1155.sol +++ /dev/null @@ -1,49 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title IBridgedERC1155 -/// @notice Contract for bridging ERC1155 tokens across different chains. -/// @custom:security-contact security@taiko.xyz -interface IBridgedERC1155 { - /// @dev Mints tokens. - /// @param _to Address to receive the minted tokens. - /// @param _tokenIds ID of the token to mint. - /// @param _amounts Amount of tokens to mint. - function mintBatch( - address _to, - uint256[] calldata _tokenIds, - uint256[] calldata _amounts - ) - external; - - /// @dev Burns tokens. - /// @param _id ID of the token to burn. - /// @param _amount Amount of token to burn respectively. - function burn(uint256 _id, uint256 _amount) external; - - /// @notice Gets the canonical token's address and chain ID. - /// @return The canonical token's address. - /// @return The canonical token's chain ID. - function canonical() external view returns (address, uint256); -} - -/// @title IBridgedERC1155Initializable -/// @custom:security-contact security@taiko.xyz -interface IBridgedERC1155Initializable { - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - /// @param _srcToken Address of the source token. - /// @param _srcChainId Source chain ID. - /// @param _symbol Symbol of the bridged token. - /// @param _name Name of the bridged token. - function init( - address _owner, - address _addressManager, - address _srcToken, - uint256 _srcChainId, - string calldata _symbol, - string calldata _name - ) - external; -} diff --git a/packages/protocol/contracts/tokenvault/IBridgedERC20.sol b/packages/protocol/contracts/tokenvault/IBridgedERC20.sol deleted file mode 100644 index 5750341924c6..000000000000 --- a/packages/protocol/contracts/tokenvault/IBridgedERC20.sol +++ /dev/null @@ -1,71 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title IBridgedERC20 -/// @notice Interface for all bridged tokens. 
-/// @dev Here is the list of assumptions that guarantees that the bridged token can be bridged back -/// to it's canonical counterpart (by-default it is, but in case a third-party "native" token is set -/// and used in our bridge): -/// - The token should be ERC-20 compliant -/// - Supply increases should only be caused by mints from the vault. Notably, rebasing tokens are -/// not supported. -/// - Token balances should change by exactly the given amounts on `transfer`/`mint`/`burn`. Notable, -/// tokens with fees on transfers are not supported. -/// - If the bridged token is not directly deployed by the Bridge (ERC20Vault), - for example a USDT -/// token bytecode deployed on Taiko to support native tokens - it might be necessary to implement -/// an intermediary adapter contract which should conform mint() and burn() interfaces, so that the -/// ERC20Vault can call these actions on the adapter. -/// - If the bridged token is not directly deployed by the Bridge (ERC20Vault), but conforms the -/// mint() and burn() interface and the ERC20Vault has the right to perform these actions (has -/// minter/burner role). -/// - If the bridged token is directly deployed by our Bridge (ERC20Vault). -/// @custom:security-contact security@taiko.xyz -interface IBridgedERC20 { - /// @notice Mints `amount` tokens and assigns them to the `account` address. - /// @param _account The account to receive the minted tokens. - /// @param _amount The amount of tokens to mint. - function mint(address _account, uint256 _amount) external; - - /// @notice Burns tokens from msg.sender. This is only allowed if: - /// - 1) tokens are migrating out to a new bridged token - /// - 2) The token is burned by ERC20Vault to bridge back to the canonical chain. - /// @param _amount The amount of tokens to burn. - function burn(uint256 _amount) external; - - /// @notice Gets the canonical token's address and chain ID. - /// @return The canonical token's address. - /// @return The canonical token's chain ID. - function canonical() external view returns (address, uint256); -} - -/// @title IBridgedERC20Migratable -/// @custom:security-contact security@taiko.xyz -interface IBridgedERC20Migratable { - /// @notice Starts or stops migration to/from a specified contract. - /// @param _addr The address migrating 'to' or 'from'. - /// @param _inbound If false then signals migrating 'from', true if migrating 'into'. - function changeMigrationStatus(address _addr, bool _inbound) external; -} - -/// @title IBridgedERC20Initializable -/// @custom:security-contact security@taiko.xyz -interface IBridgedERC20Initializable { - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - /// @param _srcToken The source token address. - /// @param _srcChainId The source chain ID. - /// @param _decimals The number of decimal places of the source token. - /// @param _symbol The symbol of the token. - /// @param _name The name of the token. 
- function init( - address _owner, - address _addressManager, - address _srcToken, - uint256 _srcChainId, - uint8 _decimals, - string calldata _symbol, - string calldata _name - ) - external; -} diff --git a/packages/protocol/contracts/tokenvault/IBridgedERC721.sol b/packages/protocol/contracts/tokenvault/IBridgedERC721.sol deleted file mode 100644 index e8a2d4266054..000000000000 --- a/packages/protocol/contracts/tokenvault/IBridgedERC721.sol +++ /dev/null @@ -1,42 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -/// @title IBridgedERC721 -/// @notice Contract for bridging ERC721 tokens across different chains. -/// @custom:security-contact security@taiko.xyz -interface IBridgedERC721 { - /// @dev Mints tokens. - /// @param _account Address to receive the minted token. - /// @param _tokenId ID of the token to mint. - function mint(address _account, uint256 _tokenId) external; - - /// @dev Burns tokens. - /// @param _tokenId ID of the token to burn. - function burn(uint256 _tokenId) external; - - /// @notice Gets the canonical token's address and chain ID. - /// @return The canonical token's address. - /// @return The canonical token's chain ID. - function canonical() external view returns (address, uint256); -} - -/// @title IBridgedERC721Initializable -/// @custom:security-contact security@taiko.xyz -interface IBridgedERC721Initializable { - /// @notice Initializes the contract. - /// @param _owner The owner of this contract. msg.sender will be used if this value is zero. - /// @param _addressManager The address of the {AddressManager} contract. - /// @param _srcToken Address of the source token. - /// @param _srcChainId Source chain ID. - /// @param _symbol Symbol of the bridged token. - /// @param _name Name of the bridged token. 
- function init( - address _owner, - address _addressManager, - address _srcToken, - uint256 _srcChainId, - string calldata _symbol, - string calldata _name - ) - external; -} diff --git a/packages/protocol/contracts/tokenvault/LibBridgedToken.sol b/packages/protocol/contracts/tokenvault/LibBridgedToken.sol deleted file mode 100644 index c81313ccb045..000000000000 --- a/packages/protocol/contracts/tokenvault/LibBridgedToken.sol +++ /dev/null @@ -1,44 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/utils/Strings.sol"; - -/// @title LibBridgedToken -/// @custom:security-contact security@taiko.xyz -library LibBridgedToken { - error BTOKEN_INVALID_PARAMS(); - error BTOKEN_INVALID_TO_ADDR(); - - function validateInputs(address _srcToken, uint256 _srcChainId) internal view { - if (_srcToken == address(0) || _srcChainId == 0 || _srcChainId == block.chainid) { - revert BTOKEN_INVALID_PARAMS(); - } - } - - function checkToAddress(address _to) internal view { - if (_to == address(this)) revert BTOKEN_INVALID_TO_ADDR(); - } - - function buildURI( - address _srcToken, - uint256 _srcChainId, - string memory _extraParams - ) - internal - pure - returns (string memory) - { - // Creates a base URI in the format specified by EIP-681: - // https://eips.ethereum.org/EIPS/eip-681 - return string( - abi.encodePacked( - "ethereum:", - Strings.toHexString(uint160(_srcToken), 20), - "@", - Strings.toString(_srcChainId), - "/tokenURI?uint256=", - _extraParams - ) - ); - } -} diff --git a/packages/protocol/deployments/deploy_l1.json b/packages/protocol/deployments/deploy_l1.json deleted file mode 100644 index 7741733c17e2..000000000000 --- a/packages/protocol/deployments/deploy_l1.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "chain_prover": "0x373E0B8B80A15cdf587C1263654c6B5edd195a43", - "rollup_address_manager": "0x8F0342A7060e76dfc7F6e9dEbfAD9b9eC919952c", - "shared_address_manager": "0x17435ccE3d1B4fA2e5f8A08eD921D57C6762A180", - "taiko": "0x9fCF7D13d10dEdF17d0f24C62f0cf4ED462f65b7", - "taiko_token": "0x422A3492e218383753D8006C7Bfa97815B44373F", - "tier_sgx1": "0x1ADB9959EB142bE128E6dfEcc8D571f07cd66DeE", - "tier_sgx2": "0x3A8C1bd531b5C1aeFBB9ebc3e021C1251cF4Ccb1", - "tier_sgx3": "0x80741a37E3644612F0465145C9709a90B6D77Ee3", - "verifier_registry": "0x9ECB6f04D47FA2599449AaA523bF84476f7aD80f" -} \ No newline at end of file diff --git a/packages/protocol/deployments/local_deployment.md b/packages/protocol/deployments/local_deployment.md deleted file mode 100644 index 3424cba2f7cb..000000000000 --- a/packages/protocol/deployments/local_deployment.md +++ /dev/null @@ -1,67 +0,0 @@ -# How to deploy Gwyneth locally - on a reth-based private network - -The first part is coming from [Reth Book](https://reth.rs/run/private-testnet.html), but if you want to dig deeper, please visit the website, otherwise it is not necessary. - -### 0. Pre-requisites: -- have docker installed (and docker daemon running) -- have Kurtosis installed, on Mac, e.g.: -```shell -brew install kurtosis-tech/tap/kurtosis-cli -``` - -### 1. Define the network config parameters - -Create a `network_params.yaml` file. 
- -```shell -participants: - - el_type: reth - el_image: taiko_reth # We can use custom image, (remote, e.g.: ethpandaops/reth:main-9c0bc84 or locally: taiko_reth) - cl_type: lighthouse - cl_image: sigp/lighthouse:latest - - el_type: reth - el_image: taiko_reth # We can use custom image, (remote, e.g.: ethpandaops/reth:main-9c0bc84 or locally: taiko_reth) - cl_type: teku - cl_image: consensys/teku:latest -network_params: - network_id: '160010' -``` - -#### 1.1 Local reth-based network - -1. Go to the root of the repository, and build the image, e.g.: -```shell -docker build . -t taiko_reth -``` - -2. Use simply the `taiko_reth` image, in `el_image` variable of the network yaml file. - -### 2. Spin up the network - -```shell -kurtosis run github.com/ethpandaops/ethereum-package --args-file YOUR_NETWORK_FILE_PATH/network_params.yaml -``` - -It will show you a lot of information in the terminal - along with the genesis info, network id, addresses with pre-funded ETH, etc. - -### 3. Set .env vars and run contract deployment script -Paste one PK and ADDR pair from anvil output to .env file and set the correct corresponding (PRIVATE_KEY and MAINNET_CONTRACT_OWNER) variables. - -Run script: - -```shell -$ forge script --rpc-url http://127.0.0.1:YOUR_PORT scripts/DeployL1Locally.s.sol -vvvv --broadcast --private-key --legacy -``` - -Important: shall be the same PK as you set in the ENV file. - -### 4. Test interaction with the blockchain - -Shoot it with simple RPC commands e.g. via `curl`, to see the blockchain is operational. - -```shell -curl http://127.0.0.1:YOUR_EXPOSED_PORT \ - -X POST \ - -H "Content-Type: application/json" \ - --data '{"method":"eth_getBlockByNumber","params":["0x0",false],"id":1,"jsonrpc":"2.0"}' -``` \ No newline at end of file diff --git a/packages/protocol/foundry.toml b/packages/protocol/foundry.toml deleted file mode 100644 index afc3892ba624..000000000000 --- a/packages/protocol/foundry.toml +++ /dev/null @@ -1,52 +0,0 @@ -[profile.default] -src = "contracts" -out = "out" -test = "test" -script = "script" -gas_price = 10_000_000_000 # 10 Gwei -gas_limit = "18446744073709551615" # u64::MAX -optimizer = true -optimizer_runs = 200 -ffi = true -memory_limit = 2_073_741_824 -solc_version = "0.8.24" -evm_version = "cancun" -remappings = [ - "@openzeppelin/contracts-upgradeable/=node_modules/@openzeppelin/contracts-upgradeable/", - "@openzeppelin/contracts/=node_modules/@openzeppelin/contracts/", - "solady/=node_modules/solady/", - "forge-std/=node_modules/forge-std/", - "ds-test/=node_modules/ds-test/src/", - "p256-verifier/=node_modules/p256-verifier/", -] - -# Do not change the block_gas_limit value, TaikoL2.t.sol depends on it. -# For mainnet_mock tokenomics test we need a huge value to run lots of iterations. -# Use 30M for TaikoL2.t.sol related tests, only use this number with mainnet simulation. 
-block_gas_limit = 80_000_000 - -fs_permissions = [ - { access = "read", path = "./out" }, - { access = "read-write", path = "./deployments" }, - { access = "read", path = "./test" }, - { access = "read", path = "./genesis" }, -] - -# 2394: Transient storage warning -# 3860: Contract initcode size is xxx bytes and exceeds 49152 bytes -# 5574: Contract code size is xxx bytes and exceeds 24576 byte -# 5740: Unreachable code -ignored_error_codes = [2394, 3860, 5574, 5740] - -[fuzz] -runs = 200 - -[fmt] -bracket_spacing = true -line_length = 100 -multiline_func_header = "all" -number_underscore = "thousands" -wrap_comments = true - -[profile.genesis] -test = "genesis" \ No newline at end of file diff --git a/packages/protocol/package.json b/packages/protocol/package.json deleted file mode 100644 index f8d77a060b9a..000000000000 --- a/packages/protocol/package.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "name": "@taiko/gwyneth_protocol", - "version": "1.0.0", - "private": true, - "scripts": { - "buildMerkle": "ts-node ./utils/airdrop/buildMerkleTree.ts ./utils/airdrop/airdrop_db/example_claimList.json", - "clean": "rm -rf abis cache && forge clean", - "compile": "forge build", - "compile:hardhat": "pnpm hardhat compile", - "deploy:foundry": "./script/download_solc.sh && ./script/test_deploy_on_l1.sh", - "eslint": "pnpm exec eslint --ignore-path .eslintignore --ext .js,.ts .", - "eslint:fix": "pnpm exec eslint --ignore-path .eslintignore --ext .js,.ts . --fix", - "export:abi": "pnpm hardhat clear-abi && pnpm hardhat export-abi", - "fmt:sol": "forge fmt", - "generate:genesis": "ts-node ./utils/generate_genesis/main.ts", - "lint:sol": "forge fmt && pnpm solhint 'contracts/**/*.sol' --fix", - "sizer": "pnpm hardhat size-contracts", - "snapshot": "forge snapshot --match-path 'test/**/*.t.sol'", - "test": "forge test -vvv --match-path test/*.t.sol", - "test:coverage": "mkdir -p coverage && forge coverage --report lcov && lcov --remove ./lcov.info -o ./coverage/lcov.info 'test/' 'script/' 'contracts/thirdparty/' && genhtml coverage/lcov.info --branch-coverage --output-dir coverage --ignore-errors category && open coverage/index.html", - "test:genesis": "pnpm compile && pnpm compile:hardhat && FOUNDRY_PROFILE=genesis ./genesis/generate_genesis.test.sh", - "export:simconf": "forge test --match-test 'test_simulation' -vv > simulation/out/simconf_$(date +%s).txt" - }, - "keywords": [ - "ZKP", - "Zero-Knowledge Proof", - "Decentralized", - "Permissionless", - "Type-1", - "ZK-EVM", - "zkRollup", - "Ethereum", - "Layer2" - ], - "author": "Taiko Labs", - "license": "MIT", - "devDependencies": { - "@types/node": "^20.11.30", - "@typescript-eslint/eslint-plugin": "^7.4.0", - "@typescript-eslint/parser": "^7.7.0", - "eslint": "^8.51.0", - "eslint-config-prettier": "^9.1.0", - "eslint-config-standard": "^17.1.0", - "eslint-plugin-import": "^2.28.1", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-prettier": "^5.1.3", - "eslint-plugin-promise": "^6.1.1", - "ethers": "^5.7.2", - "solc": "0.8.24", - "solhint": "^5.0.1", - "ts-node": "^10.9.2", - "typescript": "^5.2.2" - }, - "dependencies": { - "@openzeppelin/contracts": "4.9.6", - "@openzeppelin/contracts-upgradeable": "4.9.6", - "ds-test": "github:dapphub/ds-test#e282159d5170298eb2455a6c05280ab5a73a4ef0", - "forge-std": "github:foundry-rs/forge-std#v1.7.5", - "merkletreejs": "^0.3.11", - "p256-verifier": "github:taikoxyz/p256-verifier#v0.1.0", - "solady": "github:Vectorized/solady#v0.0.167" - } -} diff --git a/packages/protocol/pnpm-lock.yaml 
b/packages/protocol/pnpm-lock.yaml deleted file mode 100644 index e534a4990cde..000000000000 --- a/packages/protocol/pnpm-lock.yaml +++ /dev/null @@ -1,3735 +0,0 @@ -lockfileVersion: '9.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - - .: - dependencies: - '@openzeppelin/contracts': - specifier: 4.9.6 - version: 4.9.6 - '@openzeppelin/contracts-upgradeable': - specifier: 4.9.6 - version: 4.9.6 - ds-test: - specifier: github:dapphub/ds-test#e282159d5170298eb2455a6c05280ab5a73a4ef0 - version: https://codeload.github.com/dapphub/ds-test/tar.gz/e282159d5170298eb2455a6c05280ab5a73a4ef0 - forge-std: - specifier: github:foundry-rs/forge-std#v1.7.5 - version: https://codeload.github.com/foundry-rs/forge-std/tar.gz/36c303b7ffdd842d06b1ec2744c9b9b5fb3083f3 - merkletreejs: - specifier: ^0.3.11 - version: 0.3.11 - p256-verifier: - specifier: github:taikoxyz/p256-verifier#v0.1.0 - version: p256-verifier#v0.1.0@https://codeload.github.com/taikoxyz/p256-verifier/tar.gz/6ef45b117642786b08a37b4c37c6a6ce151166da - solady: - specifier: github:Vectorized/solady#v0.0.167 - version: https://codeload.github.com/Vectorized/solady/tar.gz/de0f336d2033d04e0f77c923d639c7fbffd48b6d - devDependencies: - '@types/node': - specifier: ^20.11.30 - version: 20.11.30 - '@typescript-eslint/eslint-plugin': - specifier: ^7.4.0 - version: 7.4.0(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint@8.51.0)(typescript@5.2.2) - '@typescript-eslint/parser': - specifier: ^7.7.0 - version: 7.7.0(eslint@8.51.0)(typescript@5.2.2) - eslint: - specifier: ^8.51.0 - version: 8.51.0 - eslint-config-prettier: - specifier: ^9.1.0 - version: 9.1.0(eslint@8.51.0) - eslint-config-standard: - specifier: ^17.1.0 - version: 17.1.0(eslint-plugin-import@2.28.1(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint@8.51.0))(eslint-plugin-n@16.6.2(eslint@8.51.0))(eslint-plugin-promise@6.1.1(eslint@8.51.0))(eslint@8.51.0) - eslint-plugin-import: - specifier: ^2.28.1 - version: 2.28.1(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint@8.51.0) - eslint-plugin-node: - specifier: ^11.1.0 - version: 11.1.0(eslint@8.51.0) - eslint-plugin-prettier: - specifier: ^5.1.3 - version: 5.1.3(eslint-config-prettier@9.1.0(eslint@8.51.0))(eslint@8.51.0)(prettier@3.0.3) - eslint-plugin-promise: - specifier: ^6.1.1 - version: 6.1.1(eslint@8.51.0) - ethers: - specifier: ^5.7.2 - version: 5.7.2 - solc: - specifier: 0.8.24 - version: 0.8.24 - solhint: - specifier: ^5.0.1 - version: 5.0.1(typescript@5.2.2) - ts-node: - specifier: ^10.9.2 - version: 10.9.2(@types/node@20.11.30)(typescript@5.2.2) - typescript: - specifier: ^5.2.2 - version: 5.2.2 - -packages: - - '@babel/code-frame@7.24.7': - resolution: {integrity: sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==} - engines: {node: '>=6.9.0'} - - '@babel/helper-validator-identifier@7.24.7': - resolution: {integrity: sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==} - engines: {node: '>=6.9.0'} - - '@babel/highlight@7.24.7': - resolution: {integrity: sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==} - engines: {node: '>=6.9.0'} - - '@cspotcode/source-map-support@0.8.1': - resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} - engines: {node: '>=12'} - - '@eslint-community/eslint-utils@4.4.0': - resolution: {integrity: 
sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - - '@eslint-community/regexpp@4.11.0': - resolution: {integrity: sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==} - engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} - - '@eslint/eslintrc@2.1.4': - resolution: {integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - - '@eslint/js@8.51.0': - resolution: {integrity: sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - - '@ethereumjs/rlp@4.0.1': - resolution: {integrity: sha512-tqsQiBQDQdmPWE1xkkBq4rlSW5QZpLOUJ5RJh2/9fug+q9tnUhuZoVLk7s0scUIKTOzEtR72DFBXI4WiZcMpvw==} - engines: {node: '>=14'} - hasBin: true - - '@ethereumjs/util@8.1.0': - resolution: {integrity: sha512-zQ0IqbdX8FZ9aw11vP+dZkKDkS+kgIvQPHnSAXzP9pLu+Rfu3D3XEeLbicvoXJTYnhZiPmsZUxgdzXwNKxRPbA==} - engines: {node: '>=14'} - - '@ethersproject/abi@5.7.0': - resolution: {integrity: sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA==} - - '@ethersproject/abstract-provider@5.7.0': - resolution: {integrity: sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw==} - - '@ethersproject/abstract-signer@5.7.0': - resolution: {integrity: sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ==} - - '@ethersproject/address@5.7.0': - resolution: {integrity: sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA==} - - '@ethersproject/base64@5.7.0': - resolution: {integrity: sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ==} - - '@ethersproject/basex@5.7.0': - resolution: {integrity: sha512-ywlh43GwZLv2Voc2gQVTKBoVQ1mti3d8HK5aMxsfu/nRDnMmNqaSJ3r3n85HBByT8OpoY96SXM1FogC533T4zw==} - - '@ethersproject/bignumber@5.7.0': - resolution: {integrity: sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw==} - - '@ethersproject/bytes@5.7.0': - resolution: {integrity: sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A==} - - '@ethersproject/constants@5.7.0': - resolution: {integrity: sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA==} - - '@ethersproject/contracts@5.7.0': - resolution: {integrity: sha512-5GJbzEU3X+d33CdfPhcyS+z8MzsTrBGk/sc+G+59+tPa9yFkl6HQ9D6L0QMgNTA9q8dT0XKxxkyp883XsQvbbg==} - - '@ethersproject/hash@5.7.0': - resolution: {integrity: sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g==} - - '@ethersproject/hdnode@5.7.0': - resolution: {integrity: sha512-OmyYo9EENBPPf4ERhR7oj6uAtUAhYGqOnIS+jE5pTXvdKBS99ikzq1E7Iv0ZQZ5V36Lqx1qZLeak0Ra16qpeOg==} - - '@ethersproject/json-wallets@5.7.0': - resolution: {integrity: sha512-8oee5Xgu6+RKgJTkvEMl2wDgSPSAQ9MB/3JYjFV9jlKvcYHUXZC+cQp0njgmxdHkYWn8s6/IqIZYm0YWCjO/0g==} - - '@ethersproject/keccak256@5.7.0': - resolution: {integrity: sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg==} - - '@ethersproject/logger@5.7.0': - resolution: {integrity: 
sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig==} - - '@ethersproject/networks@5.7.1': - resolution: {integrity: sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ==} - - '@ethersproject/pbkdf2@5.7.0': - resolution: {integrity: sha512-oR/dBRZR6GTyaofd86DehG72hY6NpAjhabkhxgr3X2FpJtJuodEl2auADWBZfhDHgVCbu3/H/Ocq2uC6dpNjjw==} - - '@ethersproject/properties@5.7.0': - resolution: {integrity: sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw==} - - '@ethersproject/providers@5.7.2': - resolution: {integrity: sha512-g34EWZ1WWAVgr4aptGlVBF8mhl3VWjv+8hoAnzStu8Ah22VHBsuGzP17eb6xDVRzw895G4W7vvx60lFFur/1Rg==} - - '@ethersproject/random@5.7.0': - resolution: {integrity: sha512-19WjScqRA8IIeWclFme75VMXSBvi4e6InrUNuaR4s5pTF2qNhcGdCUwdxUVGtDDqC00sDLCO93jPQoDUH4HVmQ==} - - '@ethersproject/rlp@5.7.0': - resolution: {integrity: sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w==} - - '@ethersproject/sha2@5.7.0': - resolution: {integrity: sha512-gKlH42riwb3KYp0reLsFTokByAKoJdgFCwI+CCiX/k+Jm2mbNs6oOaCjYQSlI1+XBVejwH2KrmCbMAT/GnRDQw==} - - '@ethersproject/signing-key@5.7.0': - resolution: {integrity: sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q==} - - '@ethersproject/solidity@5.7.0': - resolution: {integrity: sha512-HmabMd2Dt/raavyaGukF4XxizWKhKQ24DoLtdNbBmNKUOPqwjsKQSdV9GQtj9CBEea9DlzETlVER1gYeXXBGaA==} - - '@ethersproject/strings@5.7.0': - resolution: {integrity: sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg==} - - '@ethersproject/transactions@5.7.0': - resolution: {integrity: sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ==} - - '@ethersproject/units@5.7.0': - resolution: {integrity: sha512-pD3xLMy3SJu9kG5xDGI7+xhTEmGXlEqXU4OfNapmfnxLVY4EMSSRp7j1k7eezutBPH7RBN/7QPnwR7hzNlEFeg==} - - '@ethersproject/wallet@5.7.0': - resolution: {integrity: sha512-MhmXlJXEJFBFVKrDLB4ZdDzxcBxQ3rLyCkhNqVu3CDYvR97E+8r01UgrI+TI99Le+aYm/in/0vp86guJuM7FCA==} - - '@ethersproject/web@5.7.1': - resolution: {integrity: sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w==} - - '@ethersproject/wordlists@5.7.0': - resolution: {integrity: sha512-S2TFNJNfHWVHNE6cNDjbVlZ6MgE17MIxMbMg2zv3wn+3XSJGosL1m9ZVv3GXCf/2ymSsQ+hRI5IzoMJTG6aoVA==} - - '@humanwhocodes/config-array@0.11.14': - resolution: {integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==} - engines: {node: '>=10.10.0'} - deprecated: Use @eslint/config-array instead - - '@humanwhocodes/module-importer@1.0.1': - resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} - engines: {node: '>=12.22'} - - '@humanwhocodes/object-schema@2.0.3': - resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} - deprecated: Use @eslint/object-schema instead - - '@jridgewell/resolve-uri@3.1.2': - resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} - engines: {node: '>=6.0.0'} - - '@jridgewell/sourcemap-codec@1.4.15': - resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - - '@jridgewell/trace-mapping@0.3.9': - resolution: 
{integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - - '@noble/curves@1.4.2': - resolution: {integrity: sha512-TavHr8qycMChk8UwMld0ZDRvatedkzWfH8IiaeGCfymOP5i0hSCozz9vHOL0nkwk7HRMlFnAiKpS2jrUmSybcw==} - - '@noble/hashes@1.4.0': - resolution: {integrity: sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==} - engines: {node: '>= 16'} - - '@nodelib/fs.scandir@2.1.5': - resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} - engines: {node: '>= 8'} - - '@nodelib/fs.stat@2.0.5': - resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} - engines: {node: '>= 8'} - - '@nodelib/fs.walk@1.2.8': - resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} - engines: {node: '>= 8'} - - '@openzeppelin/contracts-upgradeable@4.9.6': - resolution: {integrity: sha512-m4iHazOsOCv1DgM7eD7GupTJ+NFVujRZt1wzddDPSVGpWdKq1SKkla5htKG7+IS4d2XOCtzkUNwRZ7Vq5aEUMA==} - - '@openzeppelin/contracts@4.9.6': - resolution: {integrity: sha512-xSmezSupL+y9VkHZJGDoCBpmnB2ogM13ccaYDWqJTfS3dbuHkgjuwDFUmaFauBCboQMGB/S5UqUl2y54X99BmA==} - - '@pkgr/core@0.1.1': - resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==} - engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - - '@pnpm/config.env-replace@1.1.0': - resolution: {integrity: sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w==} - engines: {node: '>=12.22.0'} - - '@pnpm/network.ca-file@1.0.2': - resolution: {integrity: sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==} - engines: {node: '>=12.22.0'} - - '@pnpm/npm-conf@2.2.2': - resolution: {integrity: sha512-UA91GwWPhFExt3IizW6bOeY/pQ0BkuNwKjk9iQW9KqxluGCrg4VenZ0/L+2Y0+ZOtme72EVvg6v0zo3AMQRCeA==} - engines: {node: '>=12'} - - '@scure/base@1.1.7': - resolution: {integrity: sha512-PPNYBslrLNNUQ/Yad37MHYsNQtK67EhWb6WtSvNLLPo7SdVZgkUjD6Dg+5On7zNwmskf8OX7I7Nx5oN+MIWE0g==} - - '@scure/bip32@1.4.0': - resolution: {integrity: sha512-sVUpc0Vq3tXCkDGYVWGIZTRfnvu8LoTDaev7vbwh0omSvVORONr960MQWdKqJDCReIEmTj3PAr73O3aoxz7OPg==} - - '@scure/bip39@1.3.0': - resolution: {integrity: sha512-disdg7gHuTDZtY+ZdkmLpPCk7fxZSu3gBiEGuoC1XYxv9cGx3Z6cpTggCgW6odSOOIXCiDjuGejW+aJKCY/pIQ==} - - '@sindresorhus/is@5.6.0': - resolution: {integrity: sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==} - engines: {node: '>=14.16'} - - '@solidity-parser/parser@0.18.0': - resolution: {integrity: sha512-yfORGUIPgLck41qyN7nbwJRAx17/jAIXCTanHOJZhB6PJ1iAk/84b/xlsVKFSyNyLXIj0dhppoE0+CRws7wlzA==} - - '@szmarczak/http-timer@5.0.1': - resolution: {integrity: sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==} - engines: {node: '>=14.16'} - - '@tsconfig/node10@1.0.11': - resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==} - - '@tsconfig/node12@1.0.11': - resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} - - '@tsconfig/node14@1.0.3': - resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} - - '@tsconfig/node16@1.0.4': - resolution: 
{integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} - - '@types/http-cache-semantics@4.0.4': - resolution: {integrity: sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==} - - '@types/json-schema@7.0.15': - resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} - - '@types/json5@0.0.29': - resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} - - '@types/node@20.11.30': - resolution: {integrity: sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==} - - '@types/semver@7.5.8': - resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==} - - '@typescript-eslint/eslint-plugin@7.4.0': - resolution: {integrity: sha512-yHMQ/oFaM7HZdVrVm/M2WHaNPgyuJH4WelkSVEWSSsir34kxW2kDJCxlXRhhGWEsMN0WAW/vLpKfKVcm8k+MPw==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - '@typescript-eslint/parser': ^7.0.0 - eslint: ^8.56.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/parser@7.7.0': - resolution: {integrity: sha512-fNcDm3wSwVM8QYL4HKVBggdIPAy9Q41vcvC/GtDobw3c4ndVT3K6cqudUmjHPw8EAp4ufax0o58/xvWaP2FmTg==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - eslint: ^8.56.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/scope-manager@7.4.0': - resolution: {integrity: sha512-68VqENG5HK27ypafqLVs8qO+RkNc7TezCduYrx8YJpXq2QGZ30vmNZGJJJC48+MVn4G2dCV8m5ZTVnzRexTVtw==} - engines: {node: ^18.18.0 || >=20.0.0} - - '@typescript-eslint/scope-manager@7.7.0': - resolution: {integrity: sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==} - engines: {node: ^18.18.0 || >=20.0.0} - - '@typescript-eslint/type-utils@7.4.0': - resolution: {integrity: sha512-247ETeHgr9WTRMqHbbQdzwzhuyaJ8dPTuyuUEMANqzMRB1rj/9qFIuIXK7l0FX9i9FXbHeBQl/4uz6mYuCE7Aw==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - eslint: ^8.56.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/types@7.4.0': - resolution: {integrity: sha512-mjQopsbffzJskos5B4HmbsadSJQWaRK0UxqQ7GuNA9Ga4bEKeiO6b2DnB6cM6bpc8lemaPseh0H9B/wyg+J7rw==} - engines: {node: ^18.18.0 || >=20.0.0} - - '@typescript-eslint/types@7.7.0': - resolution: {integrity: sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==} - engines: {node: ^18.18.0 || >=20.0.0} - - '@typescript-eslint/typescript-estree@7.4.0': - resolution: {integrity: sha512-A99j5AYoME/UBQ1ucEbbMEmGkN7SE0BvZFreSnTd1luq7yulcHdyGamZKizU7canpGDWGJ+Q6ZA9SyQobipePg==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/typescript-estree@7.7.0': - resolution: {integrity: sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/utils@7.4.0': - resolution: {integrity: sha512-NQt9QLM4Tt8qrlBVY9lkMYzfYtNz8/6qwZg8pI3cMGlPnj6mOpRxxAm7BMJN9K0AiY+1BwJ5lVC650YJqYOuNg==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - eslint: ^8.56.0 - - 
'@typescript-eslint/visitor-keys@7.4.0': - resolution: {integrity: sha512-0zkC7YM0iX5Y41homUUeW1CHtZR01K3ybjM1l6QczoMuay0XKtrb93kv95AxUGwdjGr64nNqnOCwmEl616N8CA==} - engines: {node: ^18.18.0 || >=20.0.0} - - '@typescript-eslint/visitor-keys@7.7.0': - resolution: {integrity: sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==} - engines: {node: ^18.18.0 || >=20.0.0} - - acorn-jsx@5.3.2: - resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} - peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - - acorn-walk@8.3.3: - resolution: {integrity: sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==} - engines: {node: '>=0.4.0'} - - acorn@8.12.0: - resolution: {integrity: sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw==} - engines: {node: '>=0.4.0'} - hasBin: true - - aes-js@3.0.0: - resolution: {integrity: sha512-H7wUZRn8WpTq9jocdxQ2c8x2sKo9ZVmzfRE13GiNJXfp7NcKYEdvl3vspKjXox6RIG2VtaRe4JFvxG4rqp2Zuw==} - - ajv@6.12.6: - resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} - - ajv@8.16.0: - resolution: {integrity: sha512-F0twR8U1ZU67JIEtekUcLkXkoO5mMMmgGD8sK/xUFzJ805jxHQl92hImFAqqXMyMYjSPOyUPAwHYhB72g5sTXw==} - - ansi-regex@5.0.1: - resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} - engines: {node: '>=8'} - - ansi-styles@3.2.1: - resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} - engines: {node: '>=4'} - - ansi-styles@4.3.0: - resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} - engines: {node: '>=8'} - - antlr4@4.13.1: - resolution: {integrity: sha512-kiXTspaRYvnIArgE97z5YVVf/cDVQABr3abFRR6mE7yesLMkgu4ujuyV/sgxafQ8wgve0DJQUJ38Z8tkgA2izA==} - engines: {node: '>=16'} - - arg@4.1.3: - resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} - - argparse@2.0.1: - resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - - array-buffer-byte-length@1.0.1: - resolution: {integrity: sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==} - engines: {node: '>= 0.4'} - - array-includes@3.1.8: - resolution: {integrity: sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==} - engines: {node: '>= 0.4'} - - array-union@2.1.0: - resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} - engines: {node: '>=8'} - - array.prototype.findlastindex@1.2.5: - resolution: {integrity: sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==} - engines: {node: '>= 0.4'} - - array.prototype.flat@1.3.2: - resolution: {integrity: sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==} - engines: {node: '>= 0.4'} - - array.prototype.flatmap@1.3.2: - resolution: {integrity: sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==} - engines: {node: '>= 0.4'} - - arraybuffer.prototype.slice@1.0.3: - resolution: {integrity: 
sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==} - engines: {node: '>= 0.4'} - - ast-parents@0.0.1: - resolution: {integrity: sha512-XHusKxKz3zoYk1ic8Un640joHbFMhbqneyoZfoKnEGtf2ey9Uh/IdpcQplODdO/kENaMIWsD0nJm4+wX3UNLHA==} - - astral-regex@2.0.0: - resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} - engines: {node: '>=8'} - - available-typed-arrays@1.0.7: - resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==} - engines: {node: '>= 0.4'} - - balanced-match@1.0.2: - resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - - bech32@1.1.4: - resolution: {integrity: sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ==} - - bignumber.js@9.1.2: - resolution: {integrity: sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==} - - bn.js@4.11.6: - resolution: {integrity: sha512-XWwnNNFCuuSQ0m3r3C4LE3EiORltHd9M05pq6FOlVeiophzRbMo50Sbz1ehl8K3Z+jw9+vmgnXefY1hz8X+2wA==} - - bn.js@4.12.0: - resolution: {integrity: sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==} - - bn.js@5.2.1: - resolution: {integrity: sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==} - - brace-expansion@1.1.11: - resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} - - brace-expansion@2.0.1: - resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} - - braces@3.0.3: - resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} - engines: {node: '>=8'} - - brorand@1.1.0: - resolution: {integrity: sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==} - - buffer-reverse@1.0.1: - resolution: {integrity: sha512-M87YIUBsZ6N924W57vDwT/aOu8hw7ZgdByz6ijksLjmHJELBASmYTTlNHRgjE+pTsT9oJXGaDSgqqwfdHotDUg==} - - builtin-modules@3.3.0: - resolution: {integrity: sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==} - engines: {node: '>=6'} - - builtins@5.1.0: - resolution: {integrity: sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==} - - cacheable-lookup@7.0.0: - resolution: {integrity: sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==} - engines: {node: '>=14.16'} - - cacheable-request@10.2.14: - resolution: {integrity: sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==} - engines: {node: '>=14.16'} - - call-bind@1.0.7: - resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==} - engines: {node: '>= 0.4'} - - callsites@3.1.0: - resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} - engines: {node: '>=6'} - - chalk@2.4.2: - resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} - engines: {node: '>=4'} - - chalk@4.1.2: - resolution: {integrity: 
sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} - engines: {node: '>=10'} - - color-convert@1.9.3: - resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} - - color-convert@2.0.1: - resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} - engines: {node: '>=7.0.0'} - - color-name@1.1.3: - resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} - - color-name@1.1.4: - resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - - command-exists@1.2.9: - resolution: {integrity: sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==} - - commander@10.0.1: - resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} - engines: {node: '>=14'} - - commander@8.3.0: - resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} - engines: {node: '>= 12'} - - concat-map@0.0.1: - resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} - - config-chain@1.1.13: - resolution: {integrity: sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==} - - cosmiconfig@8.3.6: - resolution: {integrity: sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==} - engines: {node: '>=14'} - peerDependencies: - typescript: '>=4.9.5' - peerDependenciesMeta: - typescript: - optional: true - - create-require@1.1.1: - resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - - cross-spawn@7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} - engines: {node: '>= 8'} - - crypto-js@4.2.0: - resolution: {integrity: sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==} - - data-view-buffer@1.0.1: - resolution: {integrity: sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==} - engines: {node: '>= 0.4'} - - data-view-byte-length@1.0.1: - resolution: {integrity: sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==} - engines: {node: '>= 0.4'} - - data-view-byte-offset@1.0.0: - resolution: {integrity: sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==} - engines: {node: '>= 0.4'} - - debug@3.2.7: - resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - - debug@4.3.5: - resolution: {integrity: sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - - decompress-response@6.0.0: - resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} - engines: {node: '>=10'} - - deep-extend@0.6.0: - resolution: {integrity: 
sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} - engines: {node: '>=4.0.0'} - - deep-is@0.1.4: - resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} - - defer-to-connect@2.0.1: - resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==} - engines: {node: '>=10'} - - define-data-property@1.1.4: - resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} - engines: {node: '>= 0.4'} - - define-properties@1.2.1: - resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} - engines: {node: '>= 0.4'} - - diff@4.0.2: - resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} - engines: {node: '>=0.3.1'} - - dir-glob@3.0.1: - resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} - engines: {node: '>=8'} - - doctrine@2.1.0: - resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} - engines: {node: '>=0.10.0'} - - doctrine@3.0.0: - resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} - engines: {node: '>=6.0.0'} - - ds-test@https://codeload.github.com/dapphub/ds-test/tar.gz/e282159d5170298eb2455a6c05280ab5a73a4ef0: - resolution: {tarball: https://codeload.github.com/dapphub/ds-test/tar.gz/e282159d5170298eb2455a6c05280ab5a73a4ef0} - version: 1.0.0 - - elliptic@6.5.4: - resolution: {integrity: sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==} - - emoji-regex@8.0.0: - resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - - error-ex@1.3.2: - resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} - - es-abstract@1.23.3: - resolution: {integrity: sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==} - engines: {node: '>= 0.4'} - - es-define-property@1.0.0: - resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==} - engines: {node: '>= 0.4'} - - es-errors@1.3.0: - resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} - engines: {node: '>= 0.4'} - - es-object-atoms@1.0.0: - resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==} - engines: {node: '>= 0.4'} - - es-set-tostringtag@2.0.3: - resolution: {integrity: sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==} - engines: {node: '>= 0.4'} - - es-shim-unscopables@1.0.2: - resolution: {integrity: sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==} - - es-to-primitive@1.2.1: - resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==} - engines: {node: '>= 0.4'} - - escape-string-regexp@1.0.5: - resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} - engines: {node: 
'>=0.8.0'} - - escape-string-regexp@4.0.0: - resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} - engines: {node: '>=10'} - - eslint-compat-utils@0.5.1: - resolution: {integrity: sha512-3z3vFexKIEnjHE3zCMRo6fn/e44U7T1khUjg+Hp0ZQMCigh28rALD0nPFBcGZuiLC5rLZa2ubQHDRln09JfU2Q==} - engines: {node: '>=12'} - peerDependencies: - eslint: '>=6.0.0' - - eslint-config-prettier@9.1.0: - resolution: {integrity: sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==} - hasBin: true - peerDependencies: - eslint: '>=7.0.0' - - eslint-config-standard@17.1.0: - resolution: {integrity: sha512-IwHwmaBNtDK4zDHQukFDW5u/aTb8+meQWZvNFWkiGmbWjD6bqyuSSBxxXKkCftCUzc1zwCH2m/baCNDLGmuO5Q==} - engines: {node: '>=12.0.0'} - peerDependencies: - eslint: ^8.0.1 - eslint-plugin-import: ^2.25.2 - eslint-plugin-n: '^15.0.0 || ^16.0.0 ' - eslint-plugin-promise: ^6.0.0 - - eslint-import-resolver-node@0.3.9: - resolution: {integrity: sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==} - - eslint-module-utils@2.8.1: - resolution: {integrity: sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==} - engines: {node: '>=4'} - peerDependencies: - '@typescript-eslint/parser': '*' - eslint: '*' - eslint-import-resolver-node: '*' - eslint-import-resolver-typescript: '*' - eslint-import-resolver-webpack: '*' - peerDependenciesMeta: - '@typescript-eslint/parser': - optional: true - eslint: - optional: true - eslint-import-resolver-node: - optional: true - eslint-import-resolver-typescript: - optional: true - eslint-import-resolver-webpack: - optional: true - - eslint-plugin-es-x@7.8.0: - resolution: {integrity: sha512-7Ds8+wAAoV3T+LAKeu39Y5BzXCrGKrcISfgKEqTS4BDN8SFEDQd0S43jiQ8vIa3wUKD07qitZdfzlenSi8/0qQ==} - engines: {node: ^14.18.0 || >=16.0.0} - peerDependencies: - eslint: '>=8' - - eslint-plugin-es@3.0.1: - resolution: {integrity: sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==} - engines: {node: '>=8.10.0'} - peerDependencies: - eslint: '>=4.19.1' - - eslint-plugin-import@2.28.1: - resolution: {integrity: sha512-9I9hFlITvOV55alzoKBI+K9q74kv0iKMeY6av5+umsNwayt59fz692daGyjR+oStBQgx6nwR9rXldDev3Clw+A==} - engines: {node: '>=4'} - peerDependencies: - '@typescript-eslint/parser': '*' - eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 - peerDependenciesMeta: - '@typescript-eslint/parser': - optional: true - - eslint-plugin-n@16.6.2: - resolution: {integrity: sha512-6TyDmZ1HXoFQXnhCTUjVFULReoBPOAjpuiKELMkeP40yffI/1ZRO+d9ug/VC6fqISo2WkuIBk3cvuRPALaWlOQ==} - engines: {node: '>=16.0.0'} - peerDependencies: - eslint: '>=7.0.0' - - eslint-plugin-node@11.1.0: - resolution: {integrity: sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==} - engines: {node: '>=8.10.0'} - peerDependencies: - eslint: '>=5.16.0' - - eslint-plugin-prettier@5.1.3: - resolution: {integrity: sha512-C9GCVAs4Eq7ZC/XFQHITLiHJxQngdtraXaM+LoUFoFp/lHNl2Zn8f3WQbe9HvTBBQ9YnKFB0/2Ajdqwo5D1EAw==} - engines: {node: ^14.18.0 || >=16.0.0} - peerDependencies: - '@types/eslint': '>=8.0.0' - eslint: '>=8.0.0' - eslint-config-prettier: '*' - prettier: '>=3.0.0' - peerDependenciesMeta: - '@types/eslint': - optional: true - eslint-config-prettier: - optional: true - - eslint-plugin-promise@6.1.1: - resolution: {integrity: 
sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^7.0.0 || ^8.0.0 - - eslint-scope@7.2.2: - resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - - eslint-utils@2.1.0: - resolution: {integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==} - engines: {node: '>=6'} - - eslint-visitor-keys@1.3.0: - resolution: {integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==} - engines: {node: '>=4'} - - eslint-visitor-keys@3.4.3: - resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - - eslint@8.51.0: - resolution: {integrity: sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - hasBin: true - - espree@9.6.1: - resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - - esquery@1.5.0: - resolution: {integrity: sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==} - engines: {node: '>=0.10'} - - esrecurse@4.3.0: - resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} - engines: {node: '>=4.0'} - - estraverse@5.3.0: - resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} - engines: {node: '>=4.0'} - - esutils@2.0.3: - resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} - engines: {node: '>=0.10.0'} - - ethereum-bloom-filters@1.1.0: - resolution: {integrity: sha512-J1gDRkLpuGNvWYzWslBQR9cDV4nd4kfvVTE/Wy4Kkm4yb3EYRSlyi0eB/inTsSTTVyA0+HyzHgbr95Fn/Z1fSw==} - - ethereum-cryptography@2.2.1: - resolution: {integrity: sha512-r/W8lkHSiTLxUxW8Rf3u4HGB0xQweG2RyETjywylKZSzLWoWAijRz8WCuOtJ6wah+avllXBqZuk29HCCvhEIRg==} - - ethers@5.7.2: - resolution: {integrity: sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg==} - - ethjs-unit@0.1.6: - resolution: {integrity: sha512-/Sn9Y0oKl0uqQuvgFk/zQgR7aw1g36qX/jzSQ5lSwlO0GigPymk4eGQfeNTD03w1dPOqfz8V77Cy43jH56pagw==} - engines: {node: '>=6.5.0', npm: '>=3'} - - fast-deep-equal@3.1.3: - resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} - - fast-diff@1.3.0: - resolution: {integrity: sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==} - - fast-glob@3.3.2: - resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} - engines: {node: '>=8.6.0'} - - fast-json-stable-stringify@2.1.0: - resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} - - fast-levenshtein@2.0.6: - resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} - - fastq@1.17.1: - resolution: {integrity: 
sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} - - file-entry-cache@6.0.1: - resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} - engines: {node: ^10.12.0 || >=12.0.0} - - fill-range@7.1.1: - resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} - engines: {node: '>=8'} - - find-up@5.0.0: - resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} - engines: {node: '>=10'} - - flat-cache@3.2.0: - resolution: {integrity: sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==} - engines: {node: ^10.12.0 || >=12.0.0} - - flatted@3.3.1: - resolution: {integrity: sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==} - - follow-redirects@1.15.6: - resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} - engines: {node: '>=4.0'} - peerDependencies: - debug: '*' - peerDependenciesMeta: - debug: - optional: true - - for-each@0.3.3: - resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} - - forge-std@https://codeload.github.com/foundry-rs/forge-std/tar.gz/36c303b7ffdd842d06b1ec2744c9b9b5fb3083f3: - resolution: {tarball: https://codeload.github.com/foundry-rs/forge-std/tar.gz/36c303b7ffdd842d06b1ec2744c9b9b5fb3083f3} - version: 1.7.5 - - form-data-encoder@2.1.4: - resolution: {integrity: sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==} - engines: {node: '>= 14.17'} - - fs.realpath@1.0.0: - resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} - - function-bind@1.1.2: - resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} - - function.prototype.name@1.1.6: - resolution: {integrity: sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==} - engines: {node: '>= 0.4'} - - functions-have-names@1.2.3: - resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} - - get-intrinsic@1.2.4: - resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==} - engines: {node: '>= 0.4'} - - get-stream@6.0.1: - resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} - engines: {node: '>=10'} - - get-symbol-description@1.0.2: - resolution: {integrity: sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==} - engines: {node: '>= 0.4'} - - get-tsconfig@4.7.5: - resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==} - - glob-parent@5.1.2: - resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} - engines: {node: '>= 6'} - - glob-parent@6.0.2: - resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} - engines: {node: '>=10.13.0'} - - glob@7.2.3: - resolution: {integrity: 
sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} - deprecated: Glob versions prior to v9 are no longer supported - - glob@8.1.0: - resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==} - engines: {node: '>=12'} - deprecated: Glob versions prior to v9 are no longer supported - - globals@13.24.0: - resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==} - engines: {node: '>=8'} - - globalthis@1.0.4: - resolution: {integrity: sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==} - engines: {node: '>= 0.4'} - - globby@11.1.0: - resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} - engines: {node: '>=10'} - - gopd@1.0.1: - resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} - - got@12.6.1: - resolution: {integrity: sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==} - engines: {node: '>=14.16'} - - graceful-fs@4.2.10: - resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} - - graphemer@1.4.0: - resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} - - has-bigints@1.0.2: - resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} - - has-flag@3.0.0: - resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} - engines: {node: '>=4'} - - has-flag@4.0.0: - resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} - engines: {node: '>=8'} - - has-property-descriptors@1.0.2: - resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==} - - has-proto@1.0.3: - resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==} - engines: {node: '>= 0.4'} - - has-symbols@1.0.3: - resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} - engines: {node: '>= 0.4'} - - has-tostringtag@1.0.2: - resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} - engines: {node: '>= 0.4'} - - has@1.0.4: - resolution: {integrity: sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ==} - engines: {node: '>= 0.4.0'} - - hash.js@1.1.7: - resolution: {integrity: sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==} - - hasown@2.0.2: - resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} - engines: {node: '>= 0.4'} - - hmac-drbg@1.0.1: - resolution: {integrity: sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==} - - http-cache-semantics@4.1.1: - resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==} - - http2-wrapper@2.2.1: - resolution: {integrity: 
sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==} - engines: {node: '>=10.19.0'} - - ignore@5.3.1: - resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} - engines: {node: '>= 4'} - - import-fresh@3.3.0: - resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} - engines: {node: '>=6'} - - imurmurhash@0.1.4: - resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} - engines: {node: '>=0.8.19'} - - inflight@1.0.6: - resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} - deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. - - inherits@2.0.4: - resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - - ini@1.3.8: - resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} - - internal-slot@1.0.7: - resolution: {integrity: sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==} - engines: {node: '>= 0.4'} - - is-array-buffer@3.0.4: - resolution: {integrity: sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==} - engines: {node: '>= 0.4'} - - is-arrayish@0.2.1: - resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} - - is-bigint@1.0.4: - resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} - - is-boolean-object@1.1.2: - resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} - engines: {node: '>= 0.4'} - - is-builtin-module@3.2.1: - resolution: {integrity: sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==} - engines: {node: '>=6'} - - is-callable@1.2.7: - resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} - engines: {node: '>= 0.4'} - - is-core-module@2.14.0: - resolution: {integrity: sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==} - engines: {node: '>= 0.4'} - - is-data-view@1.0.1: - resolution: {integrity: sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==} - engines: {node: '>= 0.4'} - - is-date-object@1.0.5: - resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} - engines: {node: '>= 0.4'} - - is-extglob@2.1.1: - resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} - engines: {node: '>=0.10.0'} - - is-fullwidth-code-point@3.0.0: - resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} - engines: {node: '>=8'} - - is-glob@4.0.3: - resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} - engines: {node: '>=0.10.0'} - - is-hex-prefixed@1.0.0: - 
resolution: {integrity: sha512-WvtOiug1VFrE9v1Cydwm+FnXd3+w9GaeVUss5W4v/SLy3UW00vP+6iNF2SdnfiBoLy4bTqVdkftNGTUeOFVsbA==} - engines: {node: '>=6.5.0', npm: '>=3'} - - is-negative-zero@2.0.3: - resolution: {integrity: sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==} - engines: {node: '>= 0.4'} - - is-number-object@1.0.7: - resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} - engines: {node: '>= 0.4'} - - is-number@7.0.0: - resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} - engines: {node: '>=0.12.0'} - - is-path-inside@3.0.3: - resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} - engines: {node: '>=8'} - - is-regex@1.1.4: - resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} - engines: {node: '>= 0.4'} - - is-shared-array-buffer@1.0.3: - resolution: {integrity: sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==} - engines: {node: '>= 0.4'} - - is-string@1.0.7: - resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} - engines: {node: '>= 0.4'} - - is-symbol@1.0.4: - resolution: {integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==} - engines: {node: '>= 0.4'} - - is-typed-array@1.1.13: - resolution: {integrity: sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==} - engines: {node: '>= 0.4'} - - is-weakref@1.0.2: - resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} - - isarray@2.0.5: - resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==} - - isexe@2.0.0: - resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - - js-sha3@0.8.0: - resolution: {integrity: sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==} - - js-tokens@4.0.0: - resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - - js-yaml@4.1.0: - resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} - hasBin: true - - json-buffer@3.0.1: - resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} - - json-parse-even-better-errors@2.3.1: - resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - - json-schema-traverse@0.4.1: - resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} - - json-schema-traverse@1.0.0: - resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} - - json-stable-stringify-without-jsonify@1.0.1: - resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} - - json5@1.0.2: - resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==} - 
hasBin: true - - keyv@4.5.4: - resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} - - latest-version@7.0.0: - resolution: {integrity: sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==} - engines: {node: '>=14.16'} - - levn@0.4.1: - resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} - engines: {node: '>= 0.8.0'} - - lines-and-columns@1.2.4: - resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} - - locate-path@6.0.0: - resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} - engines: {node: '>=10'} - - lodash.merge@4.6.2: - resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} - - lodash.truncate@4.4.2: - resolution: {integrity: sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==} - - lodash@4.17.21: - resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} - - lowercase-keys@3.0.0: - resolution: {integrity: sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - make-error@1.3.6: - resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - - memorystream@0.3.1: - resolution: {integrity: sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==} - engines: {node: '>= 0.10.0'} - - merge2@1.4.1: - resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} - engines: {node: '>= 8'} - - merkletreejs@0.3.11: - resolution: {integrity: sha512-LJKTl4iVNTndhL+3Uz/tfkjD0klIWsHlUzgtuNnNrsf7bAlXR30m+xYB7lHr5Z/l6e/yAIsr26Dabx6Buo4VGQ==} - engines: {node: '>= 7.6.0'} - - micro-ftch@0.3.1: - resolution: {integrity: sha512-/0LLxhzP0tfiR5hcQebtudP56gUurs2CLkGarnCiB/OqEyUFQ6U3paQi/tgLv0hBJYt2rnr9MNpxz4fiiugstg==} - - micromatch@4.0.7: - resolution: {integrity: sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==} - engines: {node: '>=8.6'} - - mimic-response@3.1.0: - resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} - engines: {node: '>=10'} - - mimic-response@4.0.0: - resolution: {integrity: sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - minimalistic-assert@1.0.1: - resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==} - - minimalistic-crypto-utils@1.0.1: - resolution: {integrity: sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==} - - minimatch@3.1.2: - resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} - - minimatch@5.1.6: - resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} - engines: {node: '>=10'} - - minimatch@9.0.3: - resolution: {integrity: 
sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==} - engines: {node: '>=16 || 14 >=14.17'} - - minimatch@9.0.5: - resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} - engines: {node: '>=16 || 14 >=14.17'} - - minimist@1.2.8: - resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - - ms@2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - - ms@2.1.3: - resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - - natural-compare@1.4.0: - resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} - - normalize-url@8.0.1: - resolution: {integrity: sha512-IO9QvjUMWxPQQhs60oOu10CRkWCiZzSUkzbXGGV9pviYl1fXYcvkzQ5jV9z8Y6un8ARoVRl4EtC6v6jNqbaJ/w==} - engines: {node: '>=14.16'} - - number-to-bn@1.7.0: - resolution: {integrity: sha512-wsJ9gfSz1/s4ZsJN01lyonwuxA1tml6X1yBDnfpMglypcBRFZZkus26EdPSlqS5GJfYddVZa22p3VNb3z5m5Ig==} - engines: {node: '>=6.5.0', npm: '>=3'} - - object-inspect@1.13.2: - resolution: {integrity: sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==} - engines: {node: '>= 0.4'} - - object-keys@1.1.1: - resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} - engines: {node: '>= 0.4'} - - object.assign@4.1.5: - resolution: {integrity: sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==} - engines: {node: '>= 0.4'} - - object.fromentries@2.0.8: - resolution: {integrity: sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==} - engines: {node: '>= 0.4'} - - object.groupby@1.0.3: - resolution: {integrity: sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==} - engines: {node: '>= 0.4'} - - object.values@1.2.0: - resolution: {integrity: sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==} - engines: {node: '>= 0.4'} - - once@1.4.0: - resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - - optionator@0.9.4: - resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} - engines: {node: '>= 0.8.0'} - - os-tmpdir@1.0.2: - resolution: {integrity: sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} - engines: {node: '>=0.10.0'} - - p-cancelable@3.0.0: - resolution: {integrity: sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==} - engines: {node: '>=12.20'} - - p-limit@3.1.0: - resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} - engines: {node: '>=10'} - - p-locate@5.0.0: - resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} - engines: {node: '>=10'} - - p256-verifier#v0.1.0@https://codeload.github.com/taikoxyz/p256-verifier/tar.gz/6ef45b117642786b08a37b4c37c6a6ce151166da: - resolution: {tarball: 
https://codeload.github.com/taikoxyz/p256-verifier/tar.gz/6ef45b117642786b08a37b4c37c6a6ce151166da} - version: 0.0.0 - - package-json@8.1.1: - resolution: {integrity: sha512-cbH9IAIJHNj9uXi196JVsRlt7cHKak6u/e6AkL/bkRelZ7rlL3X1YKxsZwa36xipOEKAsdtmaG6aAJoM1fx2zA==} - engines: {node: '>=14.16'} - - parent-module@1.0.1: - resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} - engines: {node: '>=6'} - - parse-json@5.2.0: - resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} - engines: {node: '>=8'} - - path-exists@4.0.0: - resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} - engines: {node: '>=8'} - - path-is-absolute@1.0.1: - resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} - engines: {node: '>=0.10.0'} - - path-key@3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} - - path-parse@1.0.7: - resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - - path-type@4.0.0: - resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} - engines: {node: '>=8'} - - picocolors@1.0.1: - resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} - - picomatch@2.3.1: - resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} - engines: {node: '>=8.6'} - - pluralize@8.0.0: - resolution: {integrity: sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==} - engines: {node: '>=4'} - - possible-typed-array-names@1.0.0: - resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==} - engines: {node: '>= 0.4'} - - prelude-ls@1.2.1: - resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} - engines: {node: '>= 0.8.0'} - - prettier-linter-helpers@1.0.0: - resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} - engines: {node: '>=6.0.0'} - - prettier@2.8.8: - resolution: {integrity: sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==} - engines: {node: '>=10.13.0'} - hasBin: true - - prettier@3.0.3: - resolution: {integrity: sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg==} - engines: {node: '>=14'} - hasBin: true - - proto-list@1.2.4: - resolution: {integrity: sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==} - - punycode@2.3.1: - resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} - engines: {node: '>=6'} - - queue-microtask@1.2.3: - resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - - quick-lru@5.1.1: - resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} - engines: {node: '>=10'} - - randombytes@2.1.0: - resolution: 
{integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} - - rc@1.2.8: - resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} - hasBin: true - - regexp.prototype.flags@1.5.2: - resolution: {integrity: sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==} - engines: {node: '>= 0.4'} - - regexpp@3.2.0: - resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==} - engines: {node: '>=8'} - - registry-auth-token@5.0.2: - resolution: {integrity: sha512-o/3ikDxtXaA59BmZuZrJZDJv8NMDGSj+6j6XaeBmHw8eY1i1qd9+6H+LjVvQXx3HN6aRCGa1cUdJ9RaJZUugnQ==} - engines: {node: '>=14'} - - registry-url@6.0.1: - resolution: {integrity: sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q==} - engines: {node: '>=12'} - - require-from-string@2.0.2: - resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} - engines: {node: '>=0.10.0'} - - resolve-alpn@1.2.1: - resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==} - - resolve-from@4.0.0: - resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} - engines: {node: '>=4'} - - resolve-pkg-maps@1.0.0: - resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} - - resolve@1.22.8: - resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} - hasBin: true - - responselike@3.0.0: - resolution: {integrity: sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==} - engines: {node: '>=14.16'} - - reusify@1.0.4: - resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} - engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - - rimraf@3.0.2: - resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} - deprecated: Rimraf versions prior to v4 are no longer supported - hasBin: true - - run-parallel@1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - - safe-array-concat@1.1.2: - resolution: {integrity: sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==} - engines: {node: '>=0.4'} - - safe-buffer@5.2.1: - resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - - safe-regex-test@1.0.3: - resolution: {integrity: sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==} - engines: {node: '>= 0.4'} - - scrypt-js@3.0.1: - resolution: {integrity: sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA==} - - semver@5.7.2: - resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} - hasBin: true - - semver@6.3.1: - resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} - hasBin: true - - semver@7.6.2: - resolution: {integrity: 
sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==} - engines: {node: '>=10'} - hasBin: true - - set-function-length@1.2.2: - resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==} - engines: {node: '>= 0.4'} - - set-function-name@2.0.2: - resolution: {integrity: sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==} - engines: {node: '>= 0.4'} - - shebang-command@2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} - - shebang-regex@3.0.0: - resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - - side-channel@1.0.6: - resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==} - engines: {node: '>= 0.4'} - - slash@3.0.0: - resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} - engines: {node: '>=8'} - - slice-ansi@4.0.0: - resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} - engines: {node: '>=10'} - - solady@https://codeload.github.com/Vectorized/solady/tar.gz/de0f336d2033d04e0f77c923d639c7fbffd48b6d: - resolution: {tarball: https://codeload.github.com/Vectorized/solady/tar.gz/de0f336d2033d04e0f77c923d639c7fbffd48b6d} - version: 0.0.167 - - solc@0.8.24: - resolution: {integrity: sha512-G5yUqjTUPc8Np74sCFwfsevhBPlUifUOfhYrgyu6CmYlC6feSw0YS6eZW47XDT23k3JYdKx5nJ+Q7whCEmNcoA==} - engines: {node: '>=10.0.0'} - hasBin: true - - solhint@5.0.1: - resolution: {integrity: sha512-QeQLS9HGCnIiibt+xiOa/+MuP7BWz9N7C5+Mj9pLHshdkNhuo3AzCpWmjfWVZBUuwIUO3YyCRVIcYLR3YOKGfg==} - hasBin: true - - string-width@4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} - engines: {node: '>=8'} - - string.prototype.trim@1.2.9: - resolution: {integrity: sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==} - engines: {node: '>= 0.4'} - - string.prototype.trimend@1.0.8: - resolution: {integrity: sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==} - - string.prototype.trimstart@1.0.8: - resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==} - engines: {node: '>= 0.4'} - - strip-ansi@6.0.1: - resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} - engines: {node: '>=8'} - - strip-bom@3.0.0: - resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} - engines: {node: '>=4'} - - strip-hex-prefix@1.0.0: - resolution: {integrity: sha512-q8d4ue7JGEiVcypji1bALTos+0pWtyGlivAWyPuTkHzuTCJqrK9sWxYQZUq6Nq3cuyv3bm734IhHvHtGGURU6A==} - engines: {node: '>=6.5.0', npm: '>=3'} - - strip-json-comments@2.0.1: - resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} - engines: {node: '>=0.10.0'} - - strip-json-comments@3.1.1: - resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} - engines: {node: '>=8'} - - 
supports-color@5.5.0: - resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} - engines: {node: '>=4'} - - supports-color@7.2.0: - resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} - engines: {node: '>=8'} - - supports-preserve-symlinks-flag@1.0.0: - resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} - engines: {node: '>= 0.4'} - - synckit@0.8.8: - resolution: {integrity: sha512-HwOKAP7Wc5aRGYdKH+dw0PRRpbO841v2DENBtjnR5HFWoiNByAl7vrx3p0G/rCyYXQsrxqtX48TImFtPcIHSpQ==} - engines: {node: ^14.18.0 || >=16.0.0} - - table@6.8.2: - resolution: {integrity: sha512-w2sfv80nrAh2VCbqR5AK27wswXhqcck2AhfnNW76beQXskGZ1V12GwS//yYVa3d3fcvAip2OUnbDAjW2k3v9fA==} - engines: {node: '>=10.0.0'} - - text-table@0.2.0: - resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} - - tmp@0.0.33: - resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} - engines: {node: '>=0.6.0'} - - to-regex-range@5.0.1: - resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} - engines: {node: '>=8.0'} - - treeify@1.1.0: - resolution: {integrity: sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==} - engines: {node: '>=0.6'} - - ts-api-utils@1.3.0: - resolution: {integrity: sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==} - engines: {node: '>=16'} - peerDependencies: - typescript: '>=4.2.0' - - ts-node@10.9.2: - resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} - hasBin: true - peerDependencies: - '@swc/core': '>=1.2.50' - '@swc/wasm': '>=1.2.50' - '@types/node': '*' - typescript: '>=2.7' - peerDependenciesMeta: - '@swc/core': - optional: true - '@swc/wasm': - optional: true - - tsconfig-paths@3.15.0: - resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==} - - tslib@2.6.3: - resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==} - - type-check@0.4.0: - resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} - engines: {node: '>= 0.8.0'} - - type-fest@0.20.2: - resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} - engines: {node: '>=10'} - - typed-array-buffer@1.0.2: - resolution: {integrity: sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==} - engines: {node: '>= 0.4'} - - typed-array-byte-length@1.0.1: - resolution: {integrity: sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==} - engines: {node: '>= 0.4'} - - typed-array-byte-offset@1.0.2: - resolution: {integrity: sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==} - engines: {node: '>= 0.4'} - - typed-array-length@1.0.6: - resolution: {integrity: sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==} - engines: {node: '>= 0.4'} - - typescript@5.2.2: - resolution: {integrity: 
sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} - engines: {node: '>=14.17'} - hasBin: true - - unbox-primitive@1.0.2: - resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} - - undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - - uri-js@4.4.1: - resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} - - utf8@3.0.0: - resolution: {integrity: sha512-E8VjFIQ/TyQgp+TZfS6l8yp/xWppSAHzidGiRrqe4bK4XP9pTRyKFgGJpO3SN7zdX4DeomTrwaseCHovfpFcqQ==} - - v8-compile-cache-lib@3.0.1: - resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - - web3-utils@1.10.4: - resolution: {integrity: sha512-tsu8FiKJLk2PzhDl9fXbGUWTkkVXYhtTA+SmEFkKft+9BgwLxfCRpU96sWv7ICC8zixBNd3JURVoiR3dUXgP8A==} - engines: {node: '>=8.0.0'} - - which-boxed-primitive@1.0.2: - resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} - - which-typed-array@1.1.15: - resolution: {integrity: sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==} - engines: {node: '>= 0.4'} - - which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true - - word-wrap@1.2.5: - resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} - engines: {node: '>=0.10.0'} - - wrappy@1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - - ws@7.4.6: - resolution: {integrity: sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==} - engines: {node: '>=8.3.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - - yn@3.1.1: - resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} - engines: {node: '>=6'} - - yocto-queue@0.1.0: - resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} - engines: {node: '>=10'} - -snapshots: - - '@babel/code-frame@7.24.7': - dependencies: - '@babel/highlight': 7.24.7 - picocolors: 1.0.1 - - '@babel/helper-validator-identifier@7.24.7': {} - - '@babel/highlight@7.24.7': - dependencies: - '@babel/helper-validator-identifier': 7.24.7 - chalk: 2.4.2 - js-tokens: 4.0.0 - picocolors: 1.0.1 - - '@cspotcode/source-map-support@0.8.1': - dependencies: - '@jridgewell/trace-mapping': 0.3.9 - - '@eslint-community/eslint-utils@4.4.0(eslint@8.51.0)': - dependencies: - eslint: 8.51.0 - eslint-visitor-keys: 3.4.3 - - '@eslint-community/regexpp@4.11.0': {} - - '@eslint/eslintrc@2.1.4': - dependencies: - ajv: 6.12.6 - debug: 4.3.5 - espree: 9.6.1 - globals: 13.24.0 - ignore: 5.3.1 - import-fresh: 3.3.0 - js-yaml: 4.1.0 - minimatch: 3.1.2 - strip-json-comments: 3.1.1 - transitivePeerDependencies: - - supports-color - - '@eslint/js@8.51.0': {} - - '@ethereumjs/rlp@4.0.1': {} - - '@ethereumjs/util@8.1.0': - dependencies: - '@ethereumjs/rlp': 4.0.1 - ethereum-cryptography: 2.2.1 - micro-ftch: 0.3.1 
- - '@ethersproject/abi@5.7.0': - dependencies: - '@ethersproject/address': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/constants': 5.7.0 - '@ethersproject/hash': 5.7.0 - '@ethersproject/keccak256': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/strings': 5.7.0 - - '@ethersproject/abstract-provider@5.7.0': - dependencies: - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/networks': 5.7.1 - '@ethersproject/properties': 5.7.0 - '@ethersproject/transactions': 5.7.0 - '@ethersproject/web': 5.7.1 - - '@ethersproject/abstract-signer@5.7.0': - dependencies: - '@ethersproject/abstract-provider': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - - '@ethersproject/address@5.7.0': - dependencies: - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/keccak256': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/rlp': 5.7.0 - - '@ethersproject/base64@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - - '@ethersproject/basex@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/properties': 5.7.0 - - '@ethersproject/bignumber@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - bn.js: 5.2.1 - - '@ethersproject/bytes@5.7.0': - dependencies: - '@ethersproject/logger': 5.7.0 - - '@ethersproject/constants@5.7.0': - dependencies: - '@ethersproject/bignumber': 5.7.0 - - '@ethersproject/contracts@5.7.0': - dependencies: - '@ethersproject/abi': 5.7.0 - '@ethersproject/abstract-provider': 5.7.0 - '@ethersproject/abstract-signer': 5.7.0 - '@ethersproject/address': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/constants': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/transactions': 5.7.0 - - '@ethersproject/hash@5.7.0': - dependencies: - '@ethersproject/abstract-signer': 5.7.0 - '@ethersproject/address': 5.7.0 - '@ethersproject/base64': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/keccak256': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/strings': 5.7.0 - - '@ethersproject/hdnode@5.7.0': - dependencies: - '@ethersproject/abstract-signer': 5.7.0 - '@ethersproject/basex': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/pbkdf2': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/sha2': 5.7.0 - '@ethersproject/signing-key': 5.7.0 - '@ethersproject/strings': 5.7.0 - '@ethersproject/transactions': 5.7.0 - '@ethersproject/wordlists': 5.7.0 - - '@ethersproject/json-wallets@5.7.0': - dependencies: - '@ethersproject/abstract-signer': 5.7.0 - '@ethersproject/address': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/hdnode': 5.7.0 - '@ethersproject/keccak256': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/pbkdf2': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/random': 5.7.0 - '@ethersproject/strings': 5.7.0 - '@ethersproject/transactions': 5.7.0 - aes-js: 3.0.0 - scrypt-js: 3.0.1 - - '@ethersproject/keccak256@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - js-sha3: 0.8.0 - - '@ethersproject/logger@5.7.0': {} - - '@ethersproject/networks@5.7.1': - 
dependencies: - '@ethersproject/logger': 5.7.0 - - '@ethersproject/pbkdf2@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/sha2': 5.7.0 - - '@ethersproject/properties@5.7.0': - dependencies: - '@ethersproject/logger': 5.7.0 - - '@ethersproject/providers@5.7.2': - dependencies: - '@ethersproject/abstract-provider': 5.7.0 - '@ethersproject/abstract-signer': 5.7.0 - '@ethersproject/address': 5.7.0 - '@ethersproject/base64': 5.7.0 - '@ethersproject/basex': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/constants': 5.7.0 - '@ethersproject/hash': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/networks': 5.7.1 - '@ethersproject/properties': 5.7.0 - '@ethersproject/random': 5.7.0 - '@ethersproject/rlp': 5.7.0 - '@ethersproject/sha2': 5.7.0 - '@ethersproject/strings': 5.7.0 - '@ethersproject/transactions': 5.7.0 - '@ethersproject/web': 5.7.1 - bech32: 1.1.4 - ws: 7.4.6 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - '@ethersproject/random@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - - '@ethersproject/rlp@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - - '@ethersproject/sha2@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - hash.js: 1.1.7 - - '@ethersproject/signing-key@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - bn.js: 5.2.1 - elliptic: 6.5.4 - hash.js: 1.1.7 - - '@ethersproject/solidity@5.7.0': - dependencies: - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/keccak256': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/sha2': 5.7.0 - '@ethersproject/strings': 5.7.0 - - '@ethersproject/strings@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/constants': 5.7.0 - '@ethersproject/logger': 5.7.0 - - '@ethersproject/transactions@5.7.0': - dependencies: - '@ethersproject/address': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/constants': 5.7.0 - '@ethersproject/keccak256': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/rlp': 5.7.0 - '@ethersproject/signing-key': 5.7.0 - - '@ethersproject/units@5.7.0': - dependencies: - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/constants': 5.7.0 - '@ethersproject/logger': 5.7.0 - - '@ethersproject/wallet@5.7.0': - dependencies: - '@ethersproject/abstract-provider': 5.7.0 - '@ethersproject/abstract-signer': 5.7.0 - '@ethersproject/address': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/hash': 5.7.0 - '@ethersproject/hdnode': 5.7.0 - '@ethersproject/json-wallets': 5.7.0 - '@ethersproject/keccak256': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/random': 5.7.0 - '@ethersproject/signing-key': 5.7.0 - '@ethersproject/transactions': 5.7.0 - '@ethersproject/wordlists': 5.7.0 - - '@ethersproject/web@5.7.1': - dependencies: - '@ethersproject/base64': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/strings': 5.7.0 - - '@ethersproject/wordlists@5.7.0': - dependencies: - '@ethersproject/bytes': 5.7.0 - '@ethersproject/hash': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/strings': 5.7.0 - - 
'@humanwhocodes/config-array@0.11.14': - dependencies: - '@humanwhocodes/object-schema': 2.0.3 - debug: 4.3.5 - minimatch: 3.1.2 - transitivePeerDependencies: - - supports-color - - '@humanwhocodes/module-importer@1.0.1': {} - - '@humanwhocodes/object-schema@2.0.3': {} - - '@jridgewell/resolve-uri@3.1.2': {} - - '@jridgewell/sourcemap-codec@1.4.15': {} - - '@jridgewell/trace-mapping@0.3.9': - dependencies: - '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.4.15 - - '@noble/curves@1.4.2': - dependencies: - '@noble/hashes': 1.4.0 - - '@noble/hashes@1.4.0': {} - - '@nodelib/fs.scandir@2.1.5': - dependencies: - '@nodelib/fs.stat': 2.0.5 - run-parallel: 1.2.0 - - '@nodelib/fs.stat@2.0.5': {} - - '@nodelib/fs.walk@1.2.8': - dependencies: - '@nodelib/fs.scandir': 2.1.5 - fastq: 1.17.1 - - '@openzeppelin/contracts-upgradeable@4.9.6': {} - - '@openzeppelin/contracts@4.9.6': {} - - '@pkgr/core@0.1.1': {} - - '@pnpm/config.env-replace@1.1.0': {} - - '@pnpm/network.ca-file@1.0.2': - dependencies: - graceful-fs: 4.2.10 - - '@pnpm/npm-conf@2.2.2': - dependencies: - '@pnpm/config.env-replace': 1.1.0 - '@pnpm/network.ca-file': 1.0.2 - config-chain: 1.1.13 - - '@scure/base@1.1.7': {} - - '@scure/bip32@1.4.0': - dependencies: - '@noble/curves': 1.4.2 - '@noble/hashes': 1.4.0 - '@scure/base': 1.1.7 - - '@scure/bip39@1.3.0': - dependencies: - '@noble/hashes': 1.4.0 - '@scure/base': 1.1.7 - - '@sindresorhus/is@5.6.0': {} - - '@solidity-parser/parser@0.18.0': {} - - '@szmarczak/http-timer@5.0.1': - dependencies: - defer-to-connect: 2.0.1 - - '@tsconfig/node10@1.0.11': {} - - '@tsconfig/node12@1.0.11': {} - - '@tsconfig/node14@1.0.3': {} - - '@tsconfig/node16@1.0.4': {} - - '@types/http-cache-semantics@4.0.4': {} - - '@types/json-schema@7.0.15': {} - - '@types/json5@0.0.29': {} - - '@types/node@20.11.30': - dependencies: - undici-types: 5.26.5 - - '@types/semver@7.5.8': {} - - '@typescript-eslint/eslint-plugin@7.4.0(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint@8.51.0)(typescript@5.2.2)': - dependencies: - '@eslint-community/regexpp': 4.11.0 - '@typescript-eslint/parser': 7.7.0(eslint@8.51.0)(typescript@5.2.2) - '@typescript-eslint/scope-manager': 7.4.0 - '@typescript-eslint/type-utils': 7.4.0(eslint@8.51.0)(typescript@5.2.2) - '@typescript-eslint/utils': 7.4.0(eslint@8.51.0)(typescript@5.2.2) - '@typescript-eslint/visitor-keys': 7.4.0 - debug: 4.3.5 - eslint: 8.51.0 - graphemer: 1.4.0 - ignore: 5.3.1 - natural-compare: 1.4.0 - semver: 7.6.2 - ts-api-utils: 1.3.0(typescript@5.2.2) - optionalDependencies: - typescript: 5.2.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2)': - dependencies: - '@typescript-eslint/scope-manager': 7.7.0 - '@typescript-eslint/types': 7.7.0 - '@typescript-eslint/typescript-estree': 7.7.0(typescript@5.2.2) - '@typescript-eslint/visitor-keys': 7.7.0 - debug: 4.3.5 - eslint: 8.51.0 - optionalDependencies: - typescript: 5.2.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/scope-manager@7.4.0': - dependencies: - '@typescript-eslint/types': 7.4.0 - '@typescript-eslint/visitor-keys': 7.4.0 - - '@typescript-eslint/scope-manager@7.7.0': - dependencies: - '@typescript-eslint/types': 7.7.0 - '@typescript-eslint/visitor-keys': 7.7.0 - - '@typescript-eslint/type-utils@7.4.0(eslint@8.51.0)(typescript@5.2.2)': - dependencies: - '@typescript-eslint/typescript-estree': 7.4.0(typescript@5.2.2) - '@typescript-eslint/utils': 7.4.0(eslint@8.51.0)(typescript@5.2.2) - 
debug: 4.3.5 - eslint: 8.51.0 - ts-api-utils: 1.3.0(typescript@5.2.2) - optionalDependencies: - typescript: 5.2.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/types@7.4.0': {} - - '@typescript-eslint/types@7.7.0': {} - - '@typescript-eslint/typescript-estree@7.4.0(typescript@5.2.2)': - dependencies: - '@typescript-eslint/types': 7.4.0 - '@typescript-eslint/visitor-keys': 7.4.0 - debug: 4.3.5 - globby: 11.1.0 - is-glob: 4.0.3 - minimatch: 9.0.3 - semver: 7.6.2 - ts-api-utils: 1.3.0(typescript@5.2.2) - optionalDependencies: - typescript: 5.2.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/typescript-estree@7.7.0(typescript@5.2.2)': - dependencies: - '@typescript-eslint/types': 7.7.0 - '@typescript-eslint/visitor-keys': 7.7.0 - debug: 4.3.5 - globby: 11.1.0 - is-glob: 4.0.3 - minimatch: 9.0.5 - semver: 7.6.2 - ts-api-utils: 1.3.0(typescript@5.2.2) - optionalDependencies: - typescript: 5.2.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/utils@7.4.0(eslint@8.51.0)(typescript@5.2.2)': - dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@8.51.0) - '@types/json-schema': 7.0.15 - '@types/semver': 7.5.8 - '@typescript-eslint/scope-manager': 7.4.0 - '@typescript-eslint/types': 7.4.0 - '@typescript-eslint/typescript-estree': 7.4.0(typescript@5.2.2) - eslint: 8.51.0 - semver: 7.6.2 - transitivePeerDependencies: - - supports-color - - typescript - - '@typescript-eslint/visitor-keys@7.4.0': - dependencies: - '@typescript-eslint/types': 7.4.0 - eslint-visitor-keys: 3.4.3 - - '@typescript-eslint/visitor-keys@7.7.0': - dependencies: - '@typescript-eslint/types': 7.7.0 - eslint-visitor-keys: 3.4.3 - - acorn-jsx@5.3.2(acorn@8.12.0): - dependencies: - acorn: 8.12.0 - - acorn-walk@8.3.3: - dependencies: - acorn: 8.12.0 - - acorn@8.12.0: {} - - aes-js@3.0.0: {} - - ajv@6.12.6: - dependencies: - fast-deep-equal: 3.1.3 - fast-json-stable-stringify: 2.1.0 - json-schema-traverse: 0.4.1 - uri-js: 4.4.1 - - ajv@8.16.0: - dependencies: - fast-deep-equal: 3.1.3 - json-schema-traverse: 1.0.0 - require-from-string: 2.0.2 - uri-js: 4.4.1 - - ansi-regex@5.0.1: {} - - ansi-styles@3.2.1: - dependencies: - color-convert: 1.9.3 - - ansi-styles@4.3.0: - dependencies: - color-convert: 2.0.1 - - antlr4@4.13.1: {} - - arg@4.1.3: {} - - argparse@2.0.1: {} - - array-buffer-byte-length@1.0.1: - dependencies: - call-bind: 1.0.7 - is-array-buffer: 3.0.4 - - array-includes@3.1.8: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-object-atoms: 1.0.0 - get-intrinsic: 1.2.4 - is-string: 1.0.7 - - array-union@2.1.0: {} - - array.prototype.findlastindex@1.2.5: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-errors: 1.3.0 - es-object-atoms: 1.0.0 - es-shim-unscopables: 1.0.2 - - array.prototype.flat@1.3.2: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-shim-unscopables: 1.0.2 - - array.prototype.flatmap@1.3.2: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-shim-unscopables: 1.0.2 - - arraybuffer.prototype.slice@1.0.3: - dependencies: - array-buffer-byte-length: 1.0.1 - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-errors: 1.3.0 - get-intrinsic: 1.2.4 - is-array-buffer: 3.0.4 - is-shared-array-buffer: 1.0.3 - - ast-parents@0.0.1: {} - - astral-regex@2.0.0: {} - - available-typed-arrays@1.0.7: - dependencies: - possible-typed-array-names: 1.0.0 - - 
balanced-match@1.0.2: {} - - bech32@1.1.4: {} - - bignumber.js@9.1.2: {} - - bn.js@4.11.6: {} - - bn.js@4.12.0: {} - - bn.js@5.2.1: {} - - brace-expansion@1.1.11: - dependencies: - balanced-match: 1.0.2 - concat-map: 0.0.1 - - brace-expansion@2.0.1: - dependencies: - balanced-match: 1.0.2 - - braces@3.0.3: - dependencies: - fill-range: 7.1.1 - - brorand@1.1.0: {} - - buffer-reverse@1.0.1: {} - - builtin-modules@3.3.0: {} - - builtins@5.1.0: - dependencies: - semver: 7.6.2 - - cacheable-lookup@7.0.0: {} - - cacheable-request@10.2.14: - dependencies: - '@types/http-cache-semantics': 4.0.4 - get-stream: 6.0.1 - http-cache-semantics: 4.1.1 - keyv: 4.5.4 - mimic-response: 4.0.0 - normalize-url: 8.0.1 - responselike: 3.0.0 - - call-bind@1.0.7: - dependencies: - es-define-property: 1.0.0 - es-errors: 1.3.0 - function-bind: 1.1.2 - get-intrinsic: 1.2.4 - set-function-length: 1.2.2 - - callsites@3.1.0: {} - - chalk@2.4.2: - dependencies: - ansi-styles: 3.2.1 - escape-string-regexp: 1.0.5 - supports-color: 5.5.0 - - chalk@4.1.2: - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - - color-convert@1.9.3: - dependencies: - color-name: 1.1.3 - - color-convert@2.0.1: - dependencies: - color-name: 1.1.4 - - color-name@1.1.3: {} - - color-name@1.1.4: {} - - command-exists@1.2.9: {} - - commander@10.0.1: {} - - commander@8.3.0: {} - - concat-map@0.0.1: {} - - config-chain@1.1.13: - dependencies: - ini: 1.3.8 - proto-list: 1.2.4 - - cosmiconfig@8.3.6(typescript@5.2.2): - dependencies: - import-fresh: 3.3.0 - js-yaml: 4.1.0 - parse-json: 5.2.0 - path-type: 4.0.0 - optionalDependencies: - typescript: 5.2.2 - - create-require@1.1.1: {} - - cross-spawn@7.0.3: - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - which: 2.0.2 - - crypto-js@4.2.0: {} - - data-view-buffer@1.0.1: - dependencies: - call-bind: 1.0.7 - es-errors: 1.3.0 - is-data-view: 1.0.1 - - data-view-byte-length@1.0.1: - dependencies: - call-bind: 1.0.7 - es-errors: 1.3.0 - is-data-view: 1.0.1 - - data-view-byte-offset@1.0.0: - dependencies: - call-bind: 1.0.7 - es-errors: 1.3.0 - is-data-view: 1.0.1 - - debug@3.2.7: - dependencies: - ms: 2.1.3 - - debug@4.3.5: - dependencies: - ms: 2.1.2 - - decompress-response@6.0.0: - dependencies: - mimic-response: 3.1.0 - - deep-extend@0.6.0: {} - - deep-is@0.1.4: {} - - defer-to-connect@2.0.1: {} - - define-data-property@1.1.4: - dependencies: - es-define-property: 1.0.0 - es-errors: 1.3.0 - gopd: 1.0.1 - - define-properties@1.2.1: - dependencies: - define-data-property: 1.1.4 - has-property-descriptors: 1.0.2 - object-keys: 1.1.1 - - diff@4.0.2: {} - - dir-glob@3.0.1: - dependencies: - path-type: 4.0.0 - - doctrine@2.1.0: - dependencies: - esutils: 2.0.3 - - doctrine@3.0.0: - dependencies: - esutils: 2.0.3 - - ds-test@https://codeload.github.com/dapphub/ds-test/tar.gz/e282159d5170298eb2455a6c05280ab5a73a4ef0: {} - - elliptic@6.5.4: - dependencies: - bn.js: 4.12.0 - brorand: 1.1.0 - hash.js: 1.1.7 - hmac-drbg: 1.0.1 - inherits: 2.0.4 - minimalistic-assert: 1.0.1 - minimalistic-crypto-utils: 1.0.1 - - emoji-regex@8.0.0: {} - - error-ex@1.3.2: - dependencies: - is-arrayish: 0.2.1 - - es-abstract@1.23.3: - dependencies: - array-buffer-byte-length: 1.0.1 - arraybuffer.prototype.slice: 1.0.3 - available-typed-arrays: 1.0.7 - call-bind: 1.0.7 - data-view-buffer: 1.0.1 - data-view-byte-length: 1.0.1 - data-view-byte-offset: 1.0.0 - es-define-property: 1.0.0 - es-errors: 1.3.0 - es-object-atoms: 1.0.0 - es-set-tostringtag: 2.0.3 - es-to-primitive: 1.2.1 - function.prototype.name: 1.1.6 - 
get-intrinsic: 1.2.4 - get-symbol-description: 1.0.2 - globalthis: 1.0.4 - gopd: 1.0.1 - has-property-descriptors: 1.0.2 - has-proto: 1.0.3 - has-symbols: 1.0.3 - hasown: 2.0.2 - internal-slot: 1.0.7 - is-array-buffer: 3.0.4 - is-callable: 1.2.7 - is-data-view: 1.0.1 - is-negative-zero: 2.0.3 - is-regex: 1.1.4 - is-shared-array-buffer: 1.0.3 - is-string: 1.0.7 - is-typed-array: 1.1.13 - is-weakref: 1.0.2 - object-inspect: 1.13.2 - object-keys: 1.1.1 - object.assign: 4.1.5 - regexp.prototype.flags: 1.5.2 - safe-array-concat: 1.1.2 - safe-regex-test: 1.0.3 - string.prototype.trim: 1.2.9 - string.prototype.trimend: 1.0.8 - string.prototype.trimstart: 1.0.8 - typed-array-buffer: 1.0.2 - typed-array-byte-length: 1.0.1 - typed-array-byte-offset: 1.0.2 - typed-array-length: 1.0.6 - unbox-primitive: 1.0.2 - which-typed-array: 1.1.15 - - es-define-property@1.0.0: - dependencies: - get-intrinsic: 1.2.4 - - es-errors@1.3.0: {} - - es-object-atoms@1.0.0: - dependencies: - es-errors: 1.3.0 - - es-set-tostringtag@2.0.3: - dependencies: - get-intrinsic: 1.2.4 - has-tostringtag: 1.0.2 - hasown: 2.0.2 - - es-shim-unscopables@1.0.2: - dependencies: - hasown: 2.0.2 - - es-to-primitive@1.2.1: - dependencies: - is-callable: 1.2.7 - is-date-object: 1.0.5 - is-symbol: 1.0.4 - - escape-string-regexp@1.0.5: {} - - escape-string-regexp@4.0.0: {} - - eslint-compat-utils@0.5.1(eslint@8.51.0): - dependencies: - eslint: 8.51.0 - semver: 7.6.2 - - eslint-config-prettier@9.1.0(eslint@8.51.0): - dependencies: - eslint: 8.51.0 - - eslint-config-standard@17.1.0(eslint-plugin-import@2.28.1(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint@8.51.0))(eslint-plugin-n@16.6.2(eslint@8.51.0))(eslint-plugin-promise@6.1.1(eslint@8.51.0))(eslint@8.51.0): - dependencies: - eslint: 8.51.0 - eslint-plugin-import: 2.28.1(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint@8.51.0) - eslint-plugin-n: 16.6.2(eslint@8.51.0) - eslint-plugin-promise: 6.1.1(eslint@8.51.0) - - eslint-import-resolver-node@0.3.9: - dependencies: - debug: 3.2.7 - is-core-module: 2.14.0 - resolve: 1.22.8 - transitivePeerDependencies: - - supports-color - - eslint-module-utils@2.8.1(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint-import-resolver-node@0.3.9)(eslint@8.51.0): - dependencies: - debug: 3.2.7 - optionalDependencies: - '@typescript-eslint/parser': 7.7.0(eslint@8.51.0)(typescript@5.2.2) - eslint: 8.51.0 - eslint-import-resolver-node: 0.3.9 - transitivePeerDependencies: - - supports-color - - eslint-plugin-es-x@7.8.0(eslint@8.51.0): - dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@8.51.0) - '@eslint-community/regexpp': 4.11.0 - eslint: 8.51.0 - eslint-compat-utils: 0.5.1(eslint@8.51.0) - - eslint-plugin-es@3.0.1(eslint@8.51.0): - dependencies: - eslint: 8.51.0 - eslint-utils: 2.1.0 - regexpp: 3.2.0 - - eslint-plugin-import@2.28.1(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint@8.51.0): - dependencies: - array-includes: 3.1.8 - array.prototype.findlastindex: 1.2.5 - array.prototype.flat: 1.3.2 - array.prototype.flatmap: 1.3.2 - debug: 3.2.7 - doctrine: 2.1.0 - eslint: 8.51.0 - eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.8.1(@typescript-eslint/parser@7.7.0(eslint@8.51.0)(typescript@5.2.2))(eslint-import-resolver-node@0.3.9)(eslint@8.51.0) - has: 1.0.4 - is-core-module: 2.14.0 - is-glob: 4.0.3 - minimatch: 3.1.2 - object.fromentries: 2.0.8 - object.groupby: 1.0.3 - object.values: 1.2.0 - semver: 6.3.1 - tsconfig-paths: 3.15.0 - 
optionalDependencies: - '@typescript-eslint/parser': 7.7.0(eslint@8.51.0)(typescript@5.2.2) - transitivePeerDependencies: - - eslint-import-resolver-typescript - - eslint-import-resolver-webpack - - supports-color - - eslint-plugin-n@16.6.2(eslint@8.51.0): - dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@8.51.0) - builtins: 5.1.0 - eslint: 8.51.0 - eslint-plugin-es-x: 7.8.0(eslint@8.51.0) - get-tsconfig: 4.7.5 - globals: 13.24.0 - ignore: 5.3.1 - is-builtin-module: 3.2.1 - is-core-module: 2.14.0 - minimatch: 3.1.2 - resolve: 1.22.8 - semver: 7.6.2 - - eslint-plugin-node@11.1.0(eslint@8.51.0): - dependencies: - eslint: 8.51.0 - eslint-plugin-es: 3.0.1(eslint@8.51.0) - eslint-utils: 2.1.0 - ignore: 5.3.1 - minimatch: 3.1.2 - resolve: 1.22.8 - semver: 6.3.1 - - eslint-plugin-prettier@5.1.3(eslint-config-prettier@9.1.0(eslint@8.51.0))(eslint@8.51.0)(prettier@3.0.3): - dependencies: - eslint: 8.51.0 - prettier: 3.0.3 - prettier-linter-helpers: 1.0.0 - synckit: 0.8.8 - optionalDependencies: - eslint-config-prettier: 9.1.0(eslint@8.51.0) - - eslint-plugin-promise@6.1.1(eslint@8.51.0): - dependencies: - eslint: 8.51.0 - - eslint-scope@7.2.2: - dependencies: - esrecurse: 4.3.0 - estraverse: 5.3.0 - - eslint-utils@2.1.0: - dependencies: - eslint-visitor-keys: 1.3.0 - - eslint-visitor-keys@1.3.0: {} - - eslint-visitor-keys@3.4.3: {} - - eslint@8.51.0: - dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@8.51.0) - '@eslint-community/regexpp': 4.11.0 - '@eslint/eslintrc': 2.1.4 - '@eslint/js': 8.51.0 - '@humanwhocodes/config-array': 0.11.14 - '@humanwhocodes/module-importer': 1.0.1 - '@nodelib/fs.walk': 1.2.8 - ajv: 6.12.6 - chalk: 4.1.2 - cross-spawn: 7.0.3 - debug: 4.3.5 - doctrine: 3.0.0 - escape-string-regexp: 4.0.0 - eslint-scope: 7.2.2 - eslint-visitor-keys: 3.4.3 - espree: 9.6.1 - esquery: 1.5.0 - esutils: 2.0.3 - fast-deep-equal: 3.1.3 - file-entry-cache: 6.0.1 - find-up: 5.0.0 - glob-parent: 6.0.2 - globals: 13.24.0 - graphemer: 1.4.0 - ignore: 5.3.1 - imurmurhash: 0.1.4 - is-glob: 4.0.3 - is-path-inside: 3.0.3 - js-yaml: 4.1.0 - json-stable-stringify-without-jsonify: 1.0.1 - levn: 0.4.1 - lodash.merge: 4.6.2 - minimatch: 3.1.2 - natural-compare: 1.4.0 - optionator: 0.9.4 - strip-ansi: 6.0.1 - text-table: 0.2.0 - transitivePeerDependencies: - - supports-color - - espree@9.6.1: - dependencies: - acorn: 8.12.0 - acorn-jsx: 5.3.2(acorn@8.12.0) - eslint-visitor-keys: 3.4.3 - - esquery@1.5.0: - dependencies: - estraverse: 5.3.0 - - esrecurse@4.3.0: - dependencies: - estraverse: 5.3.0 - - estraverse@5.3.0: {} - - esutils@2.0.3: {} - - ethereum-bloom-filters@1.1.0: - dependencies: - '@noble/hashes': 1.4.0 - - ethereum-cryptography@2.2.1: - dependencies: - '@noble/curves': 1.4.2 - '@noble/hashes': 1.4.0 - '@scure/bip32': 1.4.0 - '@scure/bip39': 1.3.0 - - ethers@5.7.2: - dependencies: - '@ethersproject/abi': 5.7.0 - '@ethersproject/abstract-provider': 5.7.0 - '@ethersproject/abstract-signer': 5.7.0 - '@ethersproject/address': 5.7.0 - '@ethersproject/base64': 5.7.0 - '@ethersproject/basex': 5.7.0 - '@ethersproject/bignumber': 5.7.0 - '@ethersproject/bytes': 5.7.0 - '@ethersproject/constants': 5.7.0 - '@ethersproject/contracts': 5.7.0 - '@ethersproject/hash': 5.7.0 - '@ethersproject/hdnode': 5.7.0 - '@ethersproject/json-wallets': 5.7.0 - '@ethersproject/keccak256': 5.7.0 - '@ethersproject/logger': 5.7.0 - '@ethersproject/networks': 5.7.1 - '@ethersproject/pbkdf2': 5.7.0 - '@ethersproject/properties': 5.7.0 - '@ethersproject/providers': 5.7.2 - '@ethersproject/random': 5.7.0 - 
'@ethersproject/rlp': 5.7.0 - '@ethersproject/sha2': 5.7.0 - '@ethersproject/signing-key': 5.7.0 - '@ethersproject/solidity': 5.7.0 - '@ethersproject/strings': 5.7.0 - '@ethersproject/transactions': 5.7.0 - '@ethersproject/units': 5.7.0 - '@ethersproject/wallet': 5.7.0 - '@ethersproject/web': 5.7.1 - '@ethersproject/wordlists': 5.7.0 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - ethjs-unit@0.1.6: - dependencies: - bn.js: 4.11.6 - number-to-bn: 1.7.0 - - fast-deep-equal@3.1.3: {} - - fast-diff@1.3.0: {} - - fast-glob@3.3.2: - dependencies: - '@nodelib/fs.stat': 2.0.5 - '@nodelib/fs.walk': 1.2.8 - glob-parent: 5.1.2 - merge2: 1.4.1 - micromatch: 4.0.7 - - fast-json-stable-stringify@2.1.0: {} - - fast-levenshtein@2.0.6: {} - - fastq@1.17.1: - dependencies: - reusify: 1.0.4 - - file-entry-cache@6.0.1: - dependencies: - flat-cache: 3.2.0 - - fill-range@7.1.1: - dependencies: - to-regex-range: 5.0.1 - - find-up@5.0.0: - dependencies: - locate-path: 6.0.0 - path-exists: 4.0.0 - - flat-cache@3.2.0: - dependencies: - flatted: 3.3.1 - keyv: 4.5.4 - rimraf: 3.0.2 - - flatted@3.3.1: {} - - follow-redirects@1.15.6: {} - - for-each@0.3.3: - dependencies: - is-callable: 1.2.7 - - forge-std@https://codeload.github.com/foundry-rs/forge-std/tar.gz/36c303b7ffdd842d06b1ec2744c9b9b5fb3083f3: {} - - form-data-encoder@2.1.4: {} - - fs.realpath@1.0.0: {} - - function-bind@1.1.2: {} - - function.prototype.name@1.1.6: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - functions-have-names: 1.2.3 - - functions-have-names@1.2.3: {} - - get-intrinsic@1.2.4: - dependencies: - es-errors: 1.3.0 - function-bind: 1.1.2 - has-proto: 1.0.3 - has-symbols: 1.0.3 - hasown: 2.0.2 - - get-stream@6.0.1: {} - - get-symbol-description@1.0.2: - dependencies: - call-bind: 1.0.7 - es-errors: 1.3.0 - get-intrinsic: 1.2.4 - - get-tsconfig@4.7.5: - dependencies: - resolve-pkg-maps: 1.0.0 - - glob-parent@5.1.2: - dependencies: - is-glob: 4.0.3 - - glob-parent@6.0.2: - dependencies: - is-glob: 4.0.3 - - glob@7.2.3: - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 3.1.2 - once: 1.4.0 - path-is-absolute: 1.0.1 - - glob@8.1.0: - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 5.1.6 - once: 1.4.0 - - globals@13.24.0: - dependencies: - type-fest: 0.20.2 - - globalthis@1.0.4: - dependencies: - define-properties: 1.2.1 - gopd: 1.0.1 - - globby@11.1.0: - dependencies: - array-union: 2.1.0 - dir-glob: 3.0.1 - fast-glob: 3.3.2 - ignore: 5.3.1 - merge2: 1.4.1 - slash: 3.0.0 - - gopd@1.0.1: - dependencies: - get-intrinsic: 1.2.4 - - got@12.6.1: - dependencies: - '@sindresorhus/is': 5.6.0 - '@szmarczak/http-timer': 5.0.1 - cacheable-lookup: 7.0.0 - cacheable-request: 10.2.14 - decompress-response: 6.0.0 - form-data-encoder: 2.1.4 - get-stream: 6.0.1 - http2-wrapper: 2.2.1 - lowercase-keys: 3.0.0 - p-cancelable: 3.0.0 - responselike: 3.0.0 - - graceful-fs@4.2.10: {} - - graphemer@1.4.0: {} - - has-bigints@1.0.2: {} - - has-flag@3.0.0: {} - - has-flag@4.0.0: {} - - has-property-descriptors@1.0.2: - dependencies: - es-define-property: 1.0.0 - - has-proto@1.0.3: {} - - has-symbols@1.0.3: {} - - has-tostringtag@1.0.2: - dependencies: - has-symbols: 1.0.3 - - has@1.0.4: {} - - hash.js@1.1.7: - dependencies: - inherits: 2.0.4 - minimalistic-assert: 1.0.1 - - hasown@2.0.2: - dependencies: - function-bind: 1.1.2 - - hmac-drbg@1.0.1: - dependencies: - hash.js: 1.1.7 - minimalistic-assert: 1.0.1 - minimalistic-crypto-utils: 1.0.1 - - 
http-cache-semantics@4.1.1: {} - - http2-wrapper@2.2.1: - dependencies: - quick-lru: 5.1.1 - resolve-alpn: 1.2.1 - - ignore@5.3.1: {} - - import-fresh@3.3.0: - dependencies: - parent-module: 1.0.1 - resolve-from: 4.0.0 - - imurmurhash@0.1.4: {} - - inflight@1.0.6: - dependencies: - once: 1.4.0 - wrappy: 1.0.2 - - inherits@2.0.4: {} - - ini@1.3.8: {} - - internal-slot@1.0.7: - dependencies: - es-errors: 1.3.0 - hasown: 2.0.2 - side-channel: 1.0.6 - - is-array-buffer@3.0.4: - dependencies: - call-bind: 1.0.7 - get-intrinsic: 1.2.4 - - is-arrayish@0.2.1: {} - - is-bigint@1.0.4: - dependencies: - has-bigints: 1.0.2 - - is-boolean-object@1.1.2: - dependencies: - call-bind: 1.0.7 - has-tostringtag: 1.0.2 - - is-builtin-module@3.2.1: - dependencies: - builtin-modules: 3.3.0 - - is-callable@1.2.7: {} - - is-core-module@2.14.0: - dependencies: - hasown: 2.0.2 - - is-data-view@1.0.1: - dependencies: - is-typed-array: 1.1.13 - - is-date-object@1.0.5: - dependencies: - has-tostringtag: 1.0.2 - - is-extglob@2.1.1: {} - - is-fullwidth-code-point@3.0.0: {} - - is-glob@4.0.3: - dependencies: - is-extglob: 2.1.1 - - is-hex-prefixed@1.0.0: {} - - is-negative-zero@2.0.3: {} - - is-number-object@1.0.7: - dependencies: - has-tostringtag: 1.0.2 - - is-number@7.0.0: {} - - is-path-inside@3.0.3: {} - - is-regex@1.1.4: - dependencies: - call-bind: 1.0.7 - has-tostringtag: 1.0.2 - - is-shared-array-buffer@1.0.3: - dependencies: - call-bind: 1.0.7 - - is-string@1.0.7: - dependencies: - has-tostringtag: 1.0.2 - - is-symbol@1.0.4: - dependencies: - has-symbols: 1.0.3 - - is-typed-array@1.1.13: - dependencies: - which-typed-array: 1.1.15 - - is-weakref@1.0.2: - dependencies: - call-bind: 1.0.7 - - isarray@2.0.5: {} - - isexe@2.0.0: {} - - js-sha3@0.8.0: {} - - js-tokens@4.0.0: {} - - js-yaml@4.1.0: - dependencies: - argparse: 2.0.1 - - json-buffer@3.0.1: {} - - json-parse-even-better-errors@2.3.1: {} - - json-schema-traverse@0.4.1: {} - - json-schema-traverse@1.0.0: {} - - json-stable-stringify-without-jsonify@1.0.1: {} - - json5@1.0.2: - dependencies: - minimist: 1.2.8 - - keyv@4.5.4: - dependencies: - json-buffer: 3.0.1 - - latest-version@7.0.0: - dependencies: - package-json: 8.1.1 - - levn@0.4.1: - dependencies: - prelude-ls: 1.2.1 - type-check: 0.4.0 - - lines-and-columns@1.2.4: {} - - locate-path@6.0.0: - dependencies: - p-locate: 5.0.0 - - lodash.merge@4.6.2: {} - - lodash.truncate@4.4.2: {} - - lodash@4.17.21: {} - - lowercase-keys@3.0.0: {} - - make-error@1.3.6: {} - - memorystream@0.3.1: {} - - merge2@1.4.1: {} - - merkletreejs@0.3.11: - dependencies: - bignumber.js: 9.1.2 - buffer-reverse: 1.0.1 - crypto-js: 4.2.0 - treeify: 1.1.0 - web3-utils: 1.10.4 - - micro-ftch@0.3.1: {} - - micromatch@4.0.7: - dependencies: - braces: 3.0.3 - picomatch: 2.3.1 - - mimic-response@3.1.0: {} - - mimic-response@4.0.0: {} - - minimalistic-assert@1.0.1: {} - - minimalistic-crypto-utils@1.0.1: {} - - minimatch@3.1.2: - dependencies: - brace-expansion: 1.1.11 - - minimatch@5.1.6: - dependencies: - brace-expansion: 2.0.1 - - minimatch@9.0.3: - dependencies: - brace-expansion: 2.0.1 - - minimatch@9.0.5: - dependencies: - brace-expansion: 2.0.1 - - minimist@1.2.8: {} - - ms@2.1.2: {} - - ms@2.1.3: {} - - natural-compare@1.4.0: {} - - normalize-url@8.0.1: {} - - number-to-bn@1.7.0: - dependencies: - bn.js: 4.11.6 - strip-hex-prefix: 1.0.0 - - object-inspect@1.13.2: {} - - object-keys@1.1.1: {} - - object.assign@4.1.5: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - has-symbols: 1.0.3 - object-keys: 1.1.1 - - 
object.fromentries@2.0.8: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-object-atoms: 1.0.0 - - object.groupby@1.0.3: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - - object.values@1.2.0: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-object-atoms: 1.0.0 - - once@1.4.0: - dependencies: - wrappy: 1.0.2 - - optionator@0.9.4: - dependencies: - deep-is: 0.1.4 - fast-levenshtein: 2.0.6 - levn: 0.4.1 - prelude-ls: 1.2.1 - type-check: 0.4.0 - word-wrap: 1.2.5 - - os-tmpdir@1.0.2: {} - - p-cancelable@3.0.0: {} - - p-limit@3.1.0: - dependencies: - yocto-queue: 0.1.0 - - p-locate@5.0.0: - dependencies: - p-limit: 3.1.0 - - p256-verifier#v0.1.0@https://codeload.github.com/taikoxyz/p256-verifier/tar.gz/6ef45b117642786b08a37b4c37c6a6ce151166da: {} - - package-json@8.1.1: - dependencies: - got: 12.6.1 - registry-auth-token: 5.0.2 - registry-url: 6.0.1 - semver: 7.6.2 - - parent-module@1.0.1: - dependencies: - callsites: 3.1.0 - - parse-json@5.2.0: - dependencies: - '@babel/code-frame': 7.24.7 - error-ex: 1.3.2 - json-parse-even-better-errors: 2.3.1 - lines-and-columns: 1.2.4 - - path-exists@4.0.0: {} - - path-is-absolute@1.0.1: {} - - path-key@3.1.1: {} - - path-parse@1.0.7: {} - - path-type@4.0.0: {} - - picocolors@1.0.1: {} - - picomatch@2.3.1: {} - - pluralize@8.0.0: {} - - possible-typed-array-names@1.0.0: {} - - prelude-ls@1.2.1: {} - - prettier-linter-helpers@1.0.0: - dependencies: - fast-diff: 1.3.0 - - prettier@2.8.8: - optional: true - - prettier@3.0.3: {} - - proto-list@1.2.4: {} - - punycode@2.3.1: {} - - queue-microtask@1.2.3: {} - - quick-lru@5.1.1: {} - - randombytes@2.1.0: - dependencies: - safe-buffer: 5.2.1 - - rc@1.2.8: - dependencies: - deep-extend: 0.6.0 - ini: 1.3.8 - minimist: 1.2.8 - strip-json-comments: 2.0.1 - - regexp.prototype.flags@1.5.2: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-errors: 1.3.0 - set-function-name: 2.0.2 - - regexpp@3.2.0: {} - - registry-auth-token@5.0.2: - dependencies: - '@pnpm/npm-conf': 2.2.2 - - registry-url@6.0.1: - dependencies: - rc: 1.2.8 - - require-from-string@2.0.2: {} - - resolve-alpn@1.2.1: {} - - resolve-from@4.0.0: {} - - resolve-pkg-maps@1.0.0: {} - - resolve@1.22.8: - dependencies: - is-core-module: 2.14.0 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - - responselike@3.0.0: - dependencies: - lowercase-keys: 3.0.0 - - reusify@1.0.4: {} - - rimraf@3.0.2: - dependencies: - glob: 7.2.3 - - run-parallel@1.2.0: - dependencies: - queue-microtask: 1.2.3 - - safe-array-concat@1.1.2: - dependencies: - call-bind: 1.0.7 - get-intrinsic: 1.2.4 - has-symbols: 1.0.3 - isarray: 2.0.5 - - safe-buffer@5.2.1: {} - - safe-regex-test@1.0.3: - dependencies: - call-bind: 1.0.7 - es-errors: 1.3.0 - is-regex: 1.1.4 - - scrypt-js@3.0.1: {} - - semver@5.7.2: {} - - semver@6.3.1: {} - - semver@7.6.2: {} - - set-function-length@1.2.2: - dependencies: - define-data-property: 1.1.4 - es-errors: 1.3.0 - function-bind: 1.1.2 - get-intrinsic: 1.2.4 - gopd: 1.0.1 - has-property-descriptors: 1.0.2 - - set-function-name@2.0.2: - dependencies: - define-data-property: 1.1.4 - es-errors: 1.3.0 - functions-have-names: 1.2.3 - has-property-descriptors: 1.0.2 - - shebang-command@2.0.0: - dependencies: - shebang-regex: 3.0.0 - - shebang-regex@3.0.0: {} - - side-channel@1.0.6: - dependencies: - call-bind: 1.0.7 - es-errors: 1.3.0 - get-intrinsic: 1.2.4 - object-inspect: 1.13.2 - - slash@3.0.0: {} - - slice-ansi@4.0.0: - dependencies: - 
ansi-styles: 4.3.0 - astral-regex: 2.0.0 - is-fullwidth-code-point: 3.0.0 - - solady@https://codeload.github.com/Vectorized/solady/tar.gz/de0f336d2033d04e0f77c923d639c7fbffd48b6d: {} - - solc@0.8.24: - dependencies: - command-exists: 1.2.9 - commander: 8.3.0 - follow-redirects: 1.15.6 - js-sha3: 0.8.0 - memorystream: 0.3.1 - semver: 5.7.2 - tmp: 0.0.33 - transitivePeerDependencies: - - debug - - solhint@5.0.1(typescript@5.2.2): - dependencies: - '@solidity-parser/parser': 0.18.0 - ajv: 6.12.6 - antlr4: 4.13.1 - ast-parents: 0.0.1 - chalk: 4.1.2 - commander: 10.0.1 - cosmiconfig: 8.3.6(typescript@5.2.2) - fast-diff: 1.3.0 - glob: 8.1.0 - ignore: 5.3.1 - js-yaml: 4.1.0 - latest-version: 7.0.0 - lodash: 4.17.21 - pluralize: 8.0.0 - semver: 7.6.2 - strip-ansi: 6.0.1 - table: 6.8.2 - text-table: 0.2.0 - optionalDependencies: - prettier: 2.8.8 - transitivePeerDependencies: - - typescript - - string-width@4.2.3: - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - - string.prototype.trim@1.2.9: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-object-atoms: 1.0.0 - - string.prototype.trimend@1.0.8: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-object-atoms: 1.0.0 - - string.prototype.trimstart@1.0.8: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-object-atoms: 1.0.0 - - strip-ansi@6.0.1: - dependencies: - ansi-regex: 5.0.1 - - strip-bom@3.0.0: {} - - strip-hex-prefix@1.0.0: - dependencies: - is-hex-prefixed: 1.0.0 - - strip-json-comments@2.0.1: {} - - strip-json-comments@3.1.1: {} - - supports-color@5.5.0: - dependencies: - has-flag: 3.0.0 - - supports-color@7.2.0: - dependencies: - has-flag: 4.0.0 - - supports-preserve-symlinks-flag@1.0.0: {} - - synckit@0.8.8: - dependencies: - '@pkgr/core': 0.1.1 - tslib: 2.6.3 - - table@6.8.2: - dependencies: - ajv: 8.16.0 - lodash.truncate: 4.4.2 - slice-ansi: 4.0.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - - text-table@0.2.0: {} - - tmp@0.0.33: - dependencies: - os-tmpdir: 1.0.2 - - to-regex-range@5.0.1: - dependencies: - is-number: 7.0.0 - - treeify@1.1.0: {} - - ts-api-utils@1.3.0(typescript@5.2.2): - dependencies: - typescript: 5.2.2 - - ts-node@10.9.2(@types/node@20.11.30)(typescript@5.2.2): - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.11 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.4 - '@types/node': 20.11.30 - acorn: 8.12.0 - acorn-walk: 8.3.3 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 5.2.2 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - - tsconfig-paths@3.15.0: - dependencies: - '@types/json5': 0.0.29 - json5: 1.0.2 - minimist: 1.2.8 - strip-bom: 3.0.0 - - tslib@2.6.3: {} - - type-check@0.4.0: - dependencies: - prelude-ls: 1.2.1 - - type-fest@0.20.2: {} - - typed-array-buffer@1.0.2: - dependencies: - call-bind: 1.0.7 - es-errors: 1.3.0 - is-typed-array: 1.1.13 - - typed-array-byte-length@1.0.1: - dependencies: - call-bind: 1.0.7 - for-each: 0.3.3 - gopd: 1.0.1 - has-proto: 1.0.3 - is-typed-array: 1.1.13 - - typed-array-byte-offset@1.0.2: - dependencies: - available-typed-arrays: 1.0.7 - call-bind: 1.0.7 - for-each: 0.3.3 - gopd: 1.0.1 - has-proto: 1.0.3 - is-typed-array: 1.1.13 - - typed-array-length@1.0.6: - dependencies: - call-bind: 1.0.7 - for-each: 0.3.3 - gopd: 1.0.1 - has-proto: 1.0.3 - is-typed-array: 1.1.13 - possible-typed-array-names: 1.0.0 - - typescript@5.2.2: {} - - unbox-primitive@1.0.2: - dependencies: 
- call-bind: 1.0.7 - has-bigints: 1.0.2 - has-symbols: 1.0.3 - which-boxed-primitive: 1.0.2 - - undici-types@5.26.5: {} - - uri-js@4.4.1: - dependencies: - punycode: 2.3.1 - - utf8@3.0.0: {} - - v8-compile-cache-lib@3.0.1: {} - - web3-utils@1.10.4: - dependencies: - '@ethereumjs/util': 8.1.0 - bn.js: 5.2.1 - ethereum-bloom-filters: 1.1.0 - ethereum-cryptography: 2.2.1 - ethjs-unit: 0.1.6 - number-to-bn: 1.7.0 - randombytes: 2.1.0 - utf8: 3.0.0 - - which-boxed-primitive@1.0.2: - dependencies: - is-bigint: 1.0.4 - is-boolean-object: 1.1.2 - is-number-object: 1.0.7 - is-string: 1.0.7 - is-symbol: 1.0.4 - - which-typed-array@1.1.15: - dependencies: - available-typed-arrays: 1.0.7 - call-bind: 1.0.7 - for-each: 0.3.3 - gopd: 1.0.1 - has-tostringtag: 1.0.2 - - which@2.0.2: - dependencies: - isexe: 2.0.0 - - word-wrap@1.2.5: {} - - wrappy@1.0.2: {} - - ws@7.4.6: {} - - yn@3.1.1: {} - - yocto-queue@0.1.0: {} diff --git a/packages/protocol/remappings.txt b/packages/protocol/remappings.txt deleted file mode 100644 index 7a0b8a151cbd..000000000000 --- a/packages/protocol/remappings.txt +++ /dev/null @@ -1,2 +0,0 @@ -forge-std/=node_modules/forge-std/src/ -solmate/=node_modules/solmate/src/ \ No newline at end of file diff --git a/packages/protocol/scripts/AuthorizeRemoteTaikoProtocols.s.sol b/packages/protocol/scripts/AuthorizeRemoteTaikoProtocols.s.sol deleted file mode 100644 index 60969210076c..000000000000 --- a/packages/protocol/scripts/AuthorizeRemoteTaikoProtocols.s.sol +++ /dev/null @@ -1,38 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "forge-std/Script.sol"; -import "forge-std/console2.sol"; -import "../contracts/signal/SignalService.sol"; - -contract AuthorizeRemoteTaikoProtocols is Script { - uint256 public privateKey = vm.envUint("PRIVATE_KEY"); - address public signalServiceAddress = vm.envAddress("SIGNAL_SERVICE_ADDRESS"); - uint256[] public remoteChainIDs = vm.envUint("REMOTE_CHAIN_IDS", ","); - address[] public remoteTaikoProtocols = vm.envAddress("REMOTE_TAIKO_PROTOCOLS", ","); - - function run() external { - require( - remoteChainIDs.length == remoteTaikoProtocols.length, - "invalid remote taiko protocol addresses length" - ); - - vm.startBroadcast(privateKey); - - SignalService signalService = SignalService(payable(signalServiceAddress)); - for (uint256 i; i < remoteChainIDs.length; ++i) { - console2.log(remoteTaikoProtocols[i], "--->", remoteChainIDs[i]); - if (!signalService.isAuthorizedAs(remoteTaikoProtocols[i], bytes32(remoteChainIDs[i]))) - { - signalService.authorize(remoteTaikoProtocols[i], bytes32(remoteChainIDs[i])); - } - } - - vm.stopBroadcast(); - } -} diff --git a/packages/protocol/scripts/DeployL1Locally.s.sol b/packages/protocol/scripts/DeployL1Locally.s.sol deleted file mode 100644 index 0ff10e09721a..000000000000 --- a/packages/protocol/scripts/DeployL1Locally.s.sol +++ /dev/null @@ -1,380 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/utils/Strings.sol"; - -import "../contracts/L1/TaikoL1.sol"; -import "../contracts/L1/ChainProver.sol"; -import "../contracts/L1/VerifierRegistry.sol"; -import "../contracts/tko/TaikoToken.sol"; -import "../contracts/L1/provers/GuardianProver.sol"; -// import "../contracts/L1/tiers/DevnetTierProvider.sol"; -// import "../contracts/L1/tiers/TierProviderV2.sol"; -// import 
"../contracts/bridge/Bridge.sol"; -// import "../contracts/tokenvault/BridgedERC20.sol"; -// import "../contracts/tokenvault/BridgedERC721.sol"; -// import "../contracts/tokenvault/BridgedERC1155.sol"; -// import "../contracts/tokenvault/ERC20Vault.sol"; -// import "../contracts/tokenvault/ERC1155Vault.sol"; -// import "../contracts/tokenvault/ERC721Vault.sol"; -// import "../contracts/signal/SignalService.sol"; -// import "../contracts/automata-attestation/AutomataDcapV3Attestation.sol"; -// import "../contracts/automata-attestation/utils/SigVerifyLib.sol"; -// import "../contracts/automata-attestation/lib/PEMCertChainLib.sol"; -//import "../contracts/L1/verifiers/SgxVerifier.sol"; -import "../contracts/L1/verifiers/MockSgxVerifier.sol"; // Avoid proof verification for now! -// import "../contracts/team/proving/ProverSet.sol"; -// import "../test/common/erc20/FreeMintERC20.sol"; -// import "../test/common/erc20/MayFailFreeMintERC20.sol"; -// import "../test/L1/TestTierProvider.sol"; -import "../test/DeployCapability.sol"; - -// Actually this one is deployed already on mainnet, but we are now deploying our own (non via-ir) -// version. For mainnet, it is easier to go with one of: -// - https://github.com/daimo-eth/p256-verifier -// - https://github.com/rdubois-crypto/FreshCryptoLib -import { P256Verifier } from "p256-verifier/src/P256Verifier.sol"; - -/// @title DeployOnL1 -/// @notice This script deploys the core Taiko protocol smart contract on L1, -/// initializing the rollup. -contract DeployL1Locally is DeployCapability { - // uint256 public NUM_MIN_MAJORITY_GUARDIANS = vm.envUint("NUM_MIN_MAJORITY_GUARDIANS"); - // uint256 public NUM_MIN_MINORITY_GUARDIANS = vm.envUint("NUM_MIN_MINORITY_GUARDIANS"); - - address public MAINNET_CONTRACT_OWNER = vm.envAddress("MAINNET_CONTRACT_OWNER"); //Dani: Use an address anvil provides, with preminted ETH - - modifier broadcast() { - uint256 privateKey = vm.envUint("PRIVATE_KEY"); - require(privateKey != 0, "invalid priv key"); - vm.startBroadcast(); - _; - vm.stopBroadcast(); - } - - function run() external broadcast { - /* - IMPORTANT NOTICES: - - TaikoL2 deployments (and not only TaikoL2, but all contracts sitting on L2) obviously not done and haven't even dealt with - - SignalService, Bridge, Vaults also not dealt with on L1 - */ - // addressNotNull(vm.envAddress("TAIKO_L2_ADDRESS"), "TAIKO_L2_ADDRESS"); - // addressNotNull(vm.envAddress("L2_SIGNAL_SERVICE"), "L2_SIGNAL_SERVICE"); - // addressNotNull(vm.envAddress("CONTRACT_OWNER"), "CONTRACT_OWNER"); - - require(vm.envBytes32("L2_GENESIS_HASH") != 0, "L2_GENESIS_HASH"); - address contractOwner = MAINNET_CONTRACT_OWNER; - - // --------------------------------------------------------------- - // Deploy shared contracts - (address sharedAddressManager) = deploySharedContracts(contractOwner); - console2.log("sharedAddressManager: ", sharedAddressManager); - // --------------------------------------------------------------- - // Deploy rollup contracts - address rollupAddressManager = deployRollupContracts(sharedAddressManager, contractOwner); - - // // --------------------------------------------------------------- - // // Signal service need to authorize the new rollup - // address signalServiceAddr = AddressManager(sharedAddressManager).getAddress( - // uint64(block.chainid), LibStrings.B_SIGNAL_SERVICE - // ); - // addressNotNull(signalServiceAddr, "signalServiceAddr"); - // SignalService signalService = SignalService(signalServiceAddr); - - address taikoL1Addr = 
AddressManager(rollupAddressManager).getAddress( - uint64(block.chainid), "taiko" - ); - addressNotNull(taikoL1Addr, "taikoL1Addr"); - TaikoL1 taikoL1 = TaikoL1(payable(taikoL1Addr)); - - // if (vm.envAddress("SHARED_ADDRESS_MANAGER") == address(0)) { - // SignalService(signalServiceAddr).authorize(taikoL1Addr, true); - // } - - // uint64 l2ChainId = taikoL1.getConfig().chainId; - // require(l2ChainId != block.chainid, "same chainid"); - - // console2.log("------------------------------------------"); - // console2.log("msg.sender: ", msg.sender); - // console2.log("address(this): ", address(this)); - // console2.log("signalService.owner(): ", signalService.owner()); - // console2.log("------------------------------------------"); - - // if (signalService.owner() == msg.sender) { - // signalService.transferOwnership(contractOwner); - // } else { - // console2.log("------------------------------------------"); - // console2.log("Warning - you need to transact manually:"); - // console2.log("signalService.authorize(taikoL1Addr, bytes32(block.chainid))"); - // console2.log("- signalService : ", signalServiceAddr); - // console2.log("- taikoL1Addr : ", taikoL1Addr); - // console2.log("- chainId : ", block.chainid); - // } - - // // --------------------------------------------------------------- - // // Register L2 addresses - // register(rollupAddressManager, "taiko", vm.envAddress("TAIKO_L2_ADDRESS"), l2ChainId); - // register( - // rollupAddressManager, "signal_service", vm.envAddress("L2_SIGNAL_SERVICE"), l2ChainId - // ); - - // // --------------------------------------------------------------- - // // Deploy other contracts - // if (block.chainid != 1) { - // deployAuxContracts(); - // } - - // if (AddressManager(sharedAddressManager).owner() == msg.sender) { - // AddressManager(sharedAddressManager).transferOwnership(contractOwner); - // console2.log("** sharedAddressManager ownership transferred to:", contractOwner); - // } - - // AddressManager(rollupAddressManager).transferOwnership(contractOwner); - // console2.log("** rollupAddressManager ownership transferred to:", contractOwner); - } - - function deploySharedContracts(address owner) internal returns (address sharedAddressManager) { - addressNotNull(owner, "owner"); - - sharedAddressManager = address(0);// Dani: Can be set tho via ENV var, for now, for anvil, easy to just deploy every time - if (sharedAddressManager == address(0)) { - sharedAddressManager = deployProxy({ - name: "shared_address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (owner)) - }); - } - - //dataToFeed = abi.encodeCall(TaikoToken.init, ("TAIKO", "TAIKO", MAINNET_CONTRACT_OWNER)); - address taikoToken = address(0); // Later on use this as env. var since already deployed (on testnets): vm.envAddress("TAIKO_TOKEN"); - if (taikoToken == address(0)) { - taikoToken = deployProxy({ - name: "taiko_token", - impl: address(new TaikoToken()), - data: abi.encodeCall(TaikoToken.init, (MAINNET_CONTRACT_OWNER, MAINNET_CONTRACT_OWNER)), - registerTo: sharedAddressManager - }); - } - - // // Deploy Bridging contracts - to be done later. 
- // deployProxy({ - // name: "signal_service", - // impl: address(new SignalService()), - // data: abi.encodeCall(SignalService.init, (address(0), sharedAddressManager)), - // registerTo: sharedAddressManager - // }); - - // address brdige = deployProxy({ - // name: "bridge", - // impl: address(new Bridge()), - // data: abi.encodeCall(Bridge.init, (address(0), sharedAddressManager)), - // registerTo: sharedAddressManager - // }); - - // if (vm.envBool("PAUSE_BRIDGE")) { - // Bridge(payable(brdige)).pause(); - // } - - // Bridge(payable(brdige)).transferOwnership(owner); - - // console2.log("------------------------------------------"); - // console2.log( - // "Warning - you need to register *all* counterparty bridges to enable multi-hop bridging:" - // ); - // console2.log( - // "sharedAddressManager.setAddress(remoteChainId, \"bridge\", address(remoteBridge))" - // ); - // console2.log("- sharedAddressManager : ", sharedAddressManager); - - // // Deploy Vaults - // deployProxy({ - // name: "erc20_vault", - // impl: address(new ERC20Vault()), - // data: abi.encodeCall(ERC20Vault.init, (owner, sharedAddressManager)), - // registerTo: sharedAddressManager - // }); - - // deployProxy({ - // name: "erc721_vault", - // impl: address(new ERC721Vault()), - // data: abi.encodeCall(ERC721Vault.init, (owner, sharedAddressManager)), - // registerTo: sharedAddressManager - // }); - - // deployProxy({ - // name: "erc1155_vault", - // impl: address(new ERC1155Vault()), - // data: abi.encodeCall(ERC1155Vault.init, (owner, sharedAddressManager)), - // registerTo: sharedAddressManager - // }); - - // console2.log("------------------------------------------"); - // console2.log( - // "Warning - you need to register *all* counterparty vaults to enable multi-hop bridging:" - // ); - // console2.log( - // "sharedAddressManager.setAddress(remoteChainId, \"erc20_vault\", address(remoteERC20Vault))" - // ); - // console2.log( - // "sharedAddressManager.setAddress(remoteChainId, \"erc721_vault\", address(remoteERC721Vault))" - // ); - // console2.log( - // "sharedAddressManager.setAddress(remoteChainId, \"erc1155_vault\", address(remoteERC1155Vault))" - // ); - // console2.log("- sharedAddressManager : ", sharedAddressManager); - - // // Deploy Bridged token implementations - // register(sharedAddressManager, "bridged_erc20", address(new BridgedERC20())); - // register(sharedAddressManager, "bridged_erc721", address(new BridgedERC721())); - // register(sharedAddressManager, "bridged_erc1155", address(new BridgedERC1155())); - } - - function deployRollupContracts( - address _sharedAddressManager, - address owner - ) - internal - returns (address rollupAddressManager) - { - addressNotNull(_sharedAddressManager, "sharedAddressManager"); - addressNotNull(owner, "owner"); - - rollupAddressManager = deployProxy({ - name: "rollup_address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (owner)) - }); - - // --------------------------------------------------------------- - // Register shared contracts in the new rollup - copyRegister(rollupAddressManager, _sharedAddressManager, "taiko_token"); - // Not deployed yet, so not needed: - // copyRegister(rollupAddressManager, _sharedAddressManager, "signal_service"); - // copyRegister(rollupAddressManager, _sharedAddressManager, "bridge"); - - deployProxy({ - name: "taiko", - impl: address(new TaikoL1()), - data: abi.encodeCall( - TaikoL1.init, - ( - owner, - rollupAddressManager, - vm.envBytes32("L2_GENESIS_HASH") - ) - ), - 
registerTo: rollupAddressManager - }); - - /* Deploy ChainProver */ - deployProxy({ - name: "chain_prover", - impl: address(new ChainProver()), - data: abi.encodeCall(ChainProver.init, (MAINNET_CONTRACT_OWNER, rollupAddressManager)), - registerTo: rollupAddressManager - }); - - /* Deploy MockSgxVerifier 3 times for now, so that we can call verifyProof without modifying the protocol code. These mocks shall obviously be replaced with real verifiers later. */ - address verifier1 = deployProxy({ - name: "tier_sgx1", - impl: address(new MockSgxVerifier()), - data: abi.encodeCall(MockSgxVerifier.init, (MAINNET_CONTRACT_OWNER, rollupAddressManager)), - registerTo: rollupAddressManager - }); - address verifier2 = deployProxy({ - name: "tier_sgx2", - impl: address(new MockSgxVerifier()), - data: abi.encodeCall(MockSgxVerifier.init, (MAINNET_CONTRACT_OWNER, rollupAddressManager)), - registerTo: rollupAddressManager - }); - address verifier3 = deployProxy({ - name: "tier_sgx3", - impl: address(new MockSgxVerifier()), - data: abi.encodeCall(MockSgxVerifier.init, (MAINNET_CONTRACT_OWNER, rollupAddressManager)), - registerTo: rollupAddressManager - }); - - /* Deploy VerifierRegistry */ - address verifierRegistry = deployProxy({ - name: "verifier_registry", - impl: address(new VerifierRegistry()), - data: abi.encodeCall(VerifierRegistry.init, (MAINNET_CONTRACT_OWNER, rollupAddressManager)), - registerTo: rollupAddressManager - }); - - // Add those 3 verifiers to the verifier registry - VerifierRegistry(verifierRegistry).addVerifier(verifier1, "sgx1"); - VerifierRegistry(verifierRegistry).addVerifier(verifier2, "sgx2"); - VerifierRegistry(verifierRegistry).addVerifier(verifier3, "sgx3"); - - // Leave out guardians "tier" for now. - // address guardianProverImpl = address(new GuardianProver()); - - // address guardianProverMinority = deployProxy({ - // name: "guardian_prover_minority", - // impl: guardianProverImpl, - // data: abi.encodeCall(GuardianProver.init, (address(0), rollupAddressManager)) - // }); - - // GuardianProver(guardianProverMinority).enableTaikoTokenAllowance(true); - - // address guardianProver = deployProxy({ - // name: "guardian_prover", - // impl: guardianProverImpl, - // data: abi.encodeCall(GuardianProver.init, (address(0), rollupAddressManager)) - // }); - - // register(rollupAddressManager, "tier_guardian_minority", guardianProverMinority); - // register(rollupAddressManager, "tier_guardian", guardianProver); - // register( - // rollupAddressManager, - // "tier_router", - // address(deployTierProvider(vm.envString("TIER_PROVIDER"))) - // ); - - // address[] memory guardians = vm.envAddress("GUARDIAN_PROVERS", ","); - - // GuardianProver(guardianProverMinority).setGuardians( - // guardians, uint8(NUM_MIN_MINORITY_GUARDIANS), true - // ); - // GuardianProver(guardianProverMinority).transferOwnership(owner); - - // GuardianProver(guardianProver).setGuardians( - // guardians, uint8(NUM_MIN_MAJORITY_GUARDIANS), true - // ); - // GuardianProver(guardianProver).transferOwnership(owner); - - // // No need to proxy these, because they are 3rd party.
If we want to modify, we simply - // // change the registerAddress("automata_dcap_attestation", address(attestation)); - // P256Verifier p256Verifier = new P256Verifier(); - // SigVerifyLib sigVerifyLib = new SigVerifyLib(address(p256Verifier)); - // PEMCertChainLib pemCertChainLib = new PEMCertChainLib(); - // address automateDcapV3AttestationImpl = address(new AutomataDcapV3Attestation()); - - // address automataProxy = deployProxy({ - // name: "automata_dcap_attestation", - // impl: automateDcapV3AttestationImpl, - // data: abi.encodeCall( - // AutomataDcapV3Attestation.init, (owner, address(sigVerifyLib), address(pemCertChainLib)) - // ), - // registerTo: rollupAddressManager - // }); - - // // Log addresses for the user to register sgx instance - // console2.log("SigVerifyLib", address(sigVerifyLib)); - // console2.log("PemCertChainLib", address(pemCertChainLib)); - // console2.log("AutomataDcapVaAttestation", automataProxy); - - // deployProxy({ - // name: "prover_set", - // impl: address(new ProverSet()), - // data: abi.encodeCall( - // ProverSet.init, (owner, vm.envAddress("PROVER_SET_ADMIN"), rollupAddressManager) - // ) - // }); - } - - function addressNotNull(address addr, string memory err) private pure { - require(addr != address(0), err); - } -} diff --git a/packages/protocol/scripts/DeployOnL1.s.sol b/packages/protocol/scripts/DeployOnL1.s.sol deleted file mode 100644 index 60627ab98f19..000000000000 --- a/packages/protocol/scripts/DeployOnL1.s.sol +++ /dev/null @@ -1,400 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "@openzeppelin/contracts/utils/Strings.sol"; -/* -import "../contracts/L1/TaikoToken.sol"; -import "../contracts/L1/TaikoL1.sol"; -import "../contracts/L1/provers/GuardianProver.sol"; -import "../contracts/L1/verifiers/PseZkVerifier.sol"; -import "../contracts/L1/verifiers/SgxVerifier.sol"; -import "../contracts/L1/verifiers/SgxAndZkVerifier.sol"; -import "../contracts/L1/verifiers/GuardianVerifier.sol"; -import "../contracts/L1/tiers/TaikoA6TierProvider.sol"; -import "../contracts/L1/actors/StandardProverPayment.sol"; -import "../contracts/L1/gov/TaikoTimelockController.sol"; -import "../contracts/L1/gov/TaikoGovernor.sol"; -import "../contracts/bridge/Bridge.sol"; -import "../contracts/tokenvault/ERC20Vault.sol"; -import "../contracts/tokenvault/ERC1155Vault.sol"; -import "../contracts/tokenvault/ERC721Vault.sol"; -import "../contracts/signal/SignalService.sol"; -import "../contracts/test/erc20/FreeMintERC20.sol"; -import "../contracts/test/erc20/MayFailFreeMintERC20.sol"; -import "../test/DeployCapability.sol"; - -/// @title DeployOnL1 -/// @notice This script deploys the core Taiko protocol smart contract on L1, -/// initializing the rollup. -contract DeployOnL1 is DeployCapability { - uint256 public constant NUM_GUARDIANS = 5; - - address public constant MAINNET_SECURITY_COUNCIL = 0x7C50d60743D3FCe5a39FdbF687AFbAe5acFF49Fd; - - address securityCouncil = - block.chainid == 1 ? 
MAINNET_SECURITY_COUNCIL : vm.envAddress("SECURITY_COUNCIL"); - - modifier broadcast() { - uint256 privateKey = vm.envUint("PRIVATE_KEY"); - require(privateKey != 0, "invalid priv key"); - vm.startBroadcast(); - _; - vm.stopBroadcast(); - } - - function run() external broadcast { - addressNotNull(vm.envAddress("TAIKO_L2_ADDRESS"), "TAIKO_L2_ADDRESS"); - addressNotNull(vm.envAddress("L2_SIGNAL_SERVICE"), "L2_SIGNAL_SERVICE"); - require(vm.envBytes32("L2_GENESIS_HASH") != 0, "L2_GENESIS_HASH"); - - // --------------------------------------------------------------- - // Deploy shared contracts - (address sharedAddressManager, address timelock) = deploySharedContracts(); - console2.log("sharedAddressManager: ", sharedAddressManager); - console2.log("timelock: ", timelock); - // --------------------------------------------------------------- - // Deploy rollup contracts - address rollupAddressManager = deployRollupContracts(sharedAddressManager, timelock); - - // --------------------------------------------------------------- - // Signal service need to authorize the new rollup - address signalServiceAddr = - AddressManager(sharedAddressManager).getAddress(uint64(block.chainid), "signal_service"); - addressNotNull(signalServiceAddr, "signalServiceAddr"); - SignalService signalService = SignalService(signalServiceAddr); - - address taikoL1Addr = - AddressManager(rollupAddressManager).getAddress(uint64(block.chainid), "taiko"); - addressNotNull(taikoL1Addr, "taikoL1Addr"); - TaikoL1 taikoL1 = TaikoL1(payable(taikoL1Addr)); - - uint64 l2ChainId = taikoL1.getConfig().chainId; - require(l2ChainId != block.chainid, "same chainid"); - - console2.log("------------------------------------------"); - console2.log("msg.sender: ", msg.sender); - console2.log("address(this): ", address(this)); - console2.log("signalService.owner(): ", signalService.owner()); - console2.log("------------------------------------------"); - - if (signalService.owner() == address(this)) { - signalService.authorize(taikoL1Addr, bytes32(block.chainid)); - signalService.authorize(vm.envAddress("TAIKO_L2_ADDRESS"), bytes32(uint256(l2ChainId))); - signalService.transferOwnership(timelock); - } else { - console2.log("------------------------------------------"); - console2.log("Warning - you need to transact manually:"); - console2.log("signalService.authorize(taikoL1Addr, bytes32(block.chainid))"); - console2.log("- signalService : ", signalServiceAddr); - console2.log("- taikoL1Addr : ", taikoL1Addr); - console2.log("- chainId : ", block.chainid); - } - - // --------------------------------------------------------------- - // Register shared contracts in the new rollup - copyRegister(rollupAddressManager, sharedAddressManager, "taiko_token"); - copyRegister(rollupAddressManager, sharedAddressManager, "signal_service"); - copyRegister(rollupAddressManager, sharedAddressManager, "bridge"); - - address proposer = vm.envAddress("PROPOSER"); - if (proposer != address(0)) { - register(rollupAddressManager, "proposer", proposer); - } - - address proposerOne = vm.envAddress("PROPOSER_ONE"); - if (proposerOne != address(0)) { - register(rollupAddressManager, "proposer_one", proposerOne); - } - - // --------------------------------------------------------------- - // Register L2 addresses - register(rollupAddressManager, "taiko", vm.envAddress("TAIKO_L2_ADDRESS"), l2ChainId); - register( - rollupAddressManager, "signal_service", vm.envAddress("L2_SIGNAL_SERVICE"), l2ChainId - ); - - // 
--------------------------------------------------------------- - // Deploy other contracts - deployAuxContracts(); - - if (AddressManager(sharedAddressManager).owner() == msg.sender) { - AddressManager(sharedAddressManager).transferOwnership(timelock); - console2.log("** sharedAddressManager ownership transferred to timelock:", timelock); - } - - AddressManager(rollupAddressManager).transferOwnership(timelock); - console2.log("** rollupAddressManager ownership transferred to timelock:", timelock); - } - - function deploySharedContracts() - internal - returns (address sharedAddressManager, address timelock) - { - sharedAddressManager = vm.envAddress("SHARED_ADDRESS_MANAGER"); - if (sharedAddressManager != address(0)) { - return (sharedAddressManager, vm.envAddress("TIMELOCK_CONTROLLER")); - } - - // Deploy the timelock - timelock = deployProxy({ - name: "timelock_controller", - impl: address(new TaikoTimelockController()), - data: bytes.concat(TaikoTimelockController.init.selector, abi.encode(7 days)) - }); - - sharedAddressManager = deployProxy({ - name: "shared_address_manager", - impl: address(new AddressManager()), - data: bytes.concat(AddressManager.init.selector) - }); - - address taikoToken = deployProxy({ - name: "taiko_token", - impl: address(new TaikoToken()), - data: bytes.concat( - TaikoToken.init.selector, - abi.encode( - vm.envString("TAIKO_TOKEN_NAME"), - vm.envString("TAIKO_TOKEN_SYMBOL"), - vm.envAddress("TAIKO_TOKEN_PREMINT_RECIPIENT") - ) - ), - registerTo: sharedAddressManager, - owner: timelock - }); - - address governor = deployProxy({ - name: "taiko_governor", - impl: address(new TaikoGovernor()), - data: bytes.concat(TaikoGovernor.init.selector, abi.encode(taikoToken, timelock)), - registerTo: address(0), - owner: timelock - }); - - // Setup time lock roles - TaikoTimelockController _timelock = TaikoTimelockController(payable(timelock)); - _timelock.grantRole(_timelock.PROPOSER_ROLE(), governor); - _timelock.grantRole(_timelock.PROPOSER_ROLE(), securityCouncil); - - _timelock.grantRole(_timelock.EXECUTOR_ROLE(), governor); - _timelock.grantRole(_timelock.EXECUTOR_ROLE(), securityCouncil); - - _timelock.grantRole(_timelock.CANCELLER_ROLE(), governor); - _timelock.grantRole(_timelock.CANCELLER_ROLE(), securityCouncil); - - _timelock.grantRole(_timelock.TIMELOCK_ADMIN_ROLE(), securityCouncil); - _timelock.revokeRole(_timelock.TIMELOCK_ADMIN_ROLE(), address(this)); - _timelock.revokeRole(_timelock.TIMELOCK_ADMIN_ROLE(), msg.sender); - - _timelock.transferOwnership(securityCouncil); - - // Deploy Bridging contracts - deployProxy({ - name: "signal_service", - impl: address(new SignalService()), - data: bytes.concat(SignalService.init.selector), - registerTo: sharedAddressManager, - owner: address(0) - }); - - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: bytes.concat(Bridge.init.selector, abi.encode(sharedAddressManager)), - registerTo: sharedAddressManager, - owner: timelock - }); - - console2.log("------------------------------------------"); - console2.log( - "Warning - you need to register *all* counterparty bridges to enable multi-hop bridging:" - ); - console2.log( - "sharedAddressManager.setAddress(remoteChainId, \"bridge\", address(remoteBridge))" - ); - console2.log("- sharedAddressManager : ", sharedAddressManager); - - // Deploy Vaults - deployProxy({ - name: "erc20_vault", - impl: address(new ERC20Vault()), - data: bytes.concat(BaseVault.init.selector, abi.encode(sharedAddressManager)), - registerTo: sharedAddressManager, - owner: 
timelock - }); - - deployProxy({ - name: "erc721_vault", - impl: address(new ERC721Vault()), - data: bytes.concat(BaseVault.init.selector, abi.encode(sharedAddressManager)), - registerTo: sharedAddressManager, - owner: timelock - }); - - deployProxy({ - name: "erc1155_vault", - impl: address(new ERC1155Vault()), - data: bytes.concat(BaseVault.init.selector, abi.encode(sharedAddressManager)), - registerTo: sharedAddressManager, - owner: timelock - }); - - console2.log("------------------------------------------"); - console2.log( - "Warning - you need to register *all* counterparty vaults to enable multi-hop bridging:" - ); - console2.log( - "sharedAddressManager.setAddress(remoteChainId, \"erc20_vault\", address(remoteERC20Vault))" - ); - console2.log( - "sharedAddressManager.setAddress(remoteChainId, \"erc721_vault\", address(remoteERC721Vault))" - ); - console2.log( - "sharedAddressManager.setAddress(remoteChainId, \"erc1155_vault\", address(remoteERC1155Vault))" - ); - console2.log("- sharedAddressManager : ", sharedAddressManager); - - // Deploy Bridged token implementations - register(sharedAddressManager, "bridged_erc20", address(new BridgedERC20())); - register(sharedAddressManager, "bridged_erc721", address(new BridgedERC721())); - register(sharedAddressManager, "bridged_erc1155", address(new BridgedERC1155())); - } - - function deployRollupContracts( - address _sharedAddressManager, - address timelock - ) - internal - returns (address rollupAddressManager) - { - addressNotNull(_sharedAddressManager, "sharedAddressManager"); - addressNotNull(timelock, "timelock"); - - rollupAddressManager = deployProxy({ - name: "rollup_address_manager", - impl: address(new AddressManager()), - data: bytes.concat(AddressManager.init.selector) - }); - - deployProxy({ - name: "taiko", - impl: address(new TaikoL1()), - data: bytes.concat( - TaikoL1.init.selector, - abi.encode(rollupAddressManager, vm.envBytes32("L2_GENESIS_HASH")) - ), - registerTo: rollupAddressManager, - owner: timelock - }); - - deployProxy({ - name: "assignment_hook", - impl: address(new StandardProverPayment()), - data: bytes.concat(StandardProverPayment.init.selector, abi.encode(rollupAddressManager)), - registerTo: address(0), - owner: timelock - }); - - deployProxy({ - name: "tier_provider", - impl: address(new TaikoA6TierProvider()), - data: bytes.concat(TaikoA6TierProvider.init.selector), - registerTo: rollupAddressManager, - owner: timelock - }); - - deployProxy({ - name: "tier_guardian", - impl: address(new GuardianVerifier()), - data: bytes.concat(GuardianVerifier.init.selector, abi.encode(rollupAddressManager)), - registerTo: rollupAddressManager, - owner: timelock - }); - - deployProxy({ - name: "tier_sgx", - impl: address(new SgxVerifier()), - data: bytes.concat(SgxVerifier.init.selector, abi.encode(rollupAddressManager)), - registerTo: rollupAddressManager, - owner: timelock - }); - - deployProxy({ - name: "tier_sgx_and_pse_zkevm", - impl: address(new SgxAndZkVerifier()), - data: bytes.concat(SgxAndZkVerifier.init.selector, abi.encode(rollupAddressManager)), - registerTo: rollupAddressManager, - owner: timelock - }); - - address pseZkVerifier = deployProxy({ - name: "tier_pse_zkevm", - impl: address(new PseZkVerifier()), - data: bytes.concat(PseZkVerifier.init.selector, abi.encode(rollupAddressManager)), - registerTo: rollupAddressManager, - owner: timelock - }); - - address[] memory plonkVerifiers = new address[](1); - plonkVerifiers[0] = deployYulContract("contracts/L1/verifiers/PlonkVerifier.yulp"); - - for (uint16 
i = 0; i < plonkVerifiers.length; ++i) { - register( - rollupAddressManager, - string(abi.encodePacked(PseZkVerifier(pseZkVerifier).getVerifierName(i))), - plonkVerifiers[i] - ); - } - - address guardianProver = deployProxy({ - name: "guardian_prover", - impl: address(new GuardianProver()), - data: bytes.concat(GuardianProver.init.selector, abi.encode(rollupAddressManager)), - registerTo: rollupAddressManager, - owner: address(0) - }); - - address[] memory guardians = vm.envAddress("GUARDIAN_PROVERS", ","); - uint8 minGuardians = uint8(vm.envUint("MIN_GUARDIANS")); - GuardianProver(guardianProver).setGuardians(guardians, minGuardians); - GuardianProver(guardianProver).transferOwnership(timelock); - } - - function deployAuxContracts() private { - address horseToken = address(new FreeMintERC20("Horse Token", "HORSE")); - console2.log("HorseToken", horseToken); - - address bullToken = address(new MayFailFreeMintERC20("Bull Token", "BULL")); - console2.log("BullToken", bullToken); - } - - function deployYulContract(string memory contractPath) private returns (address addr) { - string[] memory cmds = new string[](3); - cmds[0] = "bash"; - cmds[1] = "-c"; - cmds[2] = string.concat( - vm.projectRoot(), - "/bin/solc --yul --bin ", - string.concat(vm.projectRoot(), "/", contractPath), - " | grep -A1 Binary | tail -1" - ); - - bytes memory bytecode = vm.ffi(cmds); - assembly { - addr := create(0, add(bytecode, 0x20), mload(bytecode)) - } - - addressNotNull(addr, "failed yul deployment"); - console2.log(contractPath, addr); - } - - function addressNotNull(address addr, string memory err) private pure { - require(addr != address(0), err); - } -} -*/ \ No newline at end of file diff --git a/packages/protocol/scripts/L2_txn_simulation/CreateXChainTxn.s.sol b/packages/protocol/scripts/L2_txn_simulation/CreateXChainTxn.s.sol deleted file mode 100644 index 088e6ec880ee..000000000000 --- a/packages/protocol/scripts/L2_txn_simulation/CreateXChainTxn.s.sol +++ /dev/null @@ -1,35 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "forge-std/Script.sol"; -import "forge-std/console2.sol"; - -import "../../contracts/examples/xERC20Example.sol"; - -contract CreateXChainTxn is Script { - address public Bob_deployer_and_xchain_sender = 0x8943545177806ED17B9F23F0a21ee5948eCaa776; //Also .env PRIV_KEY is tied to Bob - address public Alice_xchain_receiver = 0xE25583099BA105D9ec0A67f5Ae86D90e50036425; - - function run() external { - vm.startBroadcast(); - - //Deploy a contract and mints 100k for Bob - xERC20Example exampleXChainToken = new xERC20Example("xChainExample", "xCE", Bob_deployer_and_xchain_sender, 100_000 * 1e18); - - // ChainId to send to - uint256 dummyChainId = 12346; // Does not matter at this point - - console2.log("Sender balance (before sending):", exampleXChainToken.balanceOf(Bob_deployer_and_xchain_sender)); - exampleXChainToken.xtransfer(Alice_xchain_receiver, 2 * 1e18, block.chainid, dummyChainId); - - console2.log("Sender balance:", exampleXChainToken.balanceOf(Bob_deployer_and_xchain_sender)); - console2.log("Receiver balance:", exampleXChainToken.balanceOf(Alice_xchain_receiver)); - - vm.stopBroadcast(); - } -} diff --git a/packages/protocol/scripts/L2_txn_simulation/ProposeBlock.s.sol b/packages/protocol/scripts/L2_txn_simulation/ProposeBlock.s.sol deleted file mode 100644 index 61328c80c9fa..000000000000 --- 
a/packages/protocol/scripts/L2_txn_simulation/ProposeBlock.s.sol +++ /dev/null @@ -1,78 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "forge-std/Script.sol"; -import "forge-std/console2.sol"; - -import "../../contracts/L1/TaikoL1.sol"; - -contract ProposeBlock is Script { - address public taikoL1Address = 0x9fCF7D13d10dEdF17d0f24C62f0cf4ED462f65b7;//address(0);// TaikoL1 proxy address -> Get from the deployment - address sender = 0x8943545177806ED17B9F23F0a21ee5948eCaa776; // With pre-generated eth - - function run() external { - - require(taikoL1Address != address(0), "based operator not set"); - - vm.startBroadcast(); - - bytes[] memory txLists = new bytes[](1); - // The L2 chainId with which i encoded the TXNs were 167011 - // THe nonce was 0 - bytes memory firstAddressSendingNonce0 = hex"02f87683028c6380843b9aca00847735940083030d4094f93ee4cf8c6c40b329b0c0626f28333c132cf24188016345785d8a000080c080a08f0f52d943504cecea0d6ce317c2fde8b0c27b1e449d85fcf98ccd2f50ac804ba04d5d56356518c1de0c1ece644a8a2fe64e6cc136cd8db0a21a21f72c167353c6"; - bytes memory secondAddressSendingNonce0 = hex"02f87683028c6380843b9aca00847735940083030d4094f93ee4cf8c6c40b329b0c0626f28333c132cf24188016345785d8a000080c080a0622e7060e09afd2100784bdc88ebb838729128bb6eb40f8b7f458430d56dafd4a006fe5d1a466788f941020a2278860c3f2642e44108c666ecd25b30d1b2f7a420"; - bytes memory thirdAddressSendingNonce0 = hex"02f87683028c6380843b9aca00847735940083030d4094f93ee4cf8c6c40b329b0c0626f28333c132cf24188016345785d8a000080c001a0558488f3af91777c382d2ab6ac3507f5d6b906431534193c1a45cc2a08b2825ea0495efd571c9ea5a5290f10efaa219f8c31b4e714745737c4e019df76f7a6df4b"; - - // The outcome of the above is the rlp encoded list (not concatenated but RLP encoded with: https://toolkit.abdk.consulting/ethereum#key-to-address,rlp) - txLists[0] = hex"f90171b87902f87683028c6280843b9aca00847735940083030d4094f93ee4cf8c6c40b329b0c0626f28333c132cf241880de0b6b3a764000080c080a07f983645ddf8365d14e5fb4e3b07c19fe31e23edd9ee4a737388acc2da7e64a3a072a56043512806a6de5f66f28bb659236eea41c9d66db8493f436804c42723d3b87902f87683028c6280843b9aca00847735940083030d4094f93ee4cf8c6c40b329b0c0626f28333c132cf241880de0b6b3a764000080c001a030911ab2ebf76f1e1bfe00d721207d929053efb051d50708a10dd9f66f84bacba07705a7cdb86ff00aa8c131ef3c4cb2ea2f2f4730d93308f1afbb94a04c1c9ae9b87902f87683028c6280843b9aca00847735940083030d4094f93ee4cf8c6c40b329b0c0626f28333c132cf241880de0b6b3a764000080c001a07da8dfb5bc3b7b353f9614bcd83733168500d1e06f2bcdac761cc54c85847e6aa03b041b0605e86aa379ff0f58a60743da411dfd1a9d4f1d18422a862f67a57fee"; - - bytes32 txListHash = keccak256(txLists[0]); //Since we not using Blobs, we need this - - // MetaData related - bytes[] memory metasEncoded = new bytes[](1); - TaikoData.BlockMetadata memory meta; - console2.log(txLists[0].length); - - meta = createBlockMetaDataForFirstBlockDebug(sender, 1, uint64(block.timestamp), uint24(txLists[0].length), txListHash); - - metasEncoded[0] = abi.encode(meta); - - TaikoL1(taikoL1Address).proposeBlock{value: 0.1 ether }(metasEncoded, txLists); - - vm.stopBroadcast(); - } - - function createBlockMetaDataForFirstBlockDebug( - address coinbase, - uint64 l2BlockNumber, - uint64 unixTimestamp, - uint24 txListByteSize, - bytes32 txListHash - ) - internal - returns (TaikoData.BlockMetadata memory meta) - { - meta.blockHash = 
0xab80a9c4daa571aa308e967c9a6b4bf21ba8842d95d73d28be112b6fe0618e7c; // Randomly set it to smth - - //TaikoData.Block memory parentBlock = L1.getBlock(l2BlockNumber - 1); - meta.parentMetaHash = 0x0000000000000000000000000000000000000000000000000000000000000000; // This is the genesis block's metaHash - meta.parentBlockHash = 0xdf90a9c4daa571aa308e967c9a6b4bf21ba8842d95d73d28be112b6fe0618e8c; // This is the genesis block's blockhash - meta.l1Hash = blockhash(30); //L1 private network's L1 blockheight, submit this block between 30 and 30+128 blcok of L1. - meta.difficulty = block.prevrandao; - meta.blobHash = txListHash; - meta.coinbase = coinbase; - meta.l2BlockNumber = l2BlockNumber; - meta.gasLimit = 15_000_000; - meta.l1StateBlockNumber = uint32(30); // Submit this block between 30 and 30+128 blcok of L1. - meta.timestamp = unixTimestamp; - - meta.txListByteOffset = 0; - meta.txListByteSize = txListByteSize; // Corresponding txn list byte size - meta.blobUsed = false; - } -} diff --git a/packages/protocol/scripts/L2_txn_simulation/createL2Txn.py b/packages/protocol/scripts/L2_txn_simulation/createL2Txn.py deleted file mode 100644 index 279a86d3f6ca..000000000000 --- a/packages/protocol/scripts/L2_txn_simulation/createL2Txn.py +++ /dev/null @@ -1,50 +0,0 @@ -from web3 import Web3 -from eth_abi import encode -import argparse - -RPC_URL_L2 = 'http://127.0.0.1:8545' # Anything is fine for now as long as we dont have the L2 network, but if we have we can automate nonce and gas settings -w3_taiko_l2 = Web3(Web3.HTTPProvider(RPC_URL_L2)) - -# Some pre-loaded ETH addresses from Kurtosis private network (NO secret, no harm to use for private testnets!) -sender_addresses = ['0x8943545177806ED17B9F23F0a21ee5948eCaa776', '0xE25583099BA105D9ec0A67f5Ae86D90e50036425', '0x614561D2d143621E126e87831AEF287678B442b8'] -sender_pks = ['bcdf20249abf0ed6d944c0288fad489e33f66b3960d9e6229c1cd214ed3bbe31', '39725efee3fb28614de3bacaffe4cc4bd8c436257e2c8bb887c4b5c4be45e76d', '53321db7c1e331d93a11a41d16f004d7ff63972ec8ec7c25db329728ceeb1710'] - -receiver = '0xf93Ee4Cf8c6c40b329b0c0626F28333c132CF241' # This address also has pre-loaded ETH addresses - -parser = argparse.ArgumentParser() - -parser.add_argument("-n", "--nonce", help="collective nonce", - type=int, required=True) -parser.add_argument("-c", "--chainid", help="l2 chainId", - type=int, required=True) - -transaction_list = [] - -if __name__ == "__main__": - args = parser.parse_args() - nonce = args.nonce - chainId = args.chainid - - # Build the new tx list - idx = 0 - for sender in sender_addresses: - # Build the tx - transaction = { - 'chainId': chainId, - 'from': sender, - 'to': receiver, - 'value': w3_taiko_l2.to_wei('1', 'ether'), - 'nonce': nonce, # later we can use something like: w3_taiko_l2.eth.get_transaction_count(address1), - 'gas': 200000, - 'maxFeePerGas': 2000000000, # w3_taiko_l2.eth.gas_price or something - 'maxPriorityFeePerGas': 1000000000, - } - - # 2. 
Sign tx with a private key - signed_txn = w3_taiko_l2.eth.account.sign_transaction(transaction, sender_pks[idx]) - - # Most probably we need to zlib + RLP-encode the transactions, not just concatenate them - print("Txn ",idx, " bytes:") - print(signed_txn.rawTransaction.hex()) - transaction_list.append(signed_txn) - idx += 1 \ No newline at end of file diff --git a/packages/protocol/scripts/L2_txn_simulation/readme.md deleted file mode 100644 index 3088ed0b3d7c..000000000000 --- a/packages/protocol/scripts/L2_txn_simulation/readme.md +++ /dev/null @@ -1,54 +0,0 @@ -# Create / simulate L2 transactions (propose a transaction and an xtransfer of a dummy xChainToken) - -In order to test the L2 node execution hook functionality, we need to create valid L2 transactions and submit them to TaikoL1 - where a hook will be built in to listen for the proposeBlock call and execute those transactions. This folder is used to create L2 transactions (using the same pre-funded accounts Kurtosis sets up by default) and submit them to our "L1" while using the local taiko_reth image as the EL. - -## Prerequisites - -Prerequisites can also be found in the `deployments/local_deployment.md` file. - -1. Testnet up and running: -```shell -kurtosis run github.com/ethpandaops/ethereum-package --args-file YOUR_PATH_TO_NETWORK_CONFIG/network_params.yaml -``` - -2. Main contracts deployed: -```shell -forge script --rpc-url http://127.0.0.1:PORT scripts/DeployL1Locally.s.sol -vvvv --broadcast --private-key PK --legacy -``` -# ProposeBlock - -## 1. Create and print L2 transactions ("off-chain") - -Run the script to generate 3 ether-transfer transactions and print them out. The `-n` flag stands for the nonce, and `-c` is for the L2 chainId. - -```shell -$ python3 createL2Txn.py -n <nonce> -c <chainId> -``` - -## 2. Prepare the script with proper data and fire away the L1 transaction - -Edit the `ProposeBlock.s.sol` file to set the valid `basedOperatorAddress` and also add the 3 signed transactions generated above (they are already in the `ProposeBlock.s.sol` file, so there is no need to regenerate and add them unless the network `id` or `nonce` is different), then fire away the L1 transaction with the script below: - -```shell -$ forge script --rpc-url http://127.0.0.1:YOUR_PORT scripts/L2_txn_simulation/ProposeBlock.s.sol -vvvv --broadcast --private-key <PRIVATE_KEY> --legacy -``` - -## 3. In case of a TXN failure, you can get the error via the `debug_traceTransaction` RPC call - -Command: - -```shell -curl http://127.0.0.1:YOUR_PORT \ --X POST \ --H "Content-Type: application/json" \ ---data '{"method":"debug_traceTransaction","params":["YOUR_TXN_HASH", {"tracer": "callTracer"}], "id":1,"jsonrpc":"2.0"}' -``` - - -# Send a dummy xChainToken - -In order to send cross-chain transactions with `xCallOptions()`, when the network is up and running, deploy an `xChainERC20Token` contract and fire away an `xtransfer()` transaction.
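One detail of the ProposeBlock flow above is worth spelling out: the comments in `createL2Txn.py` and `ProposeBlock.s.sol` say the raw signed transactions are combined into a single RLP-encoded list (not simply concatenated), but none of the scripts here actually performs that step. The sketch below is a minimal Python illustration of that combination, assuming the plain RLP-of-a-list format those comments describe (no zlib compression); treat it as an illustration rather than the canonical encoder.

```python
# Sketch: combine raw signed transactions into one RLP-encoded tx list and hash it.
# Assumes the "RLP encoded list" format mentioned in ProposeBlock.s.sol (no compression).
import sys

import rlp
from web3 import Web3

# Raw signed transactions as 0x-prefixed hex strings, e.g. the ones printed by createL2Txn.py.
raw_txs = [bytes.fromhex(arg.removeprefix("0x")) for arg in sys.argv[1:]]

tx_list = rlp.encode(raw_txs)        # single byte blob, candidate for txLists[0]
tx_list_hash = Web3.keccak(tx_list)  # candidate txListHash / blobHash

print("txLists[0]:", "0x" + tx_list.hex())
print("txListHash:", "0x" + bytes(tx_list_hash).hex())
```

Run it with the raw transaction hex strings from step 1 as arguments. The `xtransfer()` call described in the previous paragraph is fired with the script below: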
- -```shell -forge script --rpc-url http://127.0.0.1:YOUR_PORT scripts/L2_txn_simulation/CreateXChainTxn.s.sol -vvvv --broadcast --private-key PK_IN_ENV_FILE --legacy -``` \ No newline at end of file diff --git a/packages/protocol/scripts/L2_txn_simulation/sendTx.py b/packages/protocol/scripts/L2_txn_simulation/sendTx.py deleted file mode 100755 index ed02191ba5df..000000000000 --- a/packages/protocol/scripts/L2_txn_simulation/sendTx.py +++ /dev/null @@ -1,69 +0,0 @@ -from web3 import Web3 -from eth_abi import encode -import argparse - -RPC_URL_L2 = 'http://127.0.0.1:' # Anything is fine for now as long as we dont have the L2 network, but if we have we can automate nonce and gas settings -w3_taiko_l2 = Web3(Web3.HTTPProvider(RPC_URL_L2)) - -# Some pre-loaded ETH addresses from Kurtosis private network (NO secret, no harm to use for private testnets!) -sender_addresses = ['0x8943545177806ED17B9F23F0a21ee5948eCaa776'] -sender_pks = ['bcdf20249abf0ed6d944c0288fad489e33f66b3960d9e6229c1cd214ed3bbe31'] - -receiver = '0xf93Ee4Cf8c6c40b329b0c0626F28333c132CF241' # This address also has pre-loaded ETH addresses - -parser = argparse.ArgumentParser() - -parser.add_argument("-p", "--port", help="port on localhost", - type=str, required=True) -# parser.add_argument("-c", "--chainid", help="l2 chainId", -# type=int, required=True) - -transaction_list = [] - -if __name__ == "__main__": - args = parser.parse_args() - port = args.port - w3_taiko_l2 = Web3(Web3.HTTPProvider(RPC_URL_L2+port)) - chainId = 167010 - - # Build the new tx list - idx = 0 - for sender in sender_addresses: - # Build the tx - transaction = { - 'chainId': chainId, - 'from': sender, - 'to': receiver, - 'value': w3_taiko_l2.to_wei('1', 'ether'), - 'nonce': w3_taiko_l2.eth.get_transaction_count(sender), - 'gas': 200000, - 'maxFeePerGas': 2000000000, # w3_taiko_l2.eth.gas_price or something - 'maxPriorityFeePerGas': 1000000000, - } - - # Debug prints of balance - # # Get the balance - # balance_wei = w3_taiko_l2.eth.get_balance(sender) - - # # Convert balance from Wei to Ether - # balance_eth = w3_taiko_l2.from_wei(balance_wei, 'ether') - # print("Balance before:", balance_eth) - - # 2. 
Sign tx with a private key - signed_txn = w3_taiko_l2.eth.account.sign_transaction(transaction, sender_pks[idx]) - - # print("RawTransaction:") - # print(signed_txn.rawTransaction) - print("RawTransaction.hex():") - print(signed_txn.raw_transaction.hex()) - - txn_hash = w3_taiko_l2.eth.send_raw_transaction(signed_txn.raw_transaction) - print("Txn hash:") - print(txn_hash.hex()) - - # # Get the balance - # balance_wei = w3_taiko_l2.eth.get_balance(sender) - - # # Convert balance from Wei to Ether - # balance_eth = w3_taiko_l2.from_wei(balance_wei, 'ether') - # print("Balance after:", balance_eth) \ No newline at end of file diff --git a/packages/protocol/scripts/SetAddress.s.sol b/packages/protocol/scripts/SetAddress.s.sol deleted file mode 100644 index d16a533461ed..000000000000 --- a/packages/protocol/scripts/SetAddress.s.sol +++ /dev/null @@ -1,42 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "forge-std/Script.sol"; -import "forge-std/console2.sol"; - -import "../contracts/common/AddressManager.sol"; - -contract SetAddress is Script { - uint256 public adminPrivateKey = vm.envUint("PRIVATE_KEY"); - - address public proxyAddress = vm.envAddress("PROXY_ADDRESS"); - - uint64 public domain = uint64(vm.envUint("DOMAIN")); - - bytes32 public name = vm.envBytes32("NAME"); - - address public addr = vm.envAddress("ADDRESS"); - - AddressManager proxy; - - function run() external { - require(adminPrivateKey != 0, "PRIVATE_KEY not set"); - require(proxyAddress != address(0), "PROXY_ADDRESS not set"); - require(domain != 0, "DOMAIN NOT SET"); - require(name != bytes32(0), "NAME NOT SET"); - require(addr != address(0), "ADDR NOT SET"); - - vm.startBroadcast(adminPrivateKey); - - proxy = AddressManager(payable(proxyAddress)); - - proxy.setAddress(domain, name, addr); - - vm.stopBroadcast(); - } -} diff --git a/packages/protocol/scripts/SetRemoteBridgeSuites.s.sol b/packages/protocol/scripts/SetRemoteBridgeSuites.s.sol deleted file mode 100644 index b1bc030b42c3..000000000000 --- a/packages/protocol/scripts/SetRemoteBridgeSuites.s.sol +++ /dev/null @@ -1,92 +0,0 @@ -// SPDX-License-Identifier: MIT -// _____ _ _ _ _ -// |_ _|_ _(_) |_____ | | __ _| |__ ___ -// | |/ _` | | / / _ \ | |__/ _` | '_ (_-< -// |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/ - -pragma solidity ^0.8.20; - -import "../test/DeployCapability.sol"; -import "../contracts/L1/gov/TaikoTimelockController.sol"; - -contract SetRemoteBridgeSuites is DeployCapability { - uint256 public privateKey = vm.envUint("PRIVATE_KEY"); - uint256 public securityCouncilPrivateKey = vm.envUint("SECURITY_COUNCIL_PRIVATE_KEY"); - address public timelockAddress = vm.envAddress("TIMELOCK_ADDRESS"); - address public addressManagerAddress = vm.envAddress("ADDRESS_MANAGER_ADDRESS"); - uint256[] public remoteChainIDs = vm.envUint("REMOTE_CHAIN_IDS", ","); - address[] public remoteBridges = vm.envAddress("REMOTE_BRIDGES", ","); - address[] public remoteERC20Vaults = vm.envAddress("REMOTE_ERC20_VAULTS", ","); - address[] public remoteERC721Vaults = vm.envAddress("REMOTE_ERC721_VAULTS", ","); - address[] public remoteERC1155Vaults = vm.envAddress("REMOTE_ERC1155_VAULTS", ","); - - function run() external { - require( - remoteChainIDs.length == remoteBridges.length, "invalid remote bridge addresses length" - ); - require( - remoteChainIDs.length == remoteERC20Vaults.length, - "invalid remote 
ERC20Vault addresses length" - ); - require( - remoteChainIDs.length == remoteERC721Vaults.length, - "invalid remote ERC721Vault addresses length" - ); - require( - remoteChainIDs.length == remoteERC1155Vaults.length, - "invalid remote ERC1155Vault addresses length" - ); - - vm.startBroadcast(privateKey); - - for (uint256 i; i < remoteChainIDs.length; ++i) { - uint64 chainid = uint64(remoteChainIDs[i]); - - if (securityCouncilPrivateKey == 0) { - register(addressManagerAddress, "bridge", remoteBridges[i], chainid); - register(addressManagerAddress, "erc20_vault", remoteERC20Vaults[i], chainid); - register(addressManagerAddress, "erc721_vault", remoteERC721Vaults[i], chainid); - register(addressManagerAddress, "erc1155_vault", remoteERC1155Vaults[i], chainid); - continue; - } - - registerByTimelock(addressManagerAddress, "bridge", remoteBridges[i], chainid); - registerByTimelock(addressManagerAddress, "erc20_vault", remoteERC20Vaults[i], chainid); - registerByTimelock( - addressManagerAddress, "erc721_vault", remoteERC721Vaults[i], chainid - ); - registerByTimelock( - addressManagerAddress, "erc1155_vault", remoteERC1155Vaults[i], chainid - ); - } - - vm.stopBroadcast(); - } - - function registerByTimelock( - address registerTo, - string memory name, - address addr, - uint64 chainId - ) - internal - { - bytes32 salt = bytes32(block.timestamp); - - bytes memory payload = abi.encodeWithSelector( - bytes4(keccak256("setAddress(uint64,bytes32,address)")), - chainId, - bytes32(bytes(name)), - addr - ); - - TaikoTimelockController timelock = TaikoTimelockController(payable(timelockAddress)); - - timelock.schedule(registerTo, 0, payload, bytes32(0), salt, 0); - - timelock.execute(registerTo, 0, payload, bytes32(0), salt); - - console2.log("> ", name, "@", registerTo); - console2.log("\t addr : ", addr); - } -} diff --git a/packages/protocol/scripts/confs/network_params.yaml b/packages/protocol/scripts/confs/network_params.yaml deleted file mode 100644 index e0c2a2bb1d76..000000000000 --- a/packages/protocol/scripts/confs/network_params.yaml +++ /dev/null @@ -1,25 +0,0 @@ -participants: - - el_type: reth - el_image: taiko_reth - cl_type: lighthouse - cl_image: sigp/lighthouse:latest - el_extra_params: ["--num_of_l2s", "2"] - cl_extra_params: [--always-prepare-payload, --prepare-payload-lookahead, "12000"] - - el_type: reth - el_image: taiko_reth - cl_type: teku - cl_image: consensys/teku:latest - el_extra_params: ["--num_of_l2s", "2"] -network_params: - network_id: '160010' -additional_services: - - blockscout - - blockscout_l2_2 -port_publisher: - nat_exit_ip: KURTOSIS_IP_ADDR_PLACEHOLDER - el: - enabled: true - public_port_start: 32000 - additional_services: - enabled: true - public_port_start: 64000 \ No newline at end of file diff --git a/packages/protocol/scripts/download_solc.sh b/packages/protocol/scripts/download_solc.sh deleted file mode 100755 index 20c7873391eb..000000000000 --- a/packages/protocol/scripts/download_solc.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh - -set -e - -protocol_dir=$(realpath "$(dirname $0)/..") -solc_bin=${protocol_dir}/bin/solc - -if [ -f "${solc_bin}" ]; then - exit 0 -fi - -mkdir -p "$(dirname ${solc_bin})" - -VERSION=v0.8.18 - -if [ "$(uname)" = 'Darwin' ]; then - SOLC_FILE_NAME=solc-macos -elif [ "$(uname)" = 'Linux' ]; then - SOLC_FILE_NAME=solc-static-linux -else - echo "unsupported platform $(uname)" - exit 1 -fi - -wget -O "${solc_bin}" https://github.com/ethereum/solidity/releases/download/$VERSION/$SOLC_FILE_NAME - -chmod +x "${solc_bin}" diff --git 
a/packages/protocol/scripts/launch_second_node.sh b/packages/protocol/scripts/launch_second_node.sh deleted file mode 100755 index ae2c1bb6696d..000000000000 --- a/packages/protocol/scripts/launch_second_node.sh +++ /dev/null @@ -1,181 +0,0 @@ -#!/bin/bash - -# Function to check if a command exists -command_exists() { - command -v "$1" >/dev/null 2>&1 -} - -# Check for Docker installation -if ! command_exists docker; then - echo "Docker is not installed. Please install Docker first." - exit 1 -fi - -# Function to get container ID by name prefix -get_container_id() { - docker ps --format '{{.ID}}' --filter "name=$1" -} - -# Function to copy file from container to host -copy_from_container() { - docker cp "$1:$2" "$3" -} - -# Function to get network name from container -get_network_name() { - docker inspect -f '{{range $key, $value := .NetworkSettings.Networks}}{{$key}}{{end}}' "$1" -} - -# Function to get container IP address -get_container_ip() { - docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$1" -} - -clean_directory() { - if [ -d "$1" ]; then - echo "Cleaning directory: $1" - rm -rf "$1"/* - else - echo "Creating directory: $1" - mkdir -p "$1" - fi -} - -# Function to get or create JWT secret -get_or_create_jwt_secret() { - local jwt_path="$HOME/jwt/jwtsecret" - if [ -f "$jwt_path" ]; then - echo "Using existing JWT secret." - else - echo "Creating new JWT secret." - mkdir -p "$(dirname "$jwt_path")" - openssl rand -hex 32 | tr -d "\n" > "$jwt_path" - fi - echo "$jwt_path" -} - -# Get or create JWT secret -JWT_SECRET_PATH=$(get_or_create_jwt_secret) -echo "JWT secret path: $JWT_SECRET_PATH" - -# Get container IDs -EL_CONTAINER_ID=$(get_container_id "el-2-reth-teku") -CL_CONTAINER_ID=$(get_container_id "cl-2-teku-reth") - -if [ -z "$EL_CONTAINER_ID" ] || [ -z "$CL_CONTAINER_ID" ]; then - echo "Failed to find required containers." - exit 1 -fi - -# Get network name -NETWORK_NAME=$(get_network_name "$EL_CONTAINER_ID") -if [ -z "$NETWORK_NAME" ]; then - echo "Failed to get network name." - exit 1 -fi -echo "Using network: $NETWORK_NAME" - -# Get EL container IP -EL_IP=$(get_container_ip "$EL_CONTAINER_ID") - -# Get bootnode from EL container -BOOTNODE=$(docker exec "$EL_CONTAINER_ID" ps aux | grep docker-init | grep -o 'bootnodes=[^ ]*' | cut -d= -f2) - -# Get CL bootnode -CL_BOOTNODE=$(docker exec "$CL_CONTAINER_ID" ps aux | grep docker-init | grep -o 'p2p-discovery-bootnodes=[^ ]*' | cut -d= -f2) - -# Clean and recreate required directories -clean_directory ~/data/reth/execution-data -clean_directory ~/data/teku/teku-beacon-data -clean_directory ~/data/teku/validator-keys/teku-secrets -clean_directory ~/data/teku/validator-keys/teku-keys - -# Create required directories -mkdir -p ~/network-configs ~/jwt - -# Copy required files -copy_from_container "$EL_CONTAINER_ID" "/network-configs/genesis.json" ~/network-configs/ -copy_from_container "$CL_CONTAINER_ID" "/network-configs/genesis.ssz" ~/network-configs/ -copy_from_container "$CL_CONTAINER_ID" "/network-configs/config.yaml" ~/network-configs/ - -# Launch EL container -echo "Launching EL container..." 
-EL_CONTAINER_ID=$(docker run -d --name reth-node3 --network "$NETWORK_NAME" \ - -v ~/data/reth/execution-data:/data/reth/execution-data \ - -v ~/network-configs:/network-configs \ - -v ~/jwt:/jwt \ - -p 8545:8545 \ - -p 10110:10110 \ - taiko_reth node -vvv --datadir=/data/reth/execution-data \ - --chain=/network-configs/genesis.json \ - --http --http.port=8545 --http.addr=0.0.0.0 \ - --http.corsdomain="*" --http.api=admin,net,eth,web3,debug,trace \ - --ws --ws.addr=0.0.0.0 --ws.port=8550 --ws.api=net,eth \ - --ws.origins="*" --nat=extip:0.0.0.0 \ - --authrpc.port=8551 --authrpc.jwtsecret=/jwt/jwtsecret \ - --authrpc.addr=0.0.0.0 --metrics=0.0.0.0:9003 \ - --discovery.port=42011 --port=42011 \ - --bootnodes="$BOOTNODE") - -if [ -z "$EL_CONTAINER_ID" ]; then - echo "Failed to launch EL container." - exit 1 -fi - -# Get the IP of the newly launched EL container -NEW_EL_IP=$(get_container_ip "$EL_CONTAINER_ID") -if [ -z "$NEW_EL_IP" ]; then - echo "Failed to get IP of the new EL container." - exit 1 -fi - -echo "New EL container IP: $NEW_EL_IP" - -# Wait for the EL container to be ready (you might want to implement a more robust check) -sleep 10 - -# Launch CL container -echo "Launching CL container..." -docker run -d \ - --name teku-node2 \ - --network "$NETWORK_NAME" \ - -v ~/data/teku/teku-beacon-data:/data/teku/teku-beacon-data \ - -v ~/data/teku/validator-keys:/validator-keys/ \ - -v ~/network-configs:/network-configs \ - -v ~/jwt:/jwt/ \ - --entrypoint /bin/sh \ - consensys/teku:latest -c " - MY_IP=\$(hostname -i) && \ - exec /opt/teku/bin/teku \ - --logging=INFO \ - --log-destination=CONSOLE \ - --network=/network-configs/config.yaml \ - --data-path=/data/teku/teku-beacon-data \ - --data-storage-mode=ARCHIVE \ - --p2p-enabled=true \ - --p2p-peer-lower-bound=1 \ - --p2p-advertised-ip=\$MY_IP \ - --p2p-discovery-site-local-addresses-enabled=true \ - --p2p-port=9000 \ - --rest-api-enabled=true \ - --rest-api-docs-enabled=true \ - --rest-api-interface=0.0.0.0 \ - --rest-api-port=4000 \ - --rest-api-host-allowlist=* \ - --data-storage-non-canonical-blocks-enabled=true \ - --ee-jwt-secret-file=/jwt/jwtsecret \ - --ee-endpoint=http://$NEW_EL_IP:8551 \ - --metrics-enabled \ - --metrics-interface=0.0.0.0 \ - --metrics-host-allowlist='*' \ - --metrics-categories=BEACON,PROCESS,LIBP2P,JVM,NETWORK,PROCESS \ - --metrics-port=8008 \ - --ignore-weak-subjectivity-period-enabled=true \ - --initial-state=/network-configs/genesis.ssz \ - --p2p-discovery-bootnodes=$CL_BOOTNODE \ - --validator-keys=/validator-keys/teku-keys:/validator-keys/teku-secrets \ - --validators-proposer-default-fee-recipient=0x8943545177806ED17B9F23F0a21ee5948eCaa776 \ - --validators-graffiti=2-reth-teku - " - -echo "Second node (EL and CL) launched successfully!" 
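The launch script above sleeps a fixed 10 seconds before wiring up the CL container, and its own comment notes that a more robust readiness check would be preferable. Below is a minimal sketch of such a check; it assumes the new EL node's HTTP RPC is reachable on localhost:8545 (as published by the `docker run -p 8545:8545` flag above) and uses web3.py v6+ method names, with `wait_for_el` being an illustrative name rather than anything the scripts define.

```python
import time

from web3 import Web3


def wait_for_el(rpc_url: str = "http://127.0.0.1:8545", timeout_s: int = 120) -> int:
    """Poll the EL JSON-RPC until it answers, or raise on timeout."""
    w3 = Web3(Web3.HTTPProvider(rpc_url))
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        try:
            if w3.is_connected():
                return w3.eth.block_number  # node is up and serving RPC
        except Exception:
            pass  # connection refused while the container is still starting
        time.sleep(2)
    raise TimeoutError(f"EL node at {rpc_url} not ready after {timeout_s}s")


if __name__ == "__main__":
    print("EL ready at block", wait_for_el())
```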
\ No newline at end of file diff --git a/packages/protocol/scripts/merge_contracts.py b/packages/protocol/scripts/merge_contracts.py deleted file mode 100644 index 7722d5d5ebdd..000000000000 --- a/packages/protocol/scripts/merge_contracts.py +++ /dev/null @@ -1,27 +0,0 @@ -import os -import argparse - -def merge_solidity_files(root_dir, output_file='../out/taiko_protocol.md'): - with open(output_file, 'w') as outfile: - for subdir, dirs, files in os.walk(root_dir): - for file in files: - if file.endswith('.sol') and not file.endswith('.t.sol'): - file_path = os.path.join(subdir, file) - if "/test/" in file_path: - continue - print("merging ", file_path) - relative_path = os.path.relpath(file_path, root_dir) - outfile.write(f"## {relative_path}\n") - outfile.write("```solidity\n") - with open(file_path, 'r') as infile: - outfile.write(infile.read()) - outfile.write("\n```\n\n") - - -if __name__ == "__main__": - # parser = argparse.ArgumentParser(description="Merge Solidity files into a Markdown file.") - # parser.add_argument("root_dir", type=str, help="Root directory containing Solidity files") - # args = parser.parse_args() - # merge_solidity_files(args.root_dir) - merge_solidity_files("../contracts") - print("merged into ../out/taiko_protocol.md") diff --git a/packages/protocol/scripts/propose_block.sh b/packages/protocol/scripts/propose_block.sh deleted file mode 100755 index 1f27608c8579..000000000000 --- a/packages/protocol/scripts/propose_block.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -# Run the setup_deps.sh script to ensure dependencies are set up -#./scripts/setup_deps.sh - -# Read the RPC port from temporary file -RPC_PORT=$(cat /tmp/kurtosis_rpc_port) - -# Load the .env file and extract the PRIVATE_KEY -if [ -f .env ]; then - export $(grep -v '^#' .env | xargs) - PRIVATE_KEY=${PRIVATE_KEY} -else - echo ".env file not found. Please create a .env file with your PRIVATE_KEY." - exit 1 -fi - -if [ -z "$PRIVATE_KEY" ]; then - echo "PRIVATE_KEY not found in the .env file." - exit 1 -fi - -# Run the forge foundry script using the extracted RPC port and PRIVATE_KEY -FORGE_COMMAND="forge script --rpc-url http://127.0.0.1:$RPC_PORT scripts/L2_txn_simulation/ProposeBlock.s.sol -vvvv --broadcast --private-key $PRIVATE_KEY --legacy" - -echo "Running forge foundry script..." -eval $FORGE_COMMAND - -echo "Forge script execution completed." diff --git a/packages/protocol/scripts/setup_deps.sh b/packages/protocol/scripts/setup_deps.sh deleted file mode 100755 index d787319b97b6..000000000000 --- a/packages/protocol/scripts/setup_deps.sh +++ /dev/null @@ -1,228 +0,0 @@ -#!/bin/bash - -# Function to check if a command exists -command_exists() { - command -v "$1" >/dev/null 2>&1 -} - -# Function to check if Docker daemon is running -is_docker_running() { - docker info >/dev/null 2>&1 -} - -# Check for Docker installation and daemon status -if ! command_exists docker; then - echo "Docker is not installed. Please install Docker first." - exit 1 -elif ! is_docker_running; then - echo "Docker daemon is not running. Please start Docker first." - exit 1 -else - echo "Docker is installed and running." -fi - -# Check if the taiko_reth image exists -# if ! docker image inspect taiko_reth >/dev/null 2>&1; then - echo "Docker image taiko_reth does not exist. Building the image..." - if ! docker build ../../ -t taiko_reth; then - echo "Failed to build the Docker image taiko_reth." - exit 1 - fi -# else -# echo "Docker image taiko_reth already exists." 
-# fi - -# Function to install Kurtosis on macOS -install_kurtosis_mac() { - if ! command_exists brew; then - echo "Homebrew is not installed. Installing Homebrew..." - /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" - fi - echo "Installing Kurtosis CLI with Homebrew..." - brew install kurtosis-tech/tap/kurtosis-cli -} - -# Function to install Kurtosis on Ubuntu -install_kurtosis_ubuntu() { - echo "Installing Kurtosis CLI with apt..." - echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list - sudo apt update - sudo apt install -y kurtosis-cli -} - -# Detect the operating system and install Kurtosis accordingly -if [[ "$OSTYPE" == "darwin"* ]]; then - echo "Detected macOS." - install_kurtosis_mac -elif [[ "$OSTYPE" == "linux-gnu"* ]]; then - if [ -f /etc/os-release ]; then - . /etc/os-release - #if [[ "$ID" == "ubuntu" ]]; then - echo "Detected Ubuntu." - install_kurtosis_ubuntu - #else - # echo "This script currently supports only Ubuntu and macOS." - # exit 1 - #fi - else - echo "This script currently supports only Ubuntu and macOS." - exit 1 - fi -else - echo "This script currently supports only Ubuntu and macOS." - exit 1 -fi - -# Check if Kurtosis is installed and its version -if command_exists kurtosis; then - KURTOSIS_VERSION=$(kurtosis version | grep -oP '(?<=CLI Version:\s)[\d.]+') - echo "Kurtosis CLI is already installed. Version: $KURTOSIS_VERSION" -else - echo "Kurtosis CLI installation failed or is not installed correctly." - exit 1 -fi - -# Run the Kurtosis command and capture its output -echo "Running Kurtosis command..." -KURTOSIS_OUTPUT=$(kurtosis run github.com/adaki2004/ethereum-package --args-file ./scripts/confs/network_params.yaml) - -# Extract the Blockscout port -BLOCKSCOUT_PORT=$(echo "$KURTOSIS_OUTPUT" | grep -A 5 "^[a-f0-9]\+ *blockscout " | grep "http:" | sed -E 's/.*-> http:\/\/127\.0\.0\.1:([0-9]+).*/\1/' | head -n 1) - -if [ -z "$BLOCKSCOUT_PORT" ]; then - echo "Failed to extract Blockscout port." - exit 1 -fi - -echo "Extracted Blockscout port: $BLOCKSCOUT_PORT" -echo "$BLOCKSCOUT_PORT" > /tmp/kurtosis_blockscout_port -# # Print the entire Kurtosis output for debugging -echo "Kurtosis Output:" -echo "$KURTOSIS_OUTPUT" - -# Extract the "User Services" section -USER_SERVICES_SECTION=$(echo "$KURTOSIS_OUTPUT" | awk '/^========================================== User Services ==========================================/{flag=1;next}/^$/{flag=0}flag') -# Print the "User Services" section for debugging -echo "User Services Section:" -echo "$USER_SERVICES_SECTION" -# Extract the dynamic port assigned to the rpc service for "el-1-reth-lighthouse" -RPC_PORT=$(echo "$USER_SERVICES_SECTION" | grep -A 5 "el-1-reth-lighthouse" | grep "rpc: 8545/tcp" | sed -E 's/.* -> 127.0.0.1:([0-9]+).*/\1/') -if [ -z "$RPC_PORT" ]; then - echo "Failed to extract RPC port from User Services section." - exit 1 -else - echo "Extracted RPC port: $RPC_PORT" - echo "$RPC_PORT" > /tmp/kurtosis_rpc_port -fi - -# Extract the Starlark output section -STARLARK_OUTPUT=$(echo "$KURTOSIS_OUTPUT" | awk '/^Starlark code successfully run. 
Output was:/{flag=1; next} /^$/{flag=0} flag') - -# Extract the beacon_http_url for cl-1-lighthouse-reth -BEACON_HTTP_URL=$(echo "$STARLARK_OUTPUT" | jq -r '.all_participants[] | select(.cl_context.beacon_service_name == "cl-1-lighthouse-reth") | .cl_context.beacon_http_url') - -if [ -z "$BEACON_HTTP_URL" ]; then - echo "Failed to extract beacon_http_url for cl-1-lighthouse-reth." - exit 1 -else - echo "Extracted beacon_http_url: $BEACON_HTTP_URL" - echo "$BEACON_HTTP_URL" > /tmp/kurtosis_beacon_http_url -fi - -# Find the correct Docker container -CONTAINER_ID=$(docker ps --format '{{.ID}} {{.Names}}' | grep 'el-1-reth-lighthouse--' | awk '{print $1}') - -if [ -z "$CONTAINER_ID" ]; then - echo "Failed to find the el-1-reth-lighthouse container." - exit 1 -else - echo "Found container ID: $CONTAINER_ID" -fi - -# Check if the file exists in the container -FILE_PATH="/app/rbuilder/config-gwyneth-reth.toml" -if ! docker exec "$CONTAINER_ID" test -f "$FILE_PATH"; then - echo "File $FILE_PATH does not exist in the container." - exit 1 -fi - -# Update the cl_node_url in the file, regardless of its current content -ESCAPED_URL=$(echo "$BEACON_HTTP_URL" | sed 's/[\/&]/\\&/g') -UPDATE_COMMAND="sed -i '/^cl_node_url[[:space:]]*=/c\cl_node_url = [\"$ESCAPED_URL\"]' $FILE_PATH" -if docker exec "$CONTAINER_ID" sh -c "$UPDATE_COMMAND"; then - echo "Successfully updated $FILE_PATH in the container." -else - echo "Failed to update $FILE_PATH in the container." - exit 1 -fi - -# Verify the change -VERIFY_COMMAND="grep 'cl_node_url' $FILE_PATH" -VERIFICATION=$(docker exec "$CONTAINER_ID" sh -c "$VERIFY_COMMAND") -echo "Updated line in $FILE_PATH: $VERIFICATION" -# Load the .env file and extract the PRIVATE_KEY -if [ -f .env ]; then - export $(grep -v '^#' .env | xargs) - PRIVATE_KEY=${PRIVATE_KEY} -else - echo ".env file not found. Please create a .env file with your PRIVATE_KEY." - exit 1 -fi -if [ -z "$PRIVATE_KEY" ]; then - echo "PRIVATE_KEY not found in the .env file." - exit 1 -fi -# Run the forge foundry script using the extracted RPC port and PRIVATE_KEY -FORGE_COMMAND="forge script --rpc-url http://127.0.0.1:$RPC_PORT scripts/DeployL1Locally.s.sol -vvvv --broadcast --private-key $PRIVATE_KEY --legacy" -echo "Running forge foundry script..." -FORGE_OUTPUT=$(eval $FORGE_COMMAND | tee /dev/tty) -echo "Script execution completed." - - -# Ensure the log file exists in the current working directory -touch ./rbuilder.log - -echo "Starting rbuilder and streaming logs to ./rbuilder.log..." -docker exec -d "$CONTAINER_ID" /bin/bash -c " - /app/start_rbuilder.sh > /tmp/rbuilder.log 2>&1 & - RBUILDER_PID=\$! - tail -f /tmp/rbuilder.log & - TAIL_PID=\$! - wait \$RBUILDER_PID -" - -# Start a background process to stream logs from the container to the host file -docker exec "$CONTAINER_ID" tail -f /tmp/rbuilder.log >> ./rbuilder.log & -FILE_LOG_PID=$! - -# Start another process to stream logs to the terminal -docker exec "$CONTAINER_ID" tail -f /tmp/rbuilder.log & -TERMINAL_LOG_PID=$! - -# Set up a trap to handle Ctrl+C (SIGINT) -trap 'echo "Interrupt received. Stopping terminal log streaming, but file logging continues."; kill $TERMINAL_LOG_PID; exit' INT TERM - -echo "rbuilder is running in the container." -echo "Logs are being streamed to ./rbuilder.log and to this terminal." -echo "Press Ctrl+C to stop watching logs in the terminal. rbuilder and file logging will continue." 
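The jq filter above implies that the Kurtosis Starlark output is a JSON document with an `all_participants` array whose entries expose `cl_context.beacon_service_name` and `cl_context.beacon_http_url`. For readers who find the shell pipeline hard to follow, a rough Python equivalent is sketched below; only the fields the jq filter already relies on are assumed, and the function name is illustrative.

```python
import json
import sys


def beacon_http_url(starlark_json: str, service_name: str = "cl-1-lighthouse-reth") -> str:
    """Mirror of the jq filter used in setup_deps.sh."""
    doc = json.loads(starlark_json)
    for participant in doc.get("all_participants", []):
        cl = participant.get("cl_context", {})
        if cl.get("beacon_service_name") == service_name:
            return cl["beacon_http_url"]
    raise KeyError(f"no participant with beacon_service_name={service_name!r}")


if __name__ == "__main__":
    # e.g. python3 beacon_url.py < starlark_output.json
    print(beacon_http_url(sys.stdin.read()))
```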
- -# Wait for the terminal log streaming to be manually interrupted -wait $TERMINAL_LOG_PID - -# Check if rbuilder is still running -if docker exec "$CONTAINER_ID" pgrep -f "/app/start_rbuilder.sh" > /dev/null; then - echo "rbuilder is still running in the container. Logs continue to be written to ./rbuilder.log" -else - echo "rbuilder has stopped unexpectedly." - kill $FILE_LOG_PID - exit 1 -fi - -# Extract the path to run-latest.json -RUN_LATEST_PATH=$(echo "$FORGE_OUTPUT" | grep "Transactions saved to:" | sed 's/Transactions saved to: //') - -# Run the verification script -echo "Starting contract verification..." -BLOCKSCOUT_PORT=$(cat /tmp/kurtosis_blockscout_port) -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" -"$SCRIPT_DIR/verify_contracts.sh" "$BLOCKSCOUT_PORT" "$RUN_LATEST_PATH" diff --git a/packages/protocol/scripts/test_deploy_on_l1.sh b/packages/protocol/scripts/test_deploy_on_l1.sh deleted file mode 100755 index 9591cc4ded84..000000000000 --- a/packages/protocol/scripts/test_deploy_on_l1.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh - -# This script is only used by `pnpm deploy:foundry`. -set -e - -PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 \ -PROPOSER=0x0000000000000000000000000000000000000000 \ -PROPOSER_ONE=0x0000000000000000000000000000000000000000 \ -GUARDIAN_PROVERS="0x1000777700000000000000000000000000000001,0x1000777700000000000000000000000000000002,0x1000777700000000000000000000000000000003,0x1000777700000000000000000000000000000004,0x1000777700000000000000000000000000000005" \ -MIN_GUARDIANS=3 \ -TAIKO_L2_ADDRESS=0x1000777700000000000000000000000000000001 \ -L2_SIGNAL_SERVICE=0x1000777700000000000000000000000000000007 \ -SECURITY_COUNCIL=0x60997970C51812dc3A010C7d01b50e0d17dc79C8 \ -TAIKO_TOKEN_PREMINT_RECIPIENT=0xa0Ee7A142d267C1f36714E4a8F75612F20a79720 \ -TAIKO_TOKEN_NAME="Taiko Token Katla" \ -TAIKO_TOKEN_SYMBOL=TTKOk \ -SHARED_ADDRESS_MANAGER=0x0000000000000000000000000000000000000000 \ -L2_GENESIS_HASH=0xee1950562d42f0da28bd4550d88886bc90894c77c9c9eaefef775d4c8223f259 \ -forge script script/DeployOnL1.s.sol:DeployOnL1 \ - --fork-url http://localhost:8545 \ - --broadcast \ - --ffi \ - -vvvv \ - --private-key 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 \ - --block-gas-limit 100000000 diff --git a/packages/protocol/scripts/upgrade_to.sh b/packages/protocol/scripts/upgrade_to.sh deleted file mode 100755 index 9f68e09dce92..000000000000 --- a/packages/protocol/scripts/upgrade_to.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -set -e -: "${FORK_URL:=http://localhost:8545}" - -forge script script/upgrade/Upgrade$CONTRACT.s.sol:Upgrade$CONTRACT \ - --fork-url $FORK_URL \ - --broadcast \ - --ffi \ - -vvvv \ No newline at end of file diff --git a/packages/protocol/scripts/verify_contracts.sh b/packages/protocol/scripts/verify_contracts.sh deleted file mode 100755 index 7ae2e41c8973..000000000000 --- a/packages/protocol/scripts/verify_contracts.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash - -# Check if both BLOCKSCOUT_PORT and RUN_LATEST_PATH are provided -if [ -z "$1" ] || [ -z "$2" ]; then - echo "Error: Both BLOCKSCOUT_PORT and RUN_LATEST_PATH must be provided" - echo "Usage: $0 " - exit 1 -fi - -BLOCKSCOUT_PORT="$1" -RUN_LATEST_PATH="$2" - -echo "Using Blockscout port: $BLOCKSCOUT_PORT" -echo "Using run-latest.json path: $RUN_LATEST_PATH" - -# Function to verify a regular contract -verify_contract() { - local address=$1 - local contract_path=$2 - local contract_name=$3 - - echo 
"Verifying contract: $contract_name at address $address" - forge verify-contract "$address" "$contract_path:$contract_name" \ - --watch --verifier-url "http://localhost:$BLOCKSCOUT_PORT/api" \ - --verifier blockscout --chain-id 160010 -} - -# Function to verify a proxy contract -verify_proxy_contract() { - local address=$1 - local arguments=$2 - - echo "Verifying proxy contract at address: $address" - echo "Constructor arguments: $arguments" - forge verify-contract "$address" "node_modules/@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol:ERC1967Proxy" \ - --watch --verifier-url "http://localhost:$BLOCKSCOUT_PORT/api" \ - --verifier blockscout --chain-id 160010 \ - --constructor-args "$arguments" --skip-is-verified-check -} - -# Read the run-latest.json file -if [ ! -f "$RUN_LATEST_PATH" ]; then - echo "Error: run-latest.json not found at $RUN_LATEST_PATH" - exit 1 -fi - -RUN_LATEST=$(cat "$RUN_LATEST_PATH") - -# Verify regular contracts -verify_all_creates() { - local contract_name=$1 - local contract_path=$2 - - echo "Verifying all instances of $contract_name" - local addresses=$(jq -r ".transactions[] | select(.contractName == \"$contract_name\" and .transactionType == \"CREATE\") | .contractAddress" <<< "$RUN_LATEST") - - if [ -z "$addresses" ]; then - echo "No CREATE transactions found for $contract_name" - else - echo "$addresses" | while read -r address; do - if [ ! -z "$address" ]; then - verify_contract "$address" "$contract_path" "$contract_name" - fi - done - fi -} - -verify_all_creates "AddressManager" "contracts/common/AddressManager.sol" -verify_all_creates "TaikoToken" "contracts/tko/TaikoToken.sol" -verify_all_creates "TaikoL1" "contracts/L1/TaikoL1.sol" -verify_all_creates "ChainProver" "contracts/L1/ChainProver.sol" -verify_all_creates "VerifierRegistry" "contracts/L1/VerifierRegistry.sol" -verify_all_creates "MockSgxVerifier" "contracts/L1/verifiers/MockSgxVerifier.sol" - -# Verify proxy contracts -echo "Verifying ERC1967Proxy contracts:" -PROXY_CONTRACTS=$(jq -r '.transactions[] | select(.contractName == "ERC1967Proxy" and .transactionType == "CREATE")' <<< "$RUN_LATEST") -echo "$PROXY_CONTRACTS" | jq -c '.' | while read -r proxy; do - if [ ! -z "$proxy" ]; then - address=$(echo "$proxy" | jq -r '.contractAddress') - args=$(echo "$proxy" | jq -r '.arguments | join(",")') - if [ ! -z "$address" ] && [ ! -z "$args" ]; then - verify_proxy_contract "$address" "$args" - else - echo "Skipping proxy contract due to missing address or arguments" - fi - fi -done - -echo "All contracts verified." 
\ No newline at end of file diff --git a/packages/protocol/src/Counter.sol b/packages/protocol/src/Counter.sol deleted file mode 100644 index aded7997b0c3..000000000000 --- a/packages/protocol/src/Counter.sol +++ /dev/null @@ -1,14 +0,0 @@ -// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.13; - -contract Counter { - uint256 public number; - - function setNumber(uint256 newNumber) public { - number = newNumber; - } - - function increment() public { - number++; - } -} diff --git a/packages/protocol/test/DeployCapability.sol b/packages/protocol/test/DeployCapability.sol deleted file mode 100644 index 1def977c3c0c..000000000000 --- a/packages/protocol/test/DeployCapability.sol +++ /dev/null @@ -1,88 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol"; -import "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol"; - -import "forge-std/console2.sol"; -import "forge-std/Script.sol"; - -import "../contracts/common/AddressManager.sol"; - -/// @title DeployCapability -abstract contract DeployCapability is Script { - error ADDRESS_NULL(); - - function deployProxy( - string memory name, - address impl, - bytes memory data, - address registerTo - ) - internal - returns (address proxy) - { - proxy = address(new ERC1967Proxy(impl, data)); - - if (registerTo != address(0)) { - AddressManager(registerTo).setAddress( - uint64(block.chainid), bytes32(bytes(name)), proxy - ); - } - - console2.log(">", name, "@", registerTo); - console2.log(" proxy :", proxy); - console2.log(" impl :", impl); - console2.log(" owner :", OwnableUpgradeable(proxy).owner()); - console2.log(" msg.sender :", msg.sender); - console2.log(" this :", address(this)); - - vm.writeJson( - vm.serializeAddress("deployment", name, proxy), - string.concat(vm.projectRoot(), "/deployments/deploy_l1.json") - ); - } - - function deployProxy( - string memory name, - address impl, - bytes memory data - ) - internal - returns (address proxy) - { - return deployProxy(name, impl, data, address(0)); - } - - function register(address registerTo, string memory name, address addr) internal { - register(registerTo, name, addr, uint64(block.chainid)); - } - - function register( - address registerTo, - string memory name, - address addr, - uint64 chainId - ) - internal - { - if (registerTo == address(0)) revert ADDRESS_NULL(); - if (addr == address(0)) revert ADDRESS_NULL(); - AddressManager(registerTo).setAddress(chainId, bytes32(bytes(name)), addr); - console2.log("> ", name, "@", registerTo); - console2.log("\t addr : ", addr); - } - - function copyRegister(address registerTo, address readFrom, string memory name) internal { - if (registerTo == address(0)) revert ADDRESS_NULL(); - if (readFrom == address(0)) revert ADDRESS_NULL(); - - register({ - registerTo: registerTo, - name: name, - addr: AddressManager(readFrom).getAddress(uint64(block.chainid), bytes32(bytes(name))), - chainId: uint64(block.chainid) - }); - } -} diff --git a/packages/protocol/test/HelperContracts.sol b/packages/protocol/test/HelperContracts.sol deleted file mode 100644 index 2253a3be915b..000000000000 --- a/packages/protocol/test/HelperContracts.sol +++ /dev/null @@ -1,51 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../contracts/bridge/Bridge.sol"; -import "../contracts/signal/SignalService.sol"; - -contract BadReceiver { - receive() external payable { - revert("can not send to this contract"); - } - - fallback() external payable { - revert("can 
not send to this contract"); - } - - function transfer() public pure { - revert("this fails"); - } -} - -contract GoodReceiver is IMessageInvocable { - receive() external payable { } - - function onMessageInvocation(bytes calldata data) public payable { - address addr = abi.decode(data, (address)); - payable(addr).transfer(address(this).balance / 2); - } -} - -// NonNftContract -contract NonNftContract { - uint256 dummyData; - - constructor(uint256 _dummyData) { - dummyData = _dummyData; - } -} - -contract SkipProofCheckSignal is SignalService { - function proveSignalReceived( - uint64, /*srcChainId*/ - address, /*app*/ - bytes32, /*signal*/ - bytes calldata /*proof*/ - ) - public - pure - override - returns (uint256) - { } -} diff --git a/packages/protocol/test/L1/Guardians.t.sol b/packages/protocol/test/L1/Guardians.t.sol deleted file mode 100644 index e3da2e89613b..000000000000 --- a/packages/protocol/test/L1/Guardians.t.sol +++ /dev/null @@ -1,88 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; - -import "../TaikoTest.sol"; -/* -contract DummyGuardians is Guardians { - uint256 public operationId; - - function init() external initializer { - __Essential_init(); - } - - function approve(bytes32 hash) public returns (bool) { - return super.approve(operationId++, hash); - } -} - -contract TestSignalService is TaikoTest { - DummyGuardians target; - - function getSigners(uint256 numGuardians) internal returns (address[] memory signers) { - signers = new address[](numGuardians); - for (uint256 i = 0; i < numGuardians; ++i) { - signers[i] = randAddress(); - vm.deal(signers[i], 1 ether); - } - } - - function setUp() public { - target = DummyGuardians( - deployProxy({ - name: "guardians", - impl: address(new DummyGuardians()), - data: bytes.concat(DummyGuardians.init.selector) - }) - ); - } - - function test_guardians_set_guardians() public { - vm.expectRevert(Guardians.INVALID_GUARDIAN_SET.selector); - target.setGuardians(getSigners(0), 0); - - vm.expectRevert(Guardians.INVALID_MIN_GUARDIANS.selector); - target.setGuardians(getSigners(5), 0); - - vm.expectRevert(Guardians.INVALID_MIN_GUARDIANS.selector); - target.setGuardians(getSigners(5), 6); - } - - function test_guardians_set_guardians2() public { - address[] memory signers = getSigners(5); - signers[0] = address(0); - vm.expectRevert(Guardians.INVALID_GUARDIAN.selector); - target.setGuardians(signers, 4); - - signers[0] = signers[1]; - vm.expectRevert(Guardians.INVALID_GUARDIAN_SET.selector); - target.setGuardians(signers, 4); - } - - function test_guardians_approve() public { - address[] memory signers = getSigners(5); - target.setGuardians(signers, 3); - - bytes32 hash = keccak256("paris"); - for (uint256 i; i < 5; ++i) { - vm.prank(signers[0]); - assertEq(target.approve(hash), false); - assertEq(target.isApproved(hash), false); - } - - hash = keccak256("singapore"); - for (uint256 i; i < 5; ++i) { - vm.startPrank(signers[i]); - target.approve(hash); - - assertEq(target.approve(hash), i >= 2); - assertEq(target.isApproved(hash), i >= 2); - vm.stopPrank(); - } - - // changing the settings will invalid all approval history - target.setGuardians(signers, 2); - assertEq(target.version(), 2); - assertEq(target.isApproved(hash), false); - } -} -*/ diff --git a/packages/protocol/test/L1/SgxVerifier.t.sol b/packages/protocol/test/L1/SgxVerifier.t.sol deleted file mode 100644 index 4afb1209629d..000000000000 --- a/packages/protocol/test/L1/SgxVerifier.t.sol +++ /dev/null @@ -1,55 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma 
solidity ^0.8.20; - -import "./TaikoL1TestBase.sol"; -/* -contract TestSgxVerifier is TaikoL1TestBase { - function deployTaikoL1() internal override returns (TaikoL1) { - return -TaikoL1(payable(deployProxy({ name: "taiko", impl: address(new TaikoL1()), data: "" }))); - } - - function test_addInstancesByOwner() external { - address[] memory _instances = new address[](3); - _instances[0] = SGX_X_1; - _instances[1] = SGX_Y; - _instances[2] = SGX_Z; - sv.addInstances(_instances); - } - - function test_addInstancesByOwner_WithoutOwnerRole() external { - address[] memory _instances = new address[](3); - _instances[0] = SGX_X_0; - _instances[1] = SGX_Y; - _instances[2] = SGX_Z; - - vm.expectRevert(); - vm.prank(Bob, Bob); - sv.addInstances(_instances); - } - - function test_addInstancesBySgxInstance() external { - address[] memory _instances = new address[](2); - _instances[0] = SGX_Y; - _instances[1] = SGX_Z; - - bytes memory signature = _getSignature(_instances, 0x4); - - vm.prank(Bob, Bob); - sv.addInstances(0, SGX_X_1, _instances, signature); - } - - function _getSignature( - address[] memory _instances, - uint256 privKey - ) - private - pure - returns (bytes memory signature) - { - bytes32 digest = keccak256(abi.encode("ADD_INSTANCES", _instances)); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privKey, digest); - signature = abi.encodePacked(r, s, v); - } -} -*/ diff --git a/packages/protocol/test/L1/TaikoL1.t.sol b/packages/protocol/test/L1/TaikoL1.t.sol deleted file mode 100644 index a0e4b38d3ee7..000000000000 --- a/packages/protocol/test/L1/TaikoL1.t.sol +++ /dev/null @@ -1,114 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; - -import "./TaikoL1TestBase.sol"; - -contract TaikoL1Test is TaikoL1TestBase { - function deployTaikoL1() internal override returns (TaikoL1) { - return - TaikoL1(payable(deployProxy({ name: "taiko", impl: address(new TaikoL1()), data: "" }))); - } - - function test_L1_propose_prove_and_verify_blocks_sequentially() external { - giveEthAndTko(Alice, 100 ether, 100 ether); - - TaikoData.BlockMetadata memory meta; - - vm.roll(block.number + 1); - vm.warp(block.timestamp + 12); - - bytes32 parentMetaHash; - bytes32 parentBlockHash = GENESIS_BLOCK_HASH; - for (uint64 blockId = 1; blockId <= 20; blockId++) { - printVariables("before propose & prove & verify"); - // Create metadata and propose the block - meta = createBlockMetaData(Alice, blockId, 1, true, parentMetaHash, parentBlockHash); - proposeBlock(Alice, meta, ""); - - //Save arent data for next block iteration - parentMetaHash = keccak256(abi.encode(meta)); - parentBlockHash = meta.blockHash; - - // Create proofs and prove a block - bytes32 newRoot = randBytes32(); // Currently does not matter what do we feed as newRoot - // as verification is mocked!! 
- ChainProver.ProofBatch memory blockProofs = - createProofs(uint64(block.number), newRoot, Alice, true); - proveBlock(Alice, abi.encode(blockProofs)); - - //Wait enought time and verify block - vm.warp(uint32(block.timestamp + L1.SECURITY_DELAY_AFTER_PROVEN() + 1)); - vm.roll(block.number + 10); - //verifyBlock(1); - parentMetaHash = keccak256(abi.encode(meta)); - printVariables("after verify"); - } - } - - function test_L1_propose_some_blocks_in_a_row_then_prove_and_verify() external { - giveEthAndTko(Alice, 100 ether, 100 ether); - - TaikoData.BlockMetadata[] memory blockMetaDatas = new TaikoData.BlockMetadata[](20); - - vm.roll(block.number + 1); - vm.warp(block.timestamp + 12); - - bytes32 parentMetaHash; - bytes32 parentBlockHash = GENESIS_BLOCK_HASH; - for (uint64 blockId = 1; blockId <= 20; blockId++) { - printVariables("before propose & prove & verify"); - // Create metadata and propose the block - blockMetaDatas[blockId - 1] = - createBlockMetaData(Alice, blockId, 1, true, parentMetaHash, parentBlockHash); - - proposeBlock(Alice, blockMetaDatas[blockId - 1], ""); - - //Save arent data for next block iteration - parentMetaHash = keccak256(abi.encode(blockMetaDatas[blockId - 1])); - parentBlockHash = blockMetaDatas[blockId - 1].blockHash; - - vm.roll(block.number + 1); - vm.warp(block.timestamp + 12); - } - - for (uint64 blockId = 1; blockId <= 20; blockId++) { - bytes32 newRoot = randBytes32(); // Currently does not matter what do we feed as newRoot - // as verification is mocked!! - ChainProver.ProofBatch memory blockProofs = - createProofs(uint64(block.number), newRoot, Alice, true); - proveBlock(Alice, abi.encode(blockProofs)); - - //Wait enought time and verify block (currently we simply just "wait enough" from latest - // block and not time it perfectly) - vm.warp(uint32(block.timestamp + L1.SECURITY_DELAY_AFTER_PROVEN() + 1)); - vm.roll(block.number + 10); - //verifyBlock(1); - parentMetaHash = keccak256(abi.encode(blockMetaDatas[blockId - 1])); - printVariables("after verify 1"); - } - } - - // This test does not fail anymore, because proposing is possible but validating of the preconf window violation will be done - // So for now, not needed ! Commenting out. When PR approved, i'll delete also. 
- // function test_L1_propose_block_outside_the_4_epoch_window() external { - // giveEthAndTko(Alice, 100 ether, 100 ether); - - // TaikoData.BlockMetadata memory meta; - - // vm.roll(block.number + 1); - // vm.warp(block.timestamp + 12); - - // bytes32 parentMetaHash; - // bytes32 parentBlockHash = GENESIS_BLOCK_HASH; - // // Create metadata and propose the block 129 blocks later only - // meta = createBlockMetaData(Alice, 1, 1, true, parentMetaHash, parentBlockHash); - // vm.roll(block.number + 129); - // vm.warp(block.timestamp + 129 * 12); - - // proposeBlock(Alice, meta, TaikoErrors.L1_INVALID_L1_STATE_BLOCK.selector); - // } - - function test_print_genesis_hash() external pure { - console2.logBytes32(keccak256("GENESIS_BLOCK_HASH")); - } -} diff --git a/packages/protocol/test/L1/TaikoL1LibProvingWithTiers.t.sol b/packages/protocol/test/L1/TaikoL1LibProvingWithTiers.t.sol deleted file mode 100644 index f5370caf6cfc..000000000000 --- a/packages/protocol/test/L1/TaikoL1LibProvingWithTiers.t.sol +++ /dev/null @@ -1,820 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; - -import "./TaikoL1TestBase.sol"; -/* -contract TaikoL1Tiers is TaikoL1 { - function getConfig() public view override returns (TaikoData.Config memory config) { - config = TaikoL1.getConfig(); - - config.maxBlocksToVerifyPerProposal = 0; - config.blockMaxProposals = 10; - config.blockRingBufferSize = 12; - config.livenessBond = 1e18; // 1 Taiko token - } -} - -contract Verifier { - fallback(bytes calldata) external returns (bytes memory) { - return bytes.concat(keccak256("taiko")); - } -} - -contract TaikoL1LibProvingWithTiers is TaikoL1TestBase { - function deployTaikoL1() internal override returns (TaikoL1 taikoL1) { - taikoL1 = TaikoL1( - payable(deployProxy({ name: "taiko", impl: address(new TaikoL1Tiers()), data: "" })) - ); - } - - function proveHigherTierProof( - TaikoData.BlockMetadata memory meta, - bytes32 parentHash, - bytes32 signalRoot, - bytes32 blockHash, - uint16 minTier - ) - internal - { - uint16 tierToProveWith; - if (minTier == LibTiers.TIER_OPTIMISTIC) { - tierToProveWith = LibTiers.TIER_SGX; - } else if (minTier == LibTiers.TIER_SGX) { - tierToProveWith = LibTiers.TIER_SGX_AND_PSE_ZKEVM; - } else if (minTier == LibTiers.TIER_SGX_AND_PSE_ZKEVM) { - tierToProveWith = LibTiers.TIER_GUARDIAN; - } - proveBlock(Carol, Carol, meta, parentHash, blockHash, signalRoot, tierToProveWith, ""); - } - - function test_L1_ContestingWithSameProof() external { - giveEthAndTko(Alice, 1e7 ether, 1000 ether); - giveEthAndTko(Carol, 1e7 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e6 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // blockhash:blockId - proveBlock(Bob, Bob, meta, parentHash, blockHash, signalRoot, meta.minTier, ""); - - // Try to contest - but should revert with L1_ALREADY_PROVED - proveBlock( - Carol, - Carol, - meta, - parentHash, - blockHash, - signalRoot, - meta.minTier, - TaikoErrors.L1_ALREADY_PROVED.selector - ); - - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_ContestingWithDifferentButCorrectProof() external { - giveEthAndTko(Alice, 1e8 ether, 1000 ether); - giveEthAndTko(Carol, 1e8 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) - // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // signalRoot instead of blockHash - uint16 minTier = meta.minTier; - - proveBlock(Bob, Bob, meta, parentHash, signalRoot, signalRoot, minTier, ""); - - // Try to contest - proveBlock(Carol, Carol, meta, parentHash, blockHash, signalRoot, minTier, ""); - - vm.roll(block.number + 15 * 12); - - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - // Cannot verify block because it is contested.. - verifyBlock(Carol, 1); - - proveHigherTierProof(meta, parentHash, signalRoot, blockHash, minTier); - - vm.warp(block.timestamp + L1.getTier(LibTiers.TIER_GUARDIAN).cooldownWindow + 1); - // Now can verify - console2.log("Probalom verify-olni"); - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_ContestingWithSgxProof() external { - giveEthAndTko(Alice, 1e8 ether, 1000 ether); - giveEthAndTko(Carol, 1e8 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // signalRoot instead of blockHash - uint16 minTier = meta.minTier; - proveBlock(Bob, Bob, meta, parentHash, signalRoot, signalRoot, minTier, ""); - - // Try to contest - proveBlock(Carol, Carol, meta, parentHash, blockHash, signalRoot, minTier, ""); - - vm.roll(block.number + 15 * 12); - - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - // Cannot verify block because it is contested.. - verifyBlock(Carol, 1); - - proveHigherTierProof(meta, parentHash, signalRoot, blockHash, minTier); - - // Otherwise just not contest - vm.warp(block.timestamp + L1.getTier(LibTiers.TIER_GUARDIAN).cooldownWindow + 1); - // Now can verify - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_ContestingWithDifferentButInCorrectProof() external { - giveEthAndTko(Alice, 1e8 ether, 1000 ether); - giveEthAndTko(Carol, 1e8 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) - // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // signalRoot instead of blockHash - uint16 minTier = meta.minTier; - - proveBlock(Bob, Bob, meta, parentHash, blockHash, signalRoot, minTier, ""); - - if (minTier == LibTiers.TIER_OPTIMISTIC) { - // Try to contest - proveBlock(Carol, Carol, meta, parentHash, signalRoot, signalRoot, minTier, ""); - - vm.roll(block.number + 15 * 12); - - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - // Cannot verify block because it is contested.. - verifyBlock(Carol, 1); - - proveBlock( - Carol, - Carol, - meta, - parentHash, - blockHash, - signalRoot, - LibTiers.TIER_SGX_AND_PSE_ZKEVM, - "" - ); - } - - // Otherwise just not contest - vm.warp(block.timestamp + L1.getTier(LibTiers.TIER_GUARDIAN).cooldownWindow + 1); - // Now can verify - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_ContestingWithInvalidBlockHash() external { - giveEthAndTko(Alice, 1e8 ether, 1000 ether); - giveEthAndTko(Carol, 1e8 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < 10; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // signalRoot instead of blockHash - uint16 minTier = meta.minTier; - proveBlock(Bob, Bob, meta, parentHash, signalRoot, signalRoot, minTier, ""); - - if (minTier == LibTiers.TIER_OPTIMISTIC) { - // Try to contest - proveBlock(Carol, Carol, meta, parentHash, blockHash, signalRoot, minTier, ""); - - vm.roll(block.number + 15 * 12); - - vm.warp(block.timestamp + L1.getTier(LibTiers.TIER_GUARDIAN).cooldownWindow + 1); - - // Cannot verify block because it is contested.. - verifyBlock(Carol, 1); - - proveBlock( - Carol, - Carol, - meta, - parentHash, - 0, - signalRoot, - LibTiers.TIER_SGX_AND_PSE_ZKEVM, - TaikoErrors.L1_INVALID_TRANSITION.selector - ); - } - - // Otherwise just not contest - vm.warp(block.timestamp + L1.getTier(LibTiers.TIER_GUARDIAN).cooldownWindow + 1); - // Now can verify - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_NonAsignedProverCannotBeFirstInProofWindowTime() external { - giveEthAndTko(Alice, 1e8 ether, 100 ether); - // This is a very weird test (code?) issue here. - // If this line (or Bob's query balance) is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) - console2.log("Alice balance:", tko.balanceOf(Alice)); - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - giveEthAndTko(Carol, 1e8 ether, 100 ether); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - - for (uint256 blockId = 1; blockId < 10; blockId++) { - //printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - proveBlock( - Carol, - Carol, - meta, - parentHash, - blockHash, - signalRoot, - meta.minTier, - TaikoErrors.L1_NOT_ASSIGNED_PROVER.selector - ); - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_asignedProverCannotProveAfterHisWindowElapsed() external { - giveEthAndTko(Alice, 1e8 ether, 100 ether); - // This is a very weird test (code?) issue here. - // If this line (or Bob's query balance) is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- console2.log("Alice balance:", tko.balanceOf(Alice)); - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - giveEthAndTko(Carol, 1e8 ether, 100 ether); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - - for (uint256 blockId = 1; blockId < 10; blockId++) { - //printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - proveBlock( - Bob, - Bob, - meta, - parentHash, - blockHash, - signalRoot, - meta.minTier, - TaikoErrors.L1_ASSIGNED_PROVER_NOT_ALLOWED.selector - ); - - verifyBlock(Carol, 1); - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_GuardianProverCannotOverwriteIfSameProof() external { - giveEthAndTko(Alice, 1e7 ether, 1000 ether); - giveEthAndTko(Carol, 1e7 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) - // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e6 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // blockhash:blockId - proveBlock(Bob, Bob, meta, parentHash, blockHash, signalRoot, meta.minTier, ""); - - // Try to contest - but should revert with L1_ALREADY_PROVED - proveBlock( - Carol, - Carol, - meta, - parentHash, - blockHash, - signalRoot, - LibTiers.TIER_GUARDIAN, - TaikoErrors.L1_ALREADY_PROVED.selector - ); - - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_GuardianProverFailsWithInvalidBlockHash() external { - giveEthAndTko(Alice, 1e7 ether, 1000 ether); - giveEthAndTko(Carol, 1e7 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e6 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // blockhash:blockId - proveBlock(Bob, Bob, meta, parentHash, blockHash, signalRoot, meta.minTier, ""); - - // Try to contest - but should revert with L1_ALREADY_PROVED - proveBlock( - Carol, - Carol, - meta, - parentHash, - 0, - signalRoot, - LibTiers.TIER_GUARDIAN, - TaikoErrors.L1_INVALID_TRANSITION.selector - ); - - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_GuardianProverCanOverwriteIfNotSameProof() external { - giveEthAndTko(Alice, 1e7 ether, 1000 ether); - giveEthAndTko(Carol, 1e7 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) - // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e7 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // blockhash:blockId - proveBlock(Bob, Bob, meta, parentHash, signalRoot, signalRoot, meta.minTier, ""); - - // Prove as guardian - proveBlock( - Carol, Carol, meta, parentHash, blockHash, signalRoot, LibTiers.TIER_GUARDIAN, "" - ); - - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_IfGuardianRoleIsNotGrantedToProver() external { - registerAddress("guardian_prover", Alice); - - giveEthAndTko(Alice, 1e8 ether, 1000 ether); - giveEthAndTko(Carol, 1e8 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < 10; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of - // blockhash:blockId - proveBlock(Bob, Bob, meta, parentHash, signalRoot, signalRoot, meta.minTier, ""); - - // Prove as guardian but in reality not a guardian - proveBlock( - Carol, - Carol, - meta, - parentHash, - blockHash, - signalRoot, - LibTiers.TIER_GUARDIAN, - GuardianVerifier.PERMISSION_DENIED.selector - ); - - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_ProveWithInvalidBlockId() external { - registerAddress("guardian_prover", Alice); - - giveEthAndTko(Alice, 1e8 ether, 1000 ether); - giveEthAndTko(Carol, 1e8 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) - // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < 10; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - - meta.id = 100; - proveBlock( - Carol, - Carol, - meta, - parentHash, - blockHash, - signalRoot, - LibTiers.TIER_PSE_ZKEVM, - TaikoErrors.L1_INVALID_BLOCK_ID.selector - ); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_ProveWithInvalidMetahash() external { - registerAddress("guardian_prover", Alice); - - giveEthAndTko(Alice, 1e8 ether, 1000 ether); - giveEthAndTko(Carol, 1e8 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < 10; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - - // Mess up metahash - meta.l1Height = 200; - proveBlock( - Bob, - Bob, - meta, - parentHash, - blockHash, - signalRoot, - LibTiers.TIER_PSE_ZKEVM, - TaikoErrors.L1_BLOCK_MISMATCH.selector - ); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_GuardianProofCannotBeOverwrittenByLowerTier() external { - giveEthAndTko(Alice, 1e7 ether, 1000 ether); - giveEthAndTko(Carol, 1e7 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) - // Current investigations are onsgoing with foundry team - giveEthAndTko(Bob, 1e7 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of blockhash is - // exchanged with signalRoot - proveBlock(Bob, Bob, meta, parentHash, signalRoot, signalRoot, meta.minTier, ""); - - // Prove as guardian - proveBlock( - Carol, Carol, meta, parentHash, blockHash, signalRoot, LibTiers.TIER_GUARDIAN, "" - ); - - // Try to re-prove but reverts - proveBlock( - Bob, - Bob, - meta, - parentHash, - signalRoot, - signalRoot, - LibTiers.TIER_PSE_ZKEVM, - TaikoErrors.L1_INVALID_TIER.selector - ); - - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } - - function test_L1_GuardianCanReturnBondIfBlockUnprovable() external { - giveEthAndTko(Alice, 1e7 ether, 1000 ether); - giveEthAndTko(Carol, 1e7 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - // This is a very weird test (code?) issue here. - // If this line is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- // Current investigations are ongoing with foundry team - giveEthAndTko(Bob, 1e7 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - // This proof cannot be verified obviously because of blockhash is - // exchanged with signalRoot - proveBlock(Bob, Bob, meta, parentHash, signalRoot, signalRoot, meta.minTier, ""); - - // Let's say the 10th block is unprovable so prove accordingly - if (blockId == 10) { - TaikoData.Transition memory tran = TaikoData.Transition({ - parentHash: parentHash, - blockHash: blockHash, - signalRoot: signalRoot, - graffiti: 0x0 - }); - - TaikoData.TierProof memory proof; - proof.tier = LibTiers.TIER_GUARDIAN; - proof.data = bytes.concat(keccak256("RETURN_LIVENESS_BOND")); - - uint256 balanceBeforeReimbursement = tko.balanceOf(Bob); - - vm.prank(David, David); - gp.approve(meta, tran, proof); - vm.prank(Emma, Emma); - gp.approve(meta, tran, proof); - vm.prank(Frank, Frank); - gp.approve(meta, tran, proof); - - // // Credited back the bond (not transferred to the user - // wallet, - // // but in-contract account credited only.) - assertEq(tko.balanceOf(Bob) - balanceBeforeReimbursement, 1 ether); - } else { - // Prove as guardian - proveBlock( - Carol, - Carol, - meta, - parentHash, - blockHash, - signalRoot, - LibTiers.TIER_GUARDIAN, - "" - ); - } - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - - parentHash = blockHash; - } - printVariables(""); - } -} -*/ diff --git a/packages/protocol/test/L1/TaikoL1TestBase.sol b/packages/protocol/test/L1/TaikoL1TestBase.sol deleted file mode 100644 index aa601ef4ba0e..000000000000 --- a/packages/protocol/test/L1/TaikoL1TestBase.sol +++ /dev/null @@ -1,544 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; - -import "../TaikoTest.sol"; -/* -contract MockVerifier { - fallback(bytes calldata) external returns (bytes memory) { - return bytes.concat(keccak256("taiko")); - } -} -*/ -// TODO (dani): remove some code to sub-contracts, this one shall only contain -// shared logics and data. 
- -abstract contract TaikoL1TestBase is TaikoTest { - AddressManager public addressManager; - // AssignmentHook public assignmentHook; - ChainProver public chainProver; - TaikoToken public tko; - // SignalService public ss; - TaikoL1 public L1; - TaikoData.Config conf; - uint256 internal logCount; - // PseZkVerifier public pv; - /* 3 proof verifiers - to fulfill the requirement in BasedOperator.sol */ - MockSgxVerifier public sv1; - MockSgxVerifier public sv2; - MockSgxVerifier public sv3; - VerifierRegistry public vr; - // SgxAndZkVerifier public sgxZkVerifier; - // GuardianVerifier public gv; - // GuardianProver public gp; - // TaikoA6TierProvider public cp; - // Bridge public bridge; - - bytes32 public GENESIS_BLOCK_HASH = keccak256("GENESIS_BLOCK_HASH"); - - address public L2SS = randAddress(); - address public L2 = randAddress(); - - function deployTaikoL1() internal virtual returns (TaikoL1 taikoL1); - - function setUp() public virtual { - vm.startPrank(Alice); - vm.roll(20_232_182); //A real Ethereum block number from Jul-04-2024 09:13:47 - vm.warp(1_720_077_269); - - addressManager = AddressManager( - deployProxy({ - name: "address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (Alice)) - }) - ); - - L1 = deployTaikoL1(); - conf = L1.getConfig(); - - chainProver = ChainProver( - deployProxy({ - name: "chain_prover", - impl: address(new ChainProver()), - data: abi.encodeCall(ChainProver.init, (Alice, address(addressManager))) - }) - ); - - vr = VerifierRegistry( - deployProxy({ - name: "verifier_registry", - impl: address(new VerifierRegistry()), - data: abi.encodeCall(VerifierRegistry.init, (Alice, address(addressManager))) - }) - ); - - registerAddress("taiko", address(L1)); - registerAddress("chain_prover", address(chainProver)); - registerAddress("verifier_registry", address(vr)); - - // ss = SignalService( - // deployProxy({ - // name: "signal_service", - // impl: address(new SignalService()), - // data: bytes.concat(SignalService.init.selector) - // }) - // ); - - // pv = PseZkVerifier( - // deployProxy({ - // name: "tier_pse_zkevm", - // impl: address(new PseZkVerifier()), - // data: bytes.concat(PseZkVerifier.init.selector, - // abi.encode(address(addressManager))) - // }) - // ); - - address sgxImpl = address(new MockSgxVerifier()); - //Naming is like: 3, 1, 2, is because we need to have incremental order of addresses in - // BasedOperator, so figured out this is actually the way - - sv1 = MockSgxVerifier( - deployProxy({ - name: "sgx2", //Name does not matter now, since we check validity via - // verifierRegistry - impl: sgxImpl, - data: abi.encodeCall(MockSgxVerifier.init, (Alice, address(addressManager))) - }) - ); - - console2.log(address(sv1)); - - sv2 = MockSgxVerifier( - deployProxy({ - name: "sgx3", //Name does not matter now, since we check validity via - // verifierRegistry - impl: sgxImpl, - data: abi.encodeCall(MockSgxVerifier.init, (Alice, address(addressManager))) - }) - ); - - sv3 = MockSgxVerifier( - deployProxy({ - name: "sgx1", //Name does not matter now, since we check validity via - // verifierRegistry - impl: sgxImpl, - data: abi.encodeCall(MockSgxVerifier.init, (Alice, address(addressManager))) - }) - ); - - console2.log(address(sv2)); - - // sv2 = SgxVerifier( - // deployProxy({ - // name: "sgx3", //Name does not matter now, since we check validity via - // // verifierRegistry - // impl: sgxImpl, - // data: abi.encodeCall(SgxVerifier.init, (Alice, address(addressManager))) - // }) - // ); - - 
console2.log(address(sv3)); - - // Bootstrap / add first trusted instance -> SGX code needs some change tho - because - // changed since taiko-simplified was created first. - address[] memory initSgxInstances = new address[](1); - initSgxInstances[0] = SGX_X_0; - - sv1.addInstances(initSgxInstances); - sv2.addInstances(initSgxInstances); - sv3.addInstances(initSgxInstances); - - // address[] memory initSgxInstances = new address[](1); - // initSgxInstances[0] = SGX_X_0; - // sv.addInstances(initSgxInstances); - - // sgxZkVerifier = SgxAndZkVerifier( - // deployProxy({ - // name: "tier_sgx_and_pse_zkevm", - // impl: address(new SgxAndZkVerifier()), - // data: bytes.concat(SgxAndZkVerifier.init.selector, abi.encode(address(addressManager))) - // }) - // ); - - // gv = GuardianVerifier( - // deployProxy({ - // name: "guardian_verifier", - // impl: address(new GuardianVerifier()), - // data: bytes.concat(GuardianVerifier.init.selector, abi.encode(address(addressManager))) - // }) - // ); - - // gp = GuardianProver( - // deployProxy({ - // name: "guardian_prover", - // impl: address(new GuardianProver()), - // data: bytes.concat(GuardianProver.init.selector, abi.encode(address(addressManager))) - // }) - // ); - - // setupGuardianProverMultisig(); - - // cp = TaikoA6TierProvider( - // deployProxy({ - // name: "tier_provider", - // impl: address(new TaikoA6TierProvider()), - // data: bytes.concat(TaikoA6TierProvider.init.selector) - // }) - // ); - - // bridge = Bridge( - // payable( - // deployProxy({ - // name: "bridge", - // impl: address(new Bridge()), - // data: bytes.concat(Bridge.init.selector, abi.encode(addressManager)), - // registerTo: address(addressManager), - // owner: address(0) - // }) - // ) - // ); - - // assignmentHook = AssignmentHook( - // deployProxy({ - // name: "assignment_hook", - // impl: address(new AssignmentHook()), - // data: bytes.concat(AssignmentHook.init.selector, abi.encode(address(addressManager))) - // }) - // ); - - // registerAddress("taiko", address(L1)); - // registerAddress("tier_pse_zkevm", address(pv)); - // registerAddress("tier_sgx", address(sv)); - // registerAddress("tier_guardian", address(gv)); - // registerAddress("tier_sgx_and_pse_zkevm", address(sgxZkVerifier)); - // registerAddress("tier_provider", address(cp)); - // registerAddress("signal_service", address(ss)); - // registerAddress("guardian_prover", address(gp)); - // registerAddress("bridge", address(bridge)); - // registerL2Address("taiko", address(L2)); - // registerL2Address("signal_service", address(L2SS)); - // registerL2Address("taiko_l2", address(L2)); - - // registerAddress(pv.getVerifierName(300), address(new MockVerifier())); - - tko = TaikoToken( - deployProxy({ - name: "taiko_token", - impl: address(new TaikoToken()), - data: abi.encodeCall(TaikoToken.init, (address(0), address(this))), - registerTo: address(addressManager) - }) - ); - - L1.init(Alice, address(addressManager), GENESIS_BLOCK_HASH); - printVariables("init "); - vm.stopPrank(); - - // Add those 3 to verifier registry - vm.startPrank(vr.owner()); - vr.addVerifier(address(sv1), "sgx1"); - vr.addVerifier(address(sv2), "sgx2"); - vr.addVerifier(address(sv3), "sgx3"); - vm.stopPrank(); - } - - function proposeBlock( - address proposer, - TaikoData.BlockMetadata memory meta, - bytes4 revertReason - ) - internal - returns (TaikoData.BlockMetadata memory) - { - // TaikoData.TierFee[] memory tierFees = new TaikoData.TierFee[](5); - // // Register the tier fees - // // Based on OPL2ConfigTier we need 3: - // // - 
LibTiers.TIER_PSE_ZKEVM; - // // - LibTiers.TIER_SGX; - // // - LibTiers.TIER_OPTIMISTIC; - // // - LibTiers.TIER_GUARDIAN; - // // - LibTiers.TIER_SGX_AND_PSE_ZKEVM - // tierFees[0] = TaikoData.TierFee(LibTiers.TIER_OPTIMISTIC, 1 ether); - // tierFees[1] = TaikoData.TierFee(LibTiers.TIER_SGX, 1 ether); - // tierFees[2] = TaikoData.TierFee(LibTiers.TIER_PSE_ZKEVM, 2 ether); - // tierFees[3] = TaikoData.TierFee(LibTiers.TIER_SGX_AND_PSE_ZKEVM, 2 ether); - // tierFees[4] = TaikoData.TierFee(LibTiers.TIER_GUARDIAN, 0 ether); - // // For the test not to fail, set the message.value to the highest, the - // // rest will be returned - // // anyways - // uint256 msgValue = 2 ether; - - // AssignmentHook.ProverAssignment memory assignment = AssignmentHook.ProverAssignment({ - // feeToken: address(0), - // tierFees: tierFees, - // expiry: uint64(block.timestamp + 60 minutes), - // maxBlockId: 0, - // maxProposedIn: 0, - // metaHash: 0, - // signature: new bytes(0) - // }); - - // assignment.signature = - // _signAssignment(prover, assignment, address(L1), keccak256(new bytes(txListSize))); - - // (, TaikoData.SlotB memory b) = L1.getStateVariables(); - - // uint256 _difficulty; - // unchecked { - // _difficulty = block.prevrandao; - // } - - // meta.blockHash = blockHash; - // meta.parentHash = parentHash; - - // meta.timestamp = uint64(block.timestamp); - // meta.l1Height = uint64(block.number - 1); - // meta.l1Hash = blockhash(block.number - 1); - // meta.difficulty = bytes32(_difficulty); - // meta.gasLimit = gasLimit; - - // TaikoData.HookCall[] memory hookcalls = new TaikoData.HookCall[](1); - - // hookcalls[0] = TaikoData.HookCall(address(assignmentHook), abi.encode(assignment)); - - bytes[] memory dummyTxList = new bytes[](1); - dummyTxList[0] = hex"0000000000000000000000000000000000000000000000000000000000000001"; - - // If blob is used, empty tx list - bytes[] memory emptyTxList; - - // Input metadata sturct can now support multiple block proposals per L1 TXN - bytes[] memory metasEncoded = new bytes[](1); - metasEncoded[0] = abi.encode(meta); - - TaikoData.BlockMetadata[] memory _returnedBlocks = new TaikoData.BlockMetadata[](1); - - // if (revertReason == "") { - // vm.prank(proposer, proposer); - // _returnedBlocks = L1.proposeBlock{ value: 1 ether / 10 }( - // metasEncoded, meta.blobUsed == true ? emptyTxList : dummyTxList - // ); - // } else { - // vm.prank(proposer, proposer); - // vm.expectRevert(revertReason); - // _returnedBlocks = L1.proposeBlock{ value: 1 ether / 10 }( - // metasEncoded, meta.blobUsed == true ? 
emptyTxList : dummyTxList - // ); - // return meta; - // } - - return _returnedBlocks[0]; - } - - function proveBlock(address prover, bytes memory blockProof) internal { - vm.prank(prover, prover); - chainProver.prove(blockProof); - } - - // function verifyBlock(uint64 count) internal { - // basedOperator.verifyBlocks(count); - // } - - // function setupGuardianProverMultisig() internal { - // address[] memory initMultiSig = new address[](5); - // initMultiSig[0] = David; - // initMultiSig[1] = Emma; - // initMultiSig[2] = Frank; - // initMultiSig[3] = Grace; - // initMultiSig[4] = Henry; - - // gp.setGuardians(initMultiSig, 3); - // } - - function registerAddress(bytes32 nameHash, address addr) internal { - addressManager.setAddress(uint64(block.chainid), nameHash, addr); - console2.log(block.chainid, uint256(nameHash), unicode"→", addr); - } - - function registerL2Address(bytes32 nameHash, address addr) internal { - addressManager.setAddress(conf.chainId, nameHash, addr); - console2.log(conf.chainId, string(abi.encodePacked(nameHash)), unicode"→", addr); - } - - // function _signAssignment( - // address signer, - // AssignmentHook.ProverAssignment memory assignment, - // address taikoAddr, - // bytes32 blobHash - // ) - // internal - // view - // returns (bytes memory signature) - // { - // uint256 signerPrivateKey; - - // // In the test suite these are the 3 which acts as provers - // if (signer == Alice) { - // signerPrivateKey = 0x1; - // } else if (signer == Bob) { - // signerPrivateKey = 0x2; - // } else if (signer == Carol) { - // signerPrivateKey = 0x3; - // } - - // (uint8 v, bytes32 r, bytes32 s) = vm.sign( - // signerPrivateKey, assignmentHook.hashAssignment(assignment, taikoAddr, blobHash) - // ); - // signature = abi.encodePacked(r, s, v); - // } - - function createSgxSignatureProof( - address newInstance, - address prover, - bytes32 transitionHash - ) - internal - view - returns (bytes memory signature) - { - bytes32 digest = LibPublicInput.hashPublicInputs( - transitionHash, address(sv1), newInstance, prover, L1.getConfig().chainId - ); - - uint256 signerPrivateKey; - - // In the test suite these are the 3 which acts as provers - if (SGX_X_0 == newInstance) { - signerPrivateKey = 0x5; - } else if (SGX_X_1 == newInstance) { - signerPrivateKey = 0x4; - } - - (uint8 v, bytes32 r, bytes32 s) = vm.sign(signerPrivateKey, digest); - signature = abi.encodePacked(r, s, v); - } - - function giveEthAndTko(address to, uint256 amountTko, uint256 amountEth) internal { - vm.deal(to, amountEth); - tko.transfer(to, amountTko); - - vm.prank(to, to); - tko.approve(address(L1), amountTko); - // vm.prank(to, to); - // tko.approve(address(assignmentHook), amountTko); - - console2.log("TKO balance:", to, tko.balanceOf(to)); - console2.log("ETH balance:", to, to.balance); - } - - function printVariables(string memory comment) internal { - string memory str = string.concat( - Strings.toString(logCount++), - ":[", - "LEGACY_PRINTING_SHALL_BE_REMOVED", - unicode"→", - "LEGACY_PRINTING_SHALL_BE_REMOVED", - "] // ", - comment - ); - console2.log(str); - } - - function mine(uint256 counts) internal { - vm.warp(block.timestamp + 12 * counts); - vm.roll(block.number + counts); - } - - function createBlockMetaData( - address coinbase, - uint64 l2BlockNumber, - uint32 belowBlockTipHeight, // How many blocks below from current tip (block.id) - bool blobUsed, - bytes32 parentMetaHash, - bytes32 parentBlockHash - ) - internal - returns (TaikoData.BlockMetadata memory meta) - { - meta.blockHash = randBytes32(); 
- meta.parentMetaHash = parentMetaHash; - meta.parentBlockHash = parentBlockHash; - meta.l1Hash = blockhash(block.number - belowBlockTipHeight); - meta.difficulty = block.prevrandao; - meta.blobHash = randBytes32(); - meta.coinbase = coinbase; - meta.l2BlockNumber = l2BlockNumber; - meta.gasLimit = L1.getConfig().blockMaxGasLimit; - meta.l1StateBlockNumber = uint32(block.number - belowBlockTipHeight); - meta.timestamp = uint64(block.timestamp - (belowBlockTipHeight * 12)); // x blocks behind - - if (blobUsed) { - meta.txListByteOffset = 0; - meta.txListByteSize = 0; - meta.blobUsed = true; - } else { - meta.txListByteOffset = 0; - meta.txListByteSize = 32; // Corresponding dummyTxList is set during proposeBlock() - meta.blobUsed = false; - } - } - - function createProofs( - uint64 newL1BlockNumber, - bytes32 newL1Root, - address prover, - bool threeMockSGXProofs // Used to indicate to "trick" the BasedProver with 3 different (but - // same code) deployments of SGX verifier - later we can fine tune to have 3 correct, - // valid proofs. - ) - internal - view - returns (ChainProver.ProofBatch memory proofBatch) - { - proofBatch.newL1BlockNumber = newL1BlockNumber; - proofBatch.newL1Root = newL1Root; - - // Set prover - proofBatch.prover = prover; - - address newInstance; - // Keep changing the pub key associated with an instance to avoid - // attacks, - // obviously just a mock due to 2 addresses changing all the time. - (newInstance,) = sv1.instances(0); - if (newInstance == SGX_X_0) { - newInstance = SGX_X_1; - } else { - newInstance = SGX_X_0; - } - - ChainProver.ProofData[] memory proofs = new ChainProver.ProofData[](3); - - bytes memory signature = createSgxSignatureProof( - newInstance, - prover, - keccak256( - abi.encode( - chainProver.currentStateHash(), - keccak256( - abi.encode(blockhash(proofBatch.newL1BlockNumber), proofBatch.newL1Root) - ) - ) - ) - ); - - // The order is on purpose reversed, becase of the L1_INVALID_OR_DUPLICATE_VERIFIER() check - proofs[0].verifier = sv2; - proofs[0].proof = bytes.concat(bytes4(0), bytes20(newInstance), signature); - - if (threeMockSGXProofs) { - // The order is on purpose reversed, becase of the L1_INVALID_OR_DUPLICATE_VERIFIER() - // check - proofs[1].verifier = sv1; - proofs[1].proof = bytes.concat(bytes4(0), bytes20(newInstance), signature); - - proofs[2].verifier = sv3; - proofs[2].proof = bytes.concat(bytes4(0), bytes20(newInstance), signature); - } else { - //Todo(dani): Implement more proof and verifiers when needed/available but for now, not - // to change the code in BasedOperator, maybe best to mock those 3 - } - - proofBatch.proofs = proofs; - } -} diff --git a/packages/protocol/test/L2/Lib1559Math.t.sol b/packages/protocol/test/L2/Lib1559Math.t.sol deleted file mode 100644 index a393a4ff7056..000000000000 --- a/packages/protocol/test/L2/Lib1559Math.t.sol +++ /dev/null @@ -1,39 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../TaikoTest.sol"; -import "../../contracts/L2/LibL2Config.sol"; - -contract TestLib1559Math is TaikoTest { - using LibMath for uint256; - - // function test_eip1559_math() external pure { - // LibL2Config.Config memory config = LibL2Config.get(); - // uint256 adjustmentFactor = config.gasTargetPerL1Block * config.basefeeAdjustmentQuotient; - - // uint256 baseFee; - // uint256 i; - // uint256 target = 0.01 gwei; - - // for (uint256 k; k < 5; ++k) { - // for (; baseFee < target; ++i) { - // baseFee = Lib1559Math.basefee(config.gasTargetPerL1Block * i, adjustmentFactor); - // } - // 
console2.log("base fee:", baseFee); - // console2.log(" gasExcess:", config.gasTargetPerL1Block * i); - // console2.log(" i:", i); - // target *= 10; - // } - // } - - // function test_eip1559_math_max() external pure { - // LibL2Config.Config memory config = LibL2Config.get(); - // uint256 adjustmentFactor = config.gasTargetPerL1Block * config.basefeeAdjustmentQuotient; - - // uint256 gasExcess = type(uint64).max; - // uint256 baseFee = Lib1559Math.basefee(gasExcess, adjustmentFactor); - - // console2.log("base fee (gwei):", baseFee / 1 gwei); - // console2.log(" gasExcess:", gasExcess); - // } -} diff --git a/packages/protocol/test/TaikoTest.sol b/packages/protocol/test/TaikoTest.sol deleted file mode 100644 index 94c4329edf92..000000000000 --- a/packages/protocol/test/TaikoTest.sol +++ /dev/null @@ -1,91 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.20; - -import "forge-std/Test.sol"; -import "forge-std/console2.sol"; - -import "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; - -import "../contracts/thirdparty/LibFixedPointMath.sol"; - -import "../contracts/bridge/Bridge.sol"; -import "../contracts/signal/SignalService.sol"; -import "../contracts/tokenvault/BridgedERC20.sol"; -import "../contracts/tokenvault/BridgedERC721.sol"; -import "../contracts/tokenvault/BridgedERC1155.sol"; -import "../contracts/tokenvault/ERC20Vault.sol"; -import "../contracts/tokenvault/ERC721Vault.sol"; -import "../contracts/tokenvault/ERC1155Vault.sol"; - -import "../contracts/tko/TaikoToken.sol"; -import "../contracts/L1/VerifierRegistry.sol"; -import "../contracts/L1/verifiers/MockSgxVerifier.sol"; -import "../contracts/L1/ChainProver.sol"; -/*import "../contracts/L1/TaikoL1.sol"; -import "../contracts/L1/verifiers/GuardianVerifier.sol"; -import "../contracts/L1/verifiers/PseZkVerifier.sol"; -import "../contracts/L1/verifiers/SgxAndZkVerifier.sol"; -import "../contracts/L1/tiers/TaikoA6TierProvider.sol"; -import "../contracts/L1/tiers/ITierProvider.sol"; -import "../contracts/L1/tiers/ITierProvider.sol"; -import "../contracts/L1/provers/GuardianProver.sol";*/ - -// import "../contracts/L2/Lib1559Math.sol"; -//import "../contracts/L2/TaikoL2EIP1559Configurable.sol"; -// import "../contracts/L2/TaikoL2.sol"; - -import "../contracts/test/erc20/FreeMintERC20.sol"; - -import "./DeployCapability.sol"; -import "./HelperContracts.sol"; - -abstract contract TaikoTest is Test, DeployCapability { - uint256 private _seed = 0x12345678; - address internal Alice = vm.addr(0x1); - address internal Bob = vm.addr(0x2); - address internal Carol = vm.addr(0x3); - address internal David = randAddress(); - address internal Emma = randAddress(); - address internal Frank = randAddress(); - address internal Grace = randAddress(); - address internal Henry = randAddress(); - address internal Isabella = randAddress(); - address internal James = randAddress(); - address internal Katherine = randAddress(); - address internal Liam = randAddress(); - address internal Mia = randAddress(); - address internal Noah = randAddress(); - address internal Olivia = randAddress(); - address internal Patrick = randAddress(); - address internal Quinn = randAddress(); - address internal Rachel = randAddress(); - address internal Samuel = randAddress(); - address internal Taylor = randAddress(); - address internal Ulysses = randAddress(); - address internal Victoria = randAddress(); - address internal William = randAddress(); - address internal Xavier = randAddress(); - address internal Yasmine = 
randAddress(); - address internal Zachary = randAddress(); - address internal SGX_X_0 = vm.addr(0x4); - address internal SGX_X_1 = vm.addr(0x5); - address internal SGX_Y = randAddress(); - address internal SGX_Z = randAddress(); - - function randAddress() internal returns (address) { - bytes32 randomHash = keccak256(abi.encodePacked("address", _seed++)); - return address(bytes20(randomHash)); - } - - function randBytes32() internal returns (bytes32) { - return keccak256(abi.encodePacked("bytes32", _seed++)); - } - - function strToBytes32(string memory input) internal pure returns (bytes32 result) { - require(bytes(input).length <= 32, "String too long"); - // Copy the string's bytes directly into the bytes32 variable - assembly { - result := mload(add(input, 32)) - } - } -} diff --git a/packages/protocol/test/bridge/Bridge.t.sol b/packages/protocol/test/bridge/Bridge.t.sol deleted file mode 100644 index 99df95ce9342..000000000000 --- a/packages/protocol/test/bridge/Bridge.t.sol +++ /dev/null @@ -1,623 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../TaikoTest.sol"; - -// A contract which is not our ErcXXXTokenVault -// Which in such case, the sent funds are still recoverable, but not via the -// onMessageRecall() but Bridge will send it back -contract UntrustedSendMessageRelayer { - function sendMessage( - address bridge, - IBridge.Message memory message, - uint256 message_value - ) - public - returns (bytes32 msgHash, IBridge.Message memory updatedMessage) - { - return IBridge(bridge).sendMessage{ value: message_value }(message); - } -} - -// A malicious contract that attempts to exhaust gas -contract MaliciousContract2 { - fallback() external payable { - while (true) { } // infinite loop - } -} - -// Non malicious contract that does not exhaust gas -contract NonMaliciousContract1 { - fallback() external payable { } -} - -contract BridgeTest is TaikoTest { - AddressManager addressManager; - BadReceiver badReceiver; - GoodReceiver goodReceiver; - Bridge bridge; - Bridge destChainBridge; - SignalService signalService; - SkipProofCheckSignal mockProofSignalService; - UntrustedSendMessageRelayer untrustedSenderContract; - - NonMaliciousContract1 nonmaliciousContract1; - MaliciousContract2 maliciousContract2; - - address mockDAO = randAddress(); //as "real" L1 owner - - uint64 destChainId = 19_389; - - function setUp() public { - vm.startPrank(Alice); - vm.deal(Alice, 100 ether); - - addressManager = AddressManager( - deployProxy({ - name: "address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (address(0))) - }) - ); - - bridge = Bridge( - payable( - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: abi.encodeCall(Bridge.init, (address(0), address(addressManager))), - registerTo: address(addressManager) - }) - ) - ); - - destChainBridge = Bridge( - payable( - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: abi.encodeCall(Bridge.init, (address(0), address(addressManager))) - }) - ) - ); - - // "Deploy" on L2 only - uint64 l1ChainId = uint64(block.chainid); - vm.chainId(destChainId); - - vm.chainId(l1ChainId); - - mockProofSignalService = SkipProofCheckSignal( - deployProxy({ - name: "signal_service", - impl: address(new SkipProofCheckSignal()), - data: abi.encodeCall(SignalService.init, (address(0), address(addressManager))), - registerTo: address(addressManager) - }) - ); - - signalService = SignalService( - deployProxy({ - name: "signal_service", - impl: address(new 
SignalService()), - data: abi.encodeCall(SignalService.init, (address(0), address(addressManager))) - }) - ); - - vm.deal(address(destChainBridge), 100 ether); - - untrustedSenderContract = new UntrustedSendMessageRelayer(); - vm.deal(address(untrustedSenderContract), 10 ether); - - register( - address(addressManager), "signal_service", address(mockProofSignalService), destChainId - ); - - register(address(addressManager), "bridge", address(destChainBridge), destChainId); - - register(address(addressManager), "taiko", address(uint160(123)), destChainId); - - register(address(addressManager), "bridge_watchdog", address(uint160(123)), destChainId); - - vm.stopPrank(); - } - - function test_Bridge_send_ether_to_to_with_value() public { - IBridge.Message memory message = IBridge.Message({ - id: 0, - from: address(bridge), - srcChainId: uint64(block.chainid), - destChainId: destChainId, - srcOwner: Alice, - destOwner: Alice, - to: Alice, - value: 10_000, - fee: 1000, - gasLimit: 1_000_000, - data: "" - }); - // Mocking proof - but obviously it needs to be created in prod - // corresponding to the message - bytes memory proof = hex"00"; - - bytes32 msgHash = destChainBridge.hashMessage(message); - - vm.chainId(destChainId); - vm.prank(Bob, Bob); - destChainBridge.processMessage(message, proof); - - IBridge.Status status = destChainBridge.messageStatus(msgHash); - - assertEq(status == IBridge.Status.DONE, true); - // Alice has 100 ether + 1000 wei balance, because we did not use the - // 'sendMessage' - // since we mocking the proof, so therefore the 1000 wei - // deduction/transfer did not happen - assertTrue(Alice.balance >= 100 ether + 10_000); - assertTrue(Alice.balance <= 100 ether + 10_000 + 1000); - assertTrue(Bob.balance >= 0 && Bob.balance <= 1000); - } - - function test_Bridge_send_ether_to_contract_with_value_simple() public { - goodReceiver = new GoodReceiver(); - - IBridge.Message memory message = IBridge.Message({ - id: 0, - from: address(bridge), - srcChainId: uint64(block.chainid), - destChainId: destChainId, - srcOwner: Alice, - destOwner: Alice, - to: address(goodReceiver), - value: 10_000, - fee: 1000, - gasLimit: 1_000_000, - data: "" - }); - // Mocking proof - but obviously it needs to be created in prod - // corresponding to the message - bytes memory proof = hex"00"; - - bytes32 msgHash = destChainBridge.hashMessage(message); - - vm.chainId(destChainId); - - vm.prank(Bob, Bob); - destChainBridge.processMessage(message, proof); - - IBridge.Status status = destChainBridge.messageStatus(msgHash); - - assertEq(status == IBridge.Status.DONE, true); - - // Bob (relayer) and goodContract has 1000 wei balance - assertEq(address(goodReceiver).balance, 10_000); - console2.log("Bob.balance:", Bob.balance); - assertTrue(Bob.balance >= 0 && Bob.balance <= 1000); - } - - function test_Bridge_send_ether_to_contract_with_value_and_message_data() public { - goodReceiver = new GoodReceiver(); - - IBridge.Message memory message = IBridge.Message({ - id: 0, - from: address(bridge), - srcChainId: uint64(block.chainid), - destChainId: destChainId, - srcOwner: Alice, - destOwner: Alice, - to: address(goodReceiver), - value: 1000, - fee: 1000, - gasLimit: 1_000_000, - data: abi.encodeCall(GoodReceiver.onMessageInvocation, abi.encode(Carol)) - }); - // Mocking proof - but obviously it needs to be created in prod - // corresponding to the message - bytes memory proof = hex"00"; - - bytes32 msgHash = destChainBridge.hashMessage(message); - - vm.chainId(destChainId); - - vm.prank(Bob, Bob); - 
destChainBridge.processMessage(message, proof); - - IBridge.Status status = destChainBridge.messageStatus(msgHash); - - assertEq(status == IBridge.Status.DONE, true); - - // Carol and goodContract has 500 wei balance - assertEq(address(goodReceiver).balance, 500); - assertEq(Carol.balance, 500); - } - - function test_Bridge_send_message_ether_reverts_if_value_doesnt_match_expected() public { - // uint256 amount = 1 wei; - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: 0, - gasLimit: 1_000_000, - fee: 1_000_000, - destChain: destChainId - }); - - vm.expectRevert(Bridge.B_INVALID_VALUE.selector); - bridge.sendMessage(message); - } - - function test_Bridge_send_message_ether_reverts_when_owner_is_zero_address() public { - uint256 amount = 1 wei; - IBridge.Message memory message = newMessage({ - owner: address(0), - to: Alice, - value: 0, - gasLimit: 0, - fee: 0, - destChain: destChainId - }); - - vm.expectRevert(EssentialContract.ZERO_ADDRESS.selector); - bridge.sendMessage{ value: amount }(message); - } - - function test_Bridge_send_message_ether_reverts_when_dest_chain_is_not_enabled() public { - uint256 amount = 1 wei; - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: 0, - gasLimit: 0, - fee: 0, - destChain: destChainId + 1 - }); - - vm.expectRevert(Bridge.B_INVALID_CHAINID.selector); - bridge.sendMessage{ value: amount }(message); - } - - function test_Bridge_send_message_ether_reverts_when_dest_chain_same_as_block_chainid() - public - { - uint256 amount = 1 wei; - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: 0, - gasLimit: 0, - fee: 0, - destChain: uint64(block.chainid) - }); - - vm.expectRevert(Bridge.B_INVALID_CHAINID.selector); - bridge.sendMessage{ value: amount }(message); - } - - function test_Bridge_send_message_ether_with_no_processing_fee() public { - uint256 amount = 0 wei; - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: 0, - gasLimit: 0, - fee: 0, - destChain: destChainId - }); - - (, IBridge.Message memory _message) = bridge.sendMessage{ value: amount }(message); - assertEq(bridge.isMessageSent(_message), true); - } - - function test_Bridge_send_message_ether_with_processing_fee() public { - uint256 amount = 0 wei; - uint64 fee = 1_000_000 wei; - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: 0, - gasLimit: 1_000_000, - fee: fee, - destChain: destChainId - }); - - (, IBridge.Message memory _message) = bridge.sendMessage{ value: amount + fee }(message); - assertEq(bridge.isMessageSent(_message), true); - } - - function test_Bridge_recall_message_ether() public { - uint256 amount = 1 ether; - uint64 fee = 0 wei; - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: amount, - gasLimit: 0, - fee: fee, - destChain: destChainId - }); - - uint256 starterBalanceVault = address(bridge).balance; - uint256 starterBalanceAlice = Alice.balance; - - vm.prank(Alice, Alice); - (, IBridge.Message memory _message) = bridge.sendMessage{ value: amount + fee }(message); - assertEq(bridge.isMessageSent(_message), true); - - assertEq(address(bridge).balance, (starterBalanceVault + amount + fee)); - assertEq(Alice.balance, (starterBalanceAlice - (amount + fee))); - bridge.recallMessage(message, ""); - - assertEq(address(bridge).balance, (starterBalanceVault + fee)); - assertEq(Alice.balance, (starterBalanceAlice - fee)); - } - - function 
test_Bridge_recall_message_but_not_supports_recall_interface() public { - // In this test we expect that the 'message value is still refundable, - // just not via - // ERCXXTokenVault (message.from) but directly from the Bridge - - uint256 amount = 1 ether; - uint64 fee = 0 wei; - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: amount, - gasLimit: 0, - fee: fee, - destChain: destChainId - }); - - uint256 starterBalanceVault = address(bridge).balance; - - (, message) = untrustedSenderContract.sendMessage(address(bridge), message, amount + fee); - - assertEq(address(bridge).balance, (starterBalanceVault + amount + fee)); - - bridge.recallMessage(message, ""); - - assertEq(address(bridge).balance, (starterBalanceVault + fee)); - } - - function test_Bridge_send_message_ether_with_processing_fee_invalid_amount() public { - uint256 amount = 0 wei; - uint64 fee = 1_000_000 wei; - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: 0, - gasLimit: 1_000_000, - fee: fee, - destChain: destChainId - }); - - vm.expectRevert(Bridge.B_INVALID_VALUE.selector); - bridge.sendMessage{ value: amount }(message); - } - - // test with a known good merkle proof / message since we cant generate - // proofs via rpc - // in foundry - function test_Bridge_process_message() public { - // This predefined successful process message call fails now - // since we modified the iBridge.Message struct and cut out - // depositValue - vm.startPrank(Alice); - (IBridge.Message memory message, bytes memory proof) = - setUpPredefinedSuccessfulProcessMessageCall(); - - bytes32 msgHash = destChainBridge.hashMessage(message); - - destChainBridge.processMessage(message, proof); - - IBridge.Status status = destChainBridge.messageStatus(msgHash); - - assertEq(status == IBridge.Status.DONE, true); - } - - // test with a known good merkle proof / message since we cant generate - // proofs via rpc - // in foundry - function test_Bridge_retry_message_and_end_up_in_failed_status() public { - vm.startPrank(Alice); - (IBridge.Message memory message, bytes memory proof) = - setUpPredefinedSuccessfulProcessMessageCall(); - - // etch bad receiver at the to address, so it fails. - vm.etch(message.to, address(badReceiver).code); - - bytes32 msgHash = destChainBridge.hashMessage(message); - - destChainBridge.processMessage(message, proof); - - IBridge.Status status = destChainBridge.messageStatus(msgHash); - - assertEq(status == IBridge.Status.RETRIABLE, true); - - vm.stopPrank(); - - vm.prank(message.destOwner); - vm.expectRevert(Bridge.B_RETRY_FAILED.selector); - destChainBridge.retryMessage(message, false); - - vm.prank(message.destOwner); - destChainBridge.retryMessage(message, true); - IBridge.Status postRetryStatus = destChainBridge.messageStatus(msgHash); - assertEq(postRetryStatus == IBridge.Status.FAILED, true); - } - - function test_Bridge_fail_message() public { - vm.startPrank(Alice); - (IBridge.Message memory message, bytes memory proof) = - setUpPredefinedSuccessfulProcessMessageCall(); - - // etch bad receiver at the to address, so it fails. 
- vm.etch(message.to, address(badReceiver).code); - - bytes32 msgHash = destChainBridge.hashMessage(message); - - destChainBridge.processMessage(message, proof); - - IBridge.Status status = destChainBridge.messageStatus(msgHash); - - assertEq(status == IBridge.Status.RETRIABLE, true); - - vm.stopPrank(); - - vm.prank(message.destOwner); - destChainBridge.failMessage(message); - IBridge.Status postRetryStatus = destChainBridge.messageStatus(msgHash); - assertEq(postRetryStatus == IBridge.Status.FAILED, true); - } - - function test_processMessage_InvokeMessageCall_DoS1() public { - nonmaliciousContract1 = new NonMaliciousContract1(); - - IBridge.Message memory message = IBridge.Message({ - id: 0, - from: address(this), - srcChainId: uint64(block.chainid), - destChainId: destChainId, - srcOwner: Alice, - destOwner: Alice, - to: address(nonmaliciousContract1), - value: 1000, - fee: 1000, - gasLimit: 1_000_000, - data: "" - }); - - bytes memory proof = hex"00"; - bytes32 msgHash = destChainBridge.hashMessage(message); - vm.chainId(destChainId); - vm.prank(Bob, Bob); - - destChainBridge.processMessage(message, proof); - - IBridge.Status status = destChainBridge.messageStatus(msgHash); - assertEq(status == IBridge.Status.DONE, true); // test pass check - } - - function test_processMessage_InvokeMessageCall_DoS2_testfail() public { - maliciousContract2 = new MaliciousContract2(); - - IBridge.Message memory message = IBridge.Message({ - id: 0, - from: address(this), - srcChainId: uint64(block.chainid), - destChainId: destChainId, - srcOwner: Alice, - destOwner: Alice, - to: address(maliciousContract2), - value: 1000, - fee: 1000, - gasLimit: 1_000_000, - data: "" - }); - - bytes memory proof = hex"00"; - bytes32 msgHash = destChainBridge.hashMessage(message); - vm.chainId(destChainId); - vm.prank(Bob, Bob); - - destChainBridge.processMessage(message, proof); - - IBridge.Status status = destChainBridge.messageStatus(msgHash); - assertEq(status == IBridge.Status.RETRIABLE, true); //Test fail check - } - - function retry_message_reverts_when_status_non_retriable() public { - IBridge.Message memory message = newMessage({ - owner: Alice, - to: Alice, - value: 0, - gasLimit: 10_000, - fee: 1, - destChain: destChainId - }); - - vm.expectRevert(Bridge.B_INVALID_STATUS.selector); - destChainBridge.retryMessage(message, true); - } - - function retry_message_reverts_when_last_attempt_and_message_is_not_owner() public { - vm.startPrank(Alice); - IBridge.Message memory message = newMessage({ - owner: Bob, - to: Alice, - value: 0, - gasLimit: 10_000, - fee: 1, - destChain: destChainId - }); - - vm.expectRevert(Bridge.B_PERMISSION_DENIED.selector); - destChainBridge.retryMessage(message, true); - } - - function setUpPredefinedSuccessfulProcessMessageCall() - internal - returns (IBridge.Message memory, bytes memory) - { - badReceiver = new BadReceiver(); - - uint64 dest = 1337; - addressManager.setAddress(1336, "bridge", 0x564540a26Fb667306b3aBdCB4ead35BEb88698ab); - - addressManager.setAddress(dest, "bridge", address(destChainBridge)); - - vm.deal(address(bridge), 100 ether); - - addressManager.setAddress(dest, "signal_service", address(mockProofSignalService)); - - vm.deal(address(destChainBridge), 1 ether); - - vm.chainId(dest); - - // known message that corresponds with below proof. 
- IBridge.Message memory message = IBridge.Message({ - id: 0, - from: 0xDf08F82De32B8d460adbE8D72043E3a7e25A3B39, - srcChainId: 1336, - destChainId: dest, - srcOwner: 0xDf08F82De32B8d460adbE8D72043E3a7e25A3B39, - destOwner: 0xDf08F82De32B8d460adbE8D72043E3a7e25A3B39, - to: 0x200708D76eB1B69761c23821809d53F65049939e, - value: 1000, - fee: 1000, - gasLimit: 1_000_000, - data: "" - }); - - bytes memory proof = - hex"0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000003e0f7ff3b519ec113138509a5b1b6f54761cebc6891bc0ba4f904b89688b1ef8e051dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493470000000000000000000000000000000000000000000000000000000000000000a85358ff57974db8c9ce2ecabe743d44133f9d11e5da97e386111073f1a2f92c345bd00c2ef9db5726d84c184af67fdbad0be00921eb1dcbca674c427abb5c3ebda7d1e94e5b2b3d5e6a54c9a42423b1746afa4b264e7139877c0523c3397ec4000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000002000800002000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000001000040000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000001500000000000000000000000000000000000000000000000000000000009bbf55000000000000000000000000000000000000000000000000000000000001d4fb0000000000000000000000000000000000000000000000000000000064435d130000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004d2e85500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000061d883010a1a846765746888676f312e31382e38856c696e75780000000000000015b1ca61fbe1aa968ab60a461913aa40046b5357162466a4134d195647c14dd7488dd438abb39d6574e7d9d752fa2381bbd9dc780efc3fcc66af5285ebcb117b010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000dbf8d9b8b3f8b18080a04fc5f13ab2f9ba0c2da88b0151ab0e7cf4d85d08cca45ccd923c6ab76323eb28a02b70a98baa2507beffe8c266006cae52064dccf4fd1998af774ab3399029b38380808080a07394a09684ef3b2c87e9e2a753eb4ac78e2047b980e16d2e2133aee78946370d8080a0f4984a11f61a2921456141df88de6e1a710d28681b91af794c5a721e47839cd78080a09248167635e6f0eb40f782a6bbd237174104259b6af88b3c52086214098f0e2c8080a3e2a03ecd5e1f251bf1676a367f6b16e92ffe6b2638b4a27b3d31870d25442bd59ef4010000000000"; - - return (message, proof); - } - - function newMessage( - address owner, - address to, - uint256 value, - uint32 gasLimit, - uint64 fee, - uint64 destChain - ) - internal - view - returns (IBridge.Message memory) - { - return IBridge.Message({ - srcOwner: owner, - destOwner: owner, - destChainId: destChain, - to: to, - value: value, - fee: fee, - id: 0, // placeholder, will be overwritten - from: owner, // placeholder, will be overwritten - srcChainId: uint64(block.chainid), // will be overwritten - gasLimit: gasLimit, - data: "" - }); - } -} diff --git 
a/packages/protocol/test/common/EssentialContract.t.sol b/packages/protocol/test/common/EssentialContract.t.sol deleted file mode 100644 index 2012cffebd98..000000000000 --- a/packages/protocol/test/common/EssentialContract.t.sol +++ /dev/null @@ -1,66 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../TaikoTest.sol"; - -contract Target1 is EssentialContract { - uint256 public count; - - function init(address _owner) external initializer { - __Essential_init(_owner); - count = 100; - } - - function adjust() external virtual onlyOwner { - count += 1; - } -} - -contract Target2 is Target1 { - function update() external onlyOwner { - count += 10; - } - - function adjust() external override onlyOwner { - count -= 1; - } -} - -contract TestEssentialContract is TaikoTest { - function test_essential_behind_1967_proxy() external { - bytes memory data = abi.encodeCall(Target1.init, (address(0))); - vm.startPrank(Alice); - ERC1967Proxy proxy = new ERC1967Proxy(address(new Target1()), data); - Target1 target = Target1(address(proxy)); - vm.stopPrank(); - - // Owner is Alice - vm.prank(Carol); - assertEq(target.owner(), Alice); - - // Alice can adjust(); - vm.prank(Alice); - target.adjust(); - assertEq(target.count(), 101); - - // Bob cannot adjust() - vm.prank(Bob); - vm.expectRevert(); - target.adjust(); - - address v2 = address(new Target2()); - data = abi.encodeCall(Target2.update, ()); - - vm.prank(Bob); - vm.expectRevert(); - target.upgradeToAndCall(v2, data); - - vm.prank(Alice); - target.upgradeToAndCall(v2, data); - assertEq(target.count(), 111); - - vm.prank(Alice); - target.adjust(); - assertEq(target.count(), 110); - } -} diff --git a/packages/protocol/test/libs/LibFixedPointMath.t.sol b/packages/protocol/test/libs/LibFixedPointMath.t.sol deleted file mode 100644 index d9406578f8d4..000000000000 --- a/packages/protocol/test/libs/LibFixedPointMath.t.sol +++ /dev/null @@ -1,47 +0,0 @@ -// SPDX-License-Identifier: UNLICENSED -// Some of the tests are taken from: -// https://github.com/recmo/experiment-solexp/blob/main/src/test/FixedPointMathLib.t.sol -pragma solidity 0.8.24; - -import "../TaikoTest.sol"; - -contract LibFixedPointMathTest is TaikoTest { - function testExp1() public { - assertEq(LibFixedPointMath.exp(-1e18), 367_879_441_171_442_321); - } - - function testExpSmallest() public pure { - int256 y = LibFixedPointMath.exp(-42_139_678_854_452_767_550); - - console2.log("LibFixedPointMath.exp(-42139678854452767550)=", uint256(y)); - } - - function testExpLargest() public pure { - int256 y = LibFixedPointMath.exp(int256(uint256(LibFixedPointMath.MAX_EXP_INPUT))); - console2.log("LibFixedPointMath.exp(135305999368893231588)=", uint256(y)); - } - - function testExpSome() public pure { - int256 y = LibFixedPointMath.exp(5e18); - console2.log("LibFixedPointMath.exp(5e18)=", uint256(y)); - } - - function testExpGas() public view { - uint256 g0 = gasleft(); - LibFixedPointMath.exp(133e18); - uint256 g1 = gasleft(); - LibFixedPointMath.exp(-23e18); - uint256 g2 = gasleft(); - LibFixedPointMath.exp(5e18); - uint256 g3 = gasleft(); - console2.logUint(g0 - g1); - console2.logUint(g1 - g2); - console2.logUint(g2 - g3); - } - - function testExp3() public pure { - LibFixedPointMath.exp(133e18); - LibFixedPointMath.exp(10e18); - LibFixedPointMath.exp(-23e18); - } -} diff --git a/packages/protocol/test/signal/SignalService.t.sol b/packages/protocol/test/signal/SignalService.t.sol deleted file mode 100644 index 25726864d182..000000000000 --- 
a/packages/protocol/test/signal/SignalService.t.sol +++ /dev/null @@ -1,649 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../TaikoTest.sol"; -import "forge-std/console2.sol"; - -contract MockSignalService is SignalService { - function _verifyHopProof( - uint64, /*chainId*/ - address, /*app*/ - bytes32, /*signal*/ - bytes32, /*value*/ - HopProof memory, /*hop*/ - address /*relay*/ - ) - internal - pure - override - returns (bytes32) - { - // Skip verifying the merkle proof entirely - return bytes32(uint256(789)); - } -} - -contract TestSignalService is TaikoTest { - AddressManager addressManager; - MockSignalService signalService; - SignalService realSignalService; - uint64 public destChainId = 7; - address taiko; - - function setUp() public { - vm.startPrank(Alice); - vm.deal(Alice, 1 ether); - vm.deal(Bob, 1 ether); - - addressManager = AddressManager( - deployProxy({ - name: "address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (address(0))), - registerTo: address(addressManager) - }) - ); - - signalService = MockSignalService( - deployProxy({ - name: "signal_service", - impl: address(new MockSignalService()), - data: abi.encodeCall(SignalService.init, (address(0), address(addressManager))) - }) - ); - - realSignalService = SignalService( - deployProxy({ - name: "signal_service", - impl: address(new SignalService()), - data: abi.encodeCall(SignalService.init, (Alice, address(addressManager))) - }) - ); - - taiko = randAddress(); - signalService.authorize(taiko, true); - realSignalService.authorize(Alice, true); - vm.stopPrank(); - } - - function test_real_signal() public { - vm.chainId(167_001); - - bytes memory proof = - hex"0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000028c5900000000000000000000000000000000000000000000000000000000000015c27a889e6436fc1cde7827f75217adf5371afb14cc56860e6d9032ba5e28214819000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000002a000000000000000000000000000000000000000000000000000000000000003800000000000000000000000000000000000000000000000000000000000000214f90211a0dfa513509c34b62c66488e60634c6fd42fe00397e4e1d15d5e70f227ded60befa05bdb02a2eca8f311b9671483176ec61fae7edcaf62b04a28c44cc2192e9d0f46a01cc45e358bfc5242aaf32f63dee58b9aa3f4113be91b2b50bb5740beda4fde25a0d92467760a1a9357492b426c682426d81af5cb713839647e49b13e01b02d6440a0c84062f286e3581246bccf7d0b7f82366e880161f91879ebef0180c9c93c941aa0db20b735c7f1053aceb0828f861d0b7e33febd12a123bc0352c929718b845faaa0247a3b41495f2b92708771026d7099e824051e275b8a29e82876030437b67c0aa0477ffe5998d9bc8b5866d020bb1d184b62cd6ab90475bc5cf9ec0a55c417c28ba074ecd264e5eb045d4d93d6670df1364257028d7b7fdda9c4eb627d7cd744780ba02221fdd890129df643dc88a6767c7d586fac3fd573ec069b15232f7b26a7ce28a06ea5ac730ebf7a40efe4cf416fac7ad0bdd3afcfb58d6df8c5915050822a3359a03ec8023d3660e15a8ba0ab69a1ed8ae5f121d3221587d14b1ee11b92fef2f92ca03ed73d6c820ff05ed2b08021ffa9a9cf754e376e26f80ba5fe8ba2906d398f8fa0e8e7f865b0d0ece00599f085d5b3e1ba7ca723b8918242001fe1939e1c
5e4636a023f41a3b7932420372da53ae441f422ca8c5da9706a79ff47a81c5c8c1fb4917a003a143ebcd7f0dc5385606c701eb2b8245300e1ea72dd478ebf3dd894b173b598000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b3f8b180a0887fcd34179304e7cd2b4e20e2b02a4a8f86479c938ef9f250afa70389b005f9808080a044716b385f8d9459d36317d101408b5ac6918cf2ca6fec073f6bc6a24a3a04e4a024c54ee716f3386163e13b31e8987471a988be429e3aef879217e22b7760561ca00140f6012398f45a3130df7f78f20f7c40bf0d1074b88d3cf981bf0afae32e2580808080a03b0365c6777cd412b8762e2e8216a2fab245f1c83111e08f09c20ae4ed8628e88080808000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006bf869a02016202fe7055f067ad86805f2e5a7f909257e295658bcbfc2b2cb8c3127fb9db846f8440180a014f07e11fa9eac150c017a5fea958a3b73935da8b057843d3314bc53acbd00e5a0dc679fd48cf611aa38e906adc93928c5f8f6fae534978ea2fa4f5935f5ed1b2c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000002e0000000000000000000000000000000000000000000000000000000000000052000000000000000000000000000000000000000000000000000000000000006e000000000000000000000000000000000000000000000000000000000000007600000000000000000000000000000000000000000000000000000000000000214f90211a09e1b6a91ab7ab54dc82ca4bf7c621663b37d09fd0adfc6ee5b04d61d1674be28a0be9221596e9d2200855bf3eed5237bf998f95a94d46f9045c3d15138262aa336a08a468dcca4b10bc41cbfbe1cff581839da9efb3a6453a1a9d51c623393056b75a0eb149f5f12c6e13cf0e0bdff4a87870c1d2a15f512d8610d938ff91420e567bca0ae4668eccb3ec464e47550870fb978fe7840ea30cc94e9b4983ea213de536caca0fd30849f4ce21a0bf9e93e7ff6681ba9f8c4c23d9c2b32aaabbba735cb3ad8bca03a61ccbbd269b701d9da7568421d47355b9b07f88cb1a8b559bcbbda60cdc588a04ccb3f9257808d764c277e5825d005516ac955f64e59d0e7ab2f94b1a4fc4c17a0697d43c2b13982e7971e4bd05cca3a3714163333c764d5383d1f5e642f6b9038a0fad2df5c5417b57cf90bac46a87f3dbb68a02fc415d382b7880ca203998e5848a0456c9736422257556e259b1ec6ef1f57603db9140a81a0537d3efa9392fa1396a0d75f6fb980e2a441be4e5da97f59b411caef24b0ddbbdf69eb51e8294c7721d0a030358f3b1834ef739810ca31f4fabd79f43dc1af8f8ece3b66cbdfec1e2f91d7a0191280d4afd9c5d9e493b78a155abbb8e5bb61754672c3deeb3002c337c7376ea0deb7ba981af9635c6b0df1de8dd515128592e2fb80bb760279f8492d8d4caa8ba070096993175dea6432f4243ae88ccdfacd67453e4d018bfe2f43dee3b5d831ad800000000000000000000000000000000000000000000000000000000000000000000000000000000000000214f90211a0994000eab355e9641a1b22339676cca81f018f367ec829b07ff569bd3f418f43a09a373f01670a460ef4d7f4e2893d5fd9411b696b4a943f18e988912e8bab8349a087849ade71c21e99bd2cff5dbb222ef04fd42d381d488936de43b7ef4380b4d3a072278a26bebb48de2be3ed56fda4bb05cfc430384f76a46402880b16fc56b823a02051ad643d886b6aebaca99b95efbdfb299ff1dbd8f7cba92431a6c83da68381a0fc9d08a35f7850ef9e6932f0b2ccca5e77dee26da198e942935436e7a04f9ba2a01cd8eb71fa95b520e37763506000751e573cdb4d6b7f22809fc7725569bf5362a05097491bc4a3dc25f339fa3be312044e0fb85445bd8bbfba76de7cc8de278db9a0057aca47ca23862aa56aa61e58822f9010782645309b24d7ed41fc564342bf97a0c0bc073a3474c0bd105c3db6a915598559501abe06e9ec9cf9f7345c440ade8ca054be9f6d8226aba0c1b4c530000991fd8476e0c99faecc3f3703751d4929d994a01d192e93484bca5e70661e1655cdc58994e467e58dc6fa6349e3339d9263f8e3a0de752f43804851bc139350b4d09d8eeae52d3793e5a77d966bdcc543e8ed4a07a08d17a09e17828697ba29022f714d57588b55942120b44adb3e7f4311b11a60dca05989405f26fd35e72e532c6528228e90fc3a010eb0d87feca60557fa18b45896a0dcf2645898dead212b4330054
c56e51f10af5d3f3bfc64a8f73cdf1bd6617e0d800000000000000000000000000000000000000000000000000000000000000000000000000000000000000194f90191a00415edc1050fe50ad35dbcf7960f64d127b236a1c0d7653a21d75f3cacb529c980a04c18411340e7a2669fd694dee4413c834cf59b13b54107b832503769f2c942b2a0c0910bd1e2e47c03541323952c9da9da3e5807c306b2ca0b4af696508b38f82a80a034e02eebc9d6a274c89c7789fd03eb12333e65346d9453cca75e4a02d7b2992ea0ff68d74f45306203073844aee925a4cc4ae4d78cc032f54504ee9c9335cd32dd8080a09c1849766089cd1349cef938726d7a9e7dc9598fb261d136d9a58e0b7caf275ba0f19ae8192e9b0aadf26d08fbe4f32d0f8949afc2e172c90c834685302ea69bbca0e10bf29e7ae1512acb04c2c1d63c124c4d02e191374f93fefce052420ce55e15a082010d47803c23ef3a2be7b3b089572c2a59de5a9173e9f9404bb71d367e0adaa0a29475985f39e34dec60f13ac4ca2370041873aa30f823b0d0d301998136804ba0d7acb7203bbbd9c87cd646416284ef32bbae269bf178e56127f4a69d6f7a6e43a0224da002242f29898a82f586a50143afa77334954d4581e61266a245c13254c7800000000000000000000000000000000000000000000000000000000000000000000000000000000000000053f851808080a024635e394cfa76468c00494a6e4fc0a50dd27d737c9e41861d32a8be31e3a38d808080808080a06f7c75a0076a5802c84d3d370baefd6b6655641f648e087b60a86c402b07ba84808080808080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044f8429f20b4780cdd5e2149e06b1a3cd443645775c177c33344f9f36e535023c39e1fa1a02299879cc2fe4c05d8b15238cbf4b15f35a7d434084d279d42f462c90f02b54400000000000000000000000000000000000000000000000000000000"; - bytes32 msgHash = hex"2299879cc2fe4c05d8b15238cbf4b15f35a7d434084d279d42f462c90f02b544"; - - address srcSignalService = 0x604C61d6618AaCdF7a7A2Fe4c42E35Ecba32AE75; - address srcBridge = 0xde5B0e8a8034eF30a8b71d78e658C85dFE3FC657; - - vm.prank(Alice); - addressManager.setAddress(32_382, "signal_service", srcSignalService); - - bytes32 stateRoot = hex"7a889e6436fc1cde7827f75217adf5371afb14cc56860e6d9032ba5e28214819"; - uint64 blockId = 5570; - vm.prank(Alice); - realSignalService.syncChainData(32_382, LibStrings.H_STATE_ROOT, blockId, stateRoot); - - realSignalService.proveSignalReceived(32_382, srcBridge, msgHash, proof); - } - - function test_SignalService_sendSignal_revert() public { - vm.expectRevert(EssentialContract.ZERO_VALUE.selector); - signalService.sendSignal(0); - } - - function test_SignalService_isSignalSent_revert() public { - bytes32 signal = bytes32(uint256(1)); - vm.expectRevert(EssentialContract.ZERO_ADDRESS.selector); - signalService.isSignalSent(address(0), signal); - - signal = bytes32(uint256(0)); - vm.expectRevert(EssentialContract.ZERO_VALUE.selector); - signalService.isSignalSent(Alice, signal); - } - - function test_SignalService_sendSignal_isSignalSent() public { - vm.startPrank(Alice); - bytes32 signal = bytes32(uint256(1)); - signalService.sendSignal(signal); - - assertTrue(signalService.isSignalSent(Alice, signal)); - } - - function test_SignalService_proveSignalReceived_revert_invalid_chainid_or_signal() public { - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](1); - - // app being address(0) will revert - vm.expectRevert(EssentialContract.ZERO_ADDRESS.selector); - signalService.proveSignalReceived({ - _chainId: 1, - _app: address(0), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - // signal being 0 will revert - vm.expectRevert(EssentialContract.ZERO_VALUE.selector); - signalService.proveSignalReceived({ - _chainId: uint64(block.chainid), - _app: randAddress(), - _signal: 0, - _proof: abi.encode(proofs) - }); - } - - function 
test_SignalService_proveSignalReceived_revert_malformat_proof() public { - // "undecodable proof" is not decodeable into SignalService.HopProof[] memory - vm.expectRevert(); - signalService.proveSignalReceived({ - _chainId: 0, - _app: randAddress(), - _signal: randBytes32(), - _proof: "undecodable proof" - }); - } - - function test_SignalService_proveSignalReceived_revert_src_signal_service_not_registered() - public - { - uint64 srcChainId = uint64(block.chainid - 1); - - // Did not call the following, so revert with RESOLVER_ZERO_ADDR - // vm.prank(Alice); - // addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](1); - - vm.expectRevert( - abi.encodeWithSelector( - AddressResolver.RESOLVER_ZERO_ADDR.selector, - srcChainId, - strToBytes32("signal_service") - ) - ); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - } - - function test_SignalService_proveSignalReceived_revert_zero_size_proof() public { - uint64 srcChainId = uint64(block.chainid - 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - // proofs.length must > 0 in order not to revert - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](0); - - vm.expectRevert(SignalService.SS_EMPTY_PROOF.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - } - - function test_SignalService_proveSignalReceived_revert_last_hop_incorrect_chainid() public { - uint64 srcChainId = uint64(block.chainid - 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](1); - - // proofs[0].chainId must be block.chainid in order not to revert - proofs[0].chainId = uint64(block.chainid + 1); - proofs[0].blockId = 1; - - vm.expectRevert(SignalService.SS_INVALID_LAST_HOP_CHAINID.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - } - - function test_SignalService_proveSignalReceived_revert_mid_hop_incorrect_chainid() public { - uint64 srcChainId = uint64(block.chainid - 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](2); - - // proofs[0].chainId must NOT be block.chainid in order not to revert - proofs[0].chainId = uint64(block.chainid); - proofs[0].blockId = 1; - - vm.expectRevert(SignalService.SS_INVALID_MID_HOP_CHAINID.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - } - - function test_SignalService_proveSignalReceived_revert_mid_hop_not_registered() public { - uint64 srcChainId = uint64(block.chainid + 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](2); - - // proofs[0].chainId must NOT be block.chainid in order not to revert - proofs[0].chainId = srcChainId + 1; - proofs[0].blockId = 1; - - vm.expectRevert( - abi.encodeWithSelector( - AddressResolver.RESOLVER_ZERO_ADDR.selector, - proofs[0].chainId, - 
strToBytes32("signal_service") - ) - ); - - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - } - - function test_SignalService_proveSignalReceived_local_chaindata_not_found() public { - uint64 srcChainId = uint64(block.chainid + 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](1); - - proofs[0].chainId = uint64(block.chainid); - proofs[0].blockId = 1; - - // the proof is a storage proof - proofs[0].accountProof = new bytes[](0); - proofs[0].storageProof = new bytes[](10); - - vm.expectRevert(SignalService.SS_SIGNAL_NOT_FOUND.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - // the proof is a full proof - proofs[0].accountProof = new bytes[](1); - - vm.expectRevert(SignalService.SS_SIGNAL_NOT_FOUND.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - } - - function test_SignalService_proveSignalReceived_one_hop_cache_signal_root() public { - uint64 srcChainId = uint64(block.chainid + 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](1); - - proofs[0].chainId = uint64(block.chainid); - proofs[0].blockId = 1; - proofs[0].rootHash = randBytes32(); - - // the proof is a storage proof - proofs[0].accountProof = new bytes[](0); - proofs[0].storageProof = new bytes[](10); - - vm.expectRevert(SignalService.SS_SIGNAL_NOT_FOUND.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - // relay the signal root - vm.prank(taiko); - signalService.syncChainData( - srcChainId, LibStrings.H_SIGNAL_ROOT, proofs[0].blockId, proofs[0].rootHash - ); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - vm.prank(Alice); - signalService.authorize(taiko, false); - - vm.expectRevert(SignalService.SS_UNAUTHORIZED.selector); - vm.prank(taiko); - signalService.syncChainData( - srcChainId, LibStrings.H_SIGNAL_ROOT, proofs[0].blockId, proofs[0].rootHash - ); - } - - function test_SignalService_proveSignalReceived_one_hop_state_root() public { - uint64 srcChainId = uint64(block.chainid + 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](1); - - proofs[0].chainId = uint64(block.chainid); - proofs[0].blockId = 1; - proofs[0].rootHash = randBytes32(); - - // the proof is a full merkle proof - proofs[0].accountProof = new bytes[](1); - proofs[0].storageProof = new bytes[](10); - - vm.expectRevert(SignalService.SS_SIGNAL_NOT_FOUND.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - // relay the state root - vm.prank(taiko); - signalService.syncChainData( - srcChainId, LibStrings.H_STATE_ROOT, proofs[0].blockId, proofs[0].rootHash - ); - - // Should not revert - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - 
_signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - assertEq( - signalService.isChainDataSynced( - srcChainId, LibStrings.H_SIGNAL_ROOT, proofs[0].blockId, bytes32(uint256(789)) - ), - false - ); - } - - function test_SignalService_proveSignalReceived_multiple_hops_no_caching() public { - uint64 srcChainId = uint64(block.chainid + 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](3); - - // first hop with full merkle proof - proofs[0].chainId = uint64(block.chainid + 2); - proofs[0].blockId = 1; - proofs[0].rootHash = randBytes32(); - proofs[0].accountProof = new bytes[](1); - proofs[0].storageProof = new bytes[](10); - - // second hop with storage merkle proof - proofs[1].chainId = uint64(block.chainid + 3); - proofs[1].blockId = 2; - proofs[1].rootHash = randBytes32(); - proofs[1].accountProof = new bytes[](0); - proofs[1].storageProof = new bytes[](10); - - // third/last hop with full merkle proof - proofs[2].chainId = uint64(block.chainid); - proofs[2].blockId = 3; - proofs[2].rootHash = randBytes32(); - proofs[2].accountProof = new bytes[](1); - proofs[2].storageProof = new bytes[](10); - - // expect RESOLVER_ZERO_ADDR - vm.expectRevert( - abi.encodeWithSelector( - AddressResolver.RESOLVER_ZERO_ADDR.selector, - proofs[0].chainId, - strToBytes32("signal_service") - ) - ); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - // Add two trusted hop relayers - vm.startPrank(Alice); - addressManager.setAddress(proofs[0].chainId, "signal_service", randAddress() /*relay1*/ ); - addressManager.setAddress(proofs[1].chainId, "signal_service", randAddress() /*relay2*/ ); - vm.stopPrank(); - - vm.expectRevert(SignalService.SS_SIGNAL_NOT_FOUND.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - vm.prank(taiko); - signalService.syncChainData( - proofs[1].chainId, LibStrings.H_STATE_ROOT, proofs[2].blockId, proofs[2].rootHash - ); - - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - } - - function test_SignalService_proveSignalReceived_revert_with_a_loop() public { - uint64 srcChainId = uint64(block.chainid + 1); - - vm.prank(Alice); - addressManager.setAddress(srcChainId, "signal_service", randAddress()); - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](3); - - // first hop with full merkle proof - proofs[0].chainId = uint64(block.chainid + 2); - proofs[0].blockId = 1; - proofs[0].rootHash = randBytes32(); - proofs[0].accountProof = new bytes[](1); - proofs[0].storageProof = new bytes[](10); - - // second hop with storage merkle proof - proofs[1].chainId = proofs[0].chainId; // same - proofs[1].blockId = 2; - proofs[1].rootHash = randBytes32(); - proofs[1].accountProof = new bytes[](0); - proofs[1].storageProof = new bytes[](10); - - // third/last hop with full merkle proof - proofs[2].chainId = uint64(block.chainid); - proofs[2].blockId = 3; - proofs[2].rootHash = randBytes32(); - proofs[2].accountProof = new bytes[](1); - proofs[2].storageProof = new bytes[](10); - - // Add two trusted hop relayers - vm.startPrank(Alice); - addressManager.setAddress(proofs[0].chainId, "signal_service", randAddress() /*relay1*/ ); - 
addressManager.setAddress(proofs[1].chainId, "signal_service", randAddress() /*relay2*/ ); - vm.stopPrank(); - - vm.prank(taiko); - signalService.syncChainData( - proofs[1].chainId, LibStrings.H_STATE_ROOT, proofs[2].blockId, proofs[2].rootHash - ); - - vm.expectRevert(SignalService.SS_INVALID_HOPS_WITH_LOOP.selector); - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - } - - function test_SignalService_proveSignalReceived_multiple_hops_caching() public { - uint64 srcChainId = uint64(block.chainid + 1); - uint64 nextChainId = srcChainId + 100; - - SignalService.HopProof[] memory proofs = new SignalService.HopProof[](9); - - // hop 1: full merkle proof, CACHE_NOTHING - proofs[0].chainId = nextChainId++; - proofs[0].blockId = 1; - proofs[0].rootHash = randBytes32(); - proofs[0].accountProof = new bytes[](1); - proofs[0].storageProof = new bytes[](10); - proofs[0].cacheOption = ISignalService.CacheOption.CACHE_NOTHING; - - // hop 2: full merkle proof, CACHE_STATE_ROOT - proofs[1].chainId = nextChainId++; - proofs[1].blockId = 2; - proofs[1].rootHash = randBytes32(); - proofs[1].accountProof = new bytes[](1); - proofs[1].storageProof = new bytes[](10); - proofs[1].cacheOption = ISignalService.CacheOption.CACHE_STATE_ROOT; - - // hop 3: full merkle proof, CACHE_SIGNAL_ROOT - proofs[2].chainId = nextChainId++; - proofs[2].blockId = 3; - proofs[2].rootHash = randBytes32(); - proofs[2].accountProof = new bytes[](1); - proofs[2].storageProof = new bytes[](10); - proofs[2].cacheOption = ISignalService.CacheOption.CACHE_SIGNAL_ROOT; - - // hop 4: full merkle proof, CACHE_BOTH - proofs[3].chainId = nextChainId++; - proofs[3].blockId = 4; - proofs[3].rootHash = randBytes32(); - proofs[3].accountProof = new bytes[](1); - proofs[3].storageProof = new bytes[](10); - proofs[3].cacheOption = ISignalService.CacheOption.CACHE_BOTH; - - // hop 5: storage merkle proof, CACHE_NOTHING - proofs[4].chainId = nextChainId++; - proofs[4].blockId = 5; - proofs[4].rootHash = randBytes32(); - proofs[4].accountProof = new bytes[](0); - proofs[4].storageProof = new bytes[](10); - proofs[4].cacheOption = ISignalService.CacheOption.CACHE_NOTHING; - - // hop 6: storage merkle proof, CACHE_STATE_ROOT - proofs[5].chainId = nextChainId++; - proofs[5].blockId = 6; - proofs[5].rootHash = randBytes32(); - proofs[5].accountProof = new bytes[](0); - proofs[5].storageProof = new bytes[](10); - proofs[5].cacheOption = ISignalService.CacheOption.CACHE_STATE_ROOT; - - // hop 7: storage merkle proof, CACHE_SIGNAL_ROOT - proofs[6].chainId = nextChainId++; - proofs[6].blockId = 7; - proofs[6].rootHash = randBytes32(); - proofs[6].accountProof = new bytes[](0); - proofs[6].storageProof = new bytes[](10); - proofs[6].cacheOption = ISignalService.CacheOption.CACHE_SIGNAL_ROOT; - - // hop 8: storage merkle proof, CACHE_BOTH - proofs[7].chainId = nextChainId++; - proofs[7].blockId = 8; - proofs[7].rootHash = randBytes32(); - proofs[7].accountProof = new bytes[](0); - proofs[7].storageProof = new bytes[](10); - proofs[7].cacheOption = ISignalService.CacheOption.CACHE_BOTH; - - // last hop, 9: full merkle proof, CACHE_BOTH - proofs[8].chainId = uint64(block.chainid); - proofs[8].blockId = 9; - proofs[8].rootHash = randBytes32(); - proofs[8].accountProof = new bytes[](1); - proofs[8].storageProof = new bytes[](10); - proofs[8].cacheOption = ISignalService.CacheOption.CACHE_BOTH; - - // Add two trusted hop relayers - vm.startPrank(Alice); - 
addressManager.setAddress(srcChainId, "signal_service", randAddress()); - for (uint256 i; i < proofs.length; ++i) { - addressManager.setAddress( - proofs[i].chainId, "signal_service", randAddress() /*relay1*/ - ); - } - vm.stopPrank(); - - vm.prank(taiko); - signalService.syncChainData( - proofs[7].chainId, LibStrings.H_STATE_ROOT, proofs[8].blockId, proofs[8].rootHash - ); - - signalService.proveSignalReceived({ - _chainId: srcChainId, - _app: randAddress(), - _signal: randBytes32(), - _proof: abi.encode(proofs) - }); - - // hop 1: full merkle proof, CACHE_NOTHING - _verifyCache(srcChainId, proofs[0].blockId, proofs[0].rootHash, false, false); - // hop 2: full merkle proof, CACHE_STATE_ROOT - _verifyCache(proofs[0].chainId, proofs[1].blockId, proofs[1].rootHash, true, false); - // hop 3: full merkle proof, CACHE_SIGNAL_ROOT - _verifyCache(proofs[1].chainId, proofs[2].blockId, proofs[2].rootHash, false, true); - // hop 4: full merkle proof, CACHE_BOTH - _verifyCache(proofs[2].chainId, proofs[3].blockId, proofs[3].rootHash, true, true); - // hop 5: storage merkle proof, CACHE_NOTHING - _verifyCache(proofs[3].chainId, proofs[4].blockId, proofs[4].rootHash, false, false); - // hop 6: storage merkle proof, CACHE_STATE_ROOT - _verifyCache(proofs[4].chainId, proofs[5].blockId, proofs[5].rootHash, false, false); - // hop 7: storage merkle proof, CACHE_SIGNAL_ROOT - _verifyCache(proofs[5].chainId, proofs[6].blockId, proofs[6].rootHash, false, true); - // hop 8: storage merkle proof, CACHE_BOTH - _verifyCache(proofs[6].chainId, proofs[7].blockId, proofs[7].rootHash, false, true); - // last hop, 9: full merkle proof, CACHE_BOTH - // last hop's state root is already cached even before the proveSignalReceived call. - _verifyCache(proofs[7].chainId, proofs[8].blockId, proofs[8].rootHash, true, true); - } - - function _verifyCache( - uint64 chainId, - uint64 blockId, - bytes32 stateRoot, - bool stateRootCached, - bool signalRootCached - ) - private - { - assertEq( - signalService.isChainDataSynced(chainId, LibStrings.H_STATE_ROOT, blockId, stateRoot), - stateRootCached - ); - - assertEq( - signalService.isChainDataSynced( - chainId, LibStrings.H_SIGNAL_ROOT, blockId, bytes32(uint256(789)) - ), - signalRootCached - ); - } -} diff --git a/packages/protocol/test/tokenvault/BridgedERC20.t.sol b/packages/protocol/test/tokenvault/BridgedERC20.t.sol deleted file mode 100644 index 80470c706449..000000000000 --- a/packages/protocol/test/tokenvault/BridgedERC20.t.sol +++ /dev/null @@ -1,139 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../TaikoTest.sol"; - -contract TestBridgedERC20 is TaikoTest { - address manager; - address vault = randAddress(); - address owner = randAddress(); - - function setUp() public { - manager = deployProxy({ - name: "address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (address(0))) - }); - - register(manager, "erc20_vault", vault); - } - - function test_20Vault_migration__change_migration_status() public { - BridgedERC20 btoken = deployBridgedToken("FOO"); - - vm.expectRevert(); - btoken.changeMigrationStatus(Emma, false); - - vm.startPrank(vault); - btoken.changeMigrationStatus(Frank, false); - btoken.changeMigrationStatus(address(0), false); - btoken.changeMigrationStatus(address(0), true); - btoken.changeMigrationStatus(Frank, true); - vm.expectRevert(); - btoken.changeMigrationStatus(Frank, true); - - vm.stopPrank(); - } - - function 
test_20Vault_migration___only_vault_can_min__but_cannot_burn_when_migration_off() - public - { - BridgedERC20 btoken = deployBridgedToken("BAR"); - // only erc20_vault can brun and mint - vm.prank(vault, vault); - btoken.mint(Bob, 1000); - //Vault cannot burn only if it owns the tokens - vm.expectRevert(); - vm.prank(Bob, Bob); - btoken.burn(600); - assertEq(btoken.balanceOf(Bob), 1000); - vm.stopPrank(); - - // Owner can burn/mint - vm.prank(owner, owner); - btoken.mint(Bob, 1000); - } - - function test_20Vault_migration__old_to_new() public { - BridgedERC20 oldToken = deployBridgedToken("OLD"); - BridgedERC20 newToken = deployBridgedToken("NEW"); - - vm.startPrank(vault); - oldToken.mint(Bob, 100); - newToken.mint(Bob, 200); - - oldToken.changeMigrationStatus(address(newToken), false); - newToken.changeMigrationStatus(address(oldToken), true); - vm.stopPrank(); - - // Testing oldToken - // 1. minting is not possible for Bob, owner, or vault - vm.prank(Bob); - vm.expectRevert(); - oldToken.mint(Bob, 10); - - vm.prank(owner); - vm.expectRevert(); - oldToken.mint(Bob, 10); - - vm.prank(vault); - vm.expectRevert(); - oldToken.mint(Bob, 10); - - // but can be done by the token owner - if migrating out phase - vm.prank(Bob); - oldToken.burn(10); - assertEq(oldToken.balanceOf(Bob), 90); - assertEq(newToken.balanceOf(Bob), 210); - - // Testing newToken - // 1. Nobody can mint except the vault - vm.prank(Bob); - vm.expectRevert(); - newToken.mint(Bob, 10); - - vm.prank(owner); - newToken.mint(Bob, 10); - - vm.prank(vault); - newToken.mint(Bob, 15); - assertEq(newToken.balanceOf(Bob), 235); - - // Vault can only burn if it owns the tokens - vm.prank(vault); - vm.expectRevert(); - newToken.burn(25); - assertEq(newToken.balanceOf(Bob), 235); - - // Imitate current bridge-back operation, as Bob gave approval (for bridging back) and then - // ERC20Vault does the "transfer and burn" - vm.prank(Bob); - newToken.approve(vault, 25); - - // Following the "transfer and burn" pattern - vm.prank(vault); - newToken.transferFrom(Bob, vault, 25); - - vm.prank(vault); - newToken.burn(25); - - assertEq(newToken.balanceOf(Bob), 210); - } - - function deployBridgedToken(string memory name) internal returns (BridgedERC20) { - address srcToken = randAddress(); - uint256 srcChainId = 1000; - uint8 srcDecimals = 11; - return BridgedERC20( - deployProxy({ - name: "bridged_token1", - impl: address(new BridgedERC20()), - data: abi.encodeCall( - BridgedERC20.init, - (owner, address(manager), srcToken, srcChainId, srcDecimals, name, name) - ), - registerTo: manager - }) - ); - } -} diff --git a/packages/protocol/test/tokenvault/ERC1155Vault.t.sol b/packages/protocol/test/tokenvault/ERC1155Vault.t.sol deleted file mode 100644 index c7a4853e664c..000000000000 --- a/packages/protocol/test/tokenvault/ERC1155Vault.t.sol +++ /dev/null @@ -1,988 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/token/ERC1155/ERC1155.sol"; -import "../TaikoTest.sol"; - -contract TestTokenERC1155 is ERC1155 { - constructor(string memory baseURI) ERC1155(baseURI) { } - - function mint(uint256 tokenId, uint256 amount) public { - _mint(msg.sender, tokenId, amount, ""); - } -} - -// PrankDestBridge lets us simulate a transaction to the ERC1155Vault -// from a named Bridge, without having to test/run through the real Bridge code, -// outside the scope of the unit tests in the ERC1155Vault. 
-contract PrankDestBridge { - ERC1155Vault destERC1155Vault; - - struct BridgeContext { - bytes32 msgHash; - address sender; - uint64 srcChainId; - } - - BridgeContext ctx; - - constructor(ERC1155Vault _erc1155Vault) { - destERC1155Vault = _erc1155Vault; - } - - function setERC1155Vault(address addr) public { - destERC1155Vault = ERC1155Vault(addr); - } - - function sendMessage(IBridge.Message memory message) - external - payable - returns (bytes32 msgHash, IBridge.Message memory _message) - { - // Dummy return value - return (keccak256(abi.encode(message.id)), _message); - } - - function context() public view returns (BridgeContext memory) { - return ctx; - } - - function sendReceiveERC1155ToERC1155Vault( - BaseNFTVault.CanonicalNFT calldata ctoken, - address from, - address to, - uint256[] memory tokenIds, - uint256[] memory amounts, - bytes32 msgHash, - address srcChainERC1155Vault, - uint64 srcChainId, - uint256 mockLibInvokeMsgValue - ) - public - { - ctx.sender = srcChainERC1155Vault; - ctx.msgHash = msgHash; - ctx.srcChainId = srcChainId; - - // We need this in order to 'mock' the LibBridgeInvoke's - // (success,retVal) = - // message.to.call{ value: message.value, gas: gasLimit - // }(message.data); - // The problem (with foundry) is that this way it is not able to deploy - // a contract - // most probably due to some deployment address nonce issue. (Seems a - // known issue). - destERC1155Vault.onMessageInvocation{ value: mockLibInvokeMsgValue }( - abi.encode(ctoken, from, to, tokenIds, amounts) - ); - - ctx.sender = address(0); - ctx.msgHash = bytes32(0); - ctx.srcChainId = 0; - } -} - -contract UpdatedBridgedERC1155 is BridgedERC1155 { - function helloWorld() public pure returns (string memory) { - return "helloworld"; - } -} - -contract ERC1155VaultTest is TaikoTest { - uint32 private constant GAS_LIMIT = 2_000_000; - AddressManager addressManager; - BadReceiver badReceiver; - Bridge bridge; - Bridge destChainBridge; - PrankDestBridge destChainIdBridge; - SkipProofCheckSignal mockProofSignalService; - ERC1155Vault erc1155Vault; - ERC1155Vault destChainErc1155Vault; - TestTokenERC1155 ctoken1155; - SignalService signalService; - uint64 destChainId = 19_389; - - function setUp() public { - vm.startPrank(Carol); - vm.deal(Alice, 100 ether); - vm.deal(Carol, 100 ether); - vm.deal(Bob, 100 ether); - addressManager = AddressManager( - deployProxy({ - name: "address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (address(0))) - }) - ); - - bridge = Bridge( - payable( - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: abi.encodeCall(Bridge.init, (address(0), address(addressManager))), - registerTo: address(addressManager) - }) - ) - ); - - destChainBridge = Bridge( - payable( - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: abi.encodeCall(Bridge.init, (address(0), address(addressManager))), - registerTo: address(addressManager) - }) - ) - ); - - signalService = SignalService( - deployProxy({ - name: "signal_service", - impl: address(new SignalService()), - data: abi.encodeCall(SignalService.init, (address(0), address(addressManager))) - }) - ); - - erc1155Vault = ERC1155Vault( - deployProxy({ - name: "erc1155_vault", - impl: address(new ERC1155Vault()), - data: abi.encodeCall(ERC1155Vault.init, (address(0), address(addressManager))) - }) - ); - - destChainErc1155Vault = ERC1155Vault( - deployProxy({ - name: "erc1155_vault", - impl: address(new ERC1155Vault()), - data: 
abi.encodeCall(ERC1155Vault.init, (address(0), address(addressManager))) - }) - ); - - destChainIdBridge = new PrankDestBridge(destChainErc1155Vault); - vm.deal(address(destChainIdBridge), 100 ether); - - mockProofSignalService = SkipProofCheckSignal( - deployProxy({ - name: "signal_service", - impl: address(new SkipProofCheckSignal()), - data: abi.encodeCall(SignalService.init, (address(0), address(addressManager))) - }) - ); - - addressManager.setAddress( - uint64(block.chainid), "signal_service", address(mockProofSignalService) - ); - - addressManager.setAddress(destChainId, "signal_service", address(mockProofSignalService)); - - addressManager.setAddress(uint64(block.chainid), "bridge", address(bridge)); - - addressManager.setAddress(destChainId, "bridge", address(destChainIdBridge)); - - addressManager.setAddress(uint64(block.chainid), "erc1155_vault", address(erc1155Vault)); - - addressManager.setAddress(destChainId, "erc1155_vault", address(destChainErc1155Vault)); - - // Below 2-2 registrations (mock) are needed bc of - // LibBridgeRecall.sol's - // resolve address - addressManager.setAddress(destChainId, "erc721_vault", address(mockProofSignalService)); - addressManager.setAddress(destChainId, "erc20_vault", address(mockProofSignalService)); - addressManager.setAddress( - uint64(block.chainid), "erc721_vault", address(mockProofSignalService) - ); - addressManager.setAddress( - uint64(block.chainid), "erc20_vault", address(mockProofSignalService) - ); - - vm.deal(address(bridge), 100 ether); - - address bridgedERC1155 = address(new BridgedERC1155()); - - addressManager.setAddress(destChainId, "bridged_erc1155", bridgedERC1155); - addressManager.setAddress(uint64(block.chainid), "bridged_erc1155", bridgedERC1155); - - ctoken1155 = new TestTokenERC1155("http://example.host.com/"); - vm.stopPrank(); - vm.startPrank(Alice, Alice); - ctoken1155.mint(1, 10); - ctoken1155.mint(2, 10); - - vm.stopPrank(); - } - - function getPreDeterminedDataBytes() internal pure returns (bytes memory) { - return - hex"20b8155900000000000000000000000000000000000000000000000000000000000000a00000000000000000000000007e5f4552091a69125d5dfcb7b8c2659029395bdf0000000000000000000000007e5f4552091a69125d5dfcb7b8c2659029395bdf000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000001a00000000000000000000000000000000000000000000000000000000000007a690000000000000000000000007935de70183a080242a58f64637a8e7f15349b63000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"; - } - - function test_1155Vault_sendToken_1155() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 2; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - 
address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(Alice, 1), 8); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 2); - } - - function test_1155Vault_sendToken_with_invalid_token_address_1155() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 2; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, address(0), Alice, GAS_LIMIT, address(0), GAS_LIMIT, tokenIds, amounts - ); - vm.prank(Alice, Alice); - vm.expectRevert(BaseNFTVault.VAULT_INVALID_TOKEN.selector); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - } - - function test_1155Vault_sendToken_with_0_tokens_1155() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 0; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - vm.expectRevert(BaseNFTVault.VAULT_INVALID_AMOUNT.selector); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - } - - function test_1155Vault_receiveTokens_from_newly_deployed_bridged_contract_on_destination_chain_1155( - ) - public - { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 2; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(Alice, 1), 8); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 2); - - amounts[0] = 2; - BaseNFTVault.CanonicalNFT memory ctoken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(ctoken1155), - symbol: "", - name: "" - }); - - uint64 srcChainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - ctoken, - Alice, - Alice, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - srcChainId, - 0 - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc1155Vault.canonicalToBridged(srcChainId, address(ctoken1155)); - - // Alice bridged over 2 items - assertEq(ERC1155(deployedContract).balanceOf(Alice, 1), 2); - } - - function test_1155Vault_receiveTokens_but_mint_not_deploy_if_bridged_second_time_1155() - public - { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); 
- - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 2; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(Alice, 1), 8); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 2); - - BaseNFTVault.CanonicalNFT memory ctoken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(ctoken1155), - symbol: "", - name: "" - }); - - uint64 srcChainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - ctoken, - Alice, - Alice, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - srcChainId, - 0 - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc1155Vault.canonicalToBridged(srcChainId, address(ctoken1155)); - - // Alice bridged over 2 items - assertEq(ERC1155(deployedContract).balanceOf(Alice, 1), 2); - - // Change back to 'L1' - vm.chainId(srcChainId); - - tokenIds[0] = 1; - amounts[0] = 1; - - sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(Alice, 1), 7); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 3); - - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - ctoken, - Alice, - Alice, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - srcChainId, - 0 - ); - - // Query canonicalToBridged - address bridgedContract = - destChainErc1155Vault.canonicalToBridged(srcChainId, address(ctoken1155)); - - assertEq(bridgedContract, deployedContract); - } - - function test_1155Vault_receiveTokens_erc1155_with_ether_to_dave() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 2; - - uint256 etherValue = 0.1 ether; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - David, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: etherValue }(sendOpts); - - assertEq(ctoken1155.balanceOf(Alice, 1), 8); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 2); - - amounts[0] = 2; - BaseNFTVault.CanonicalNFT memory ctoken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(ctoken1155), - symbol: "", - name: "" - }); - - uint64 srcChainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - ctoken, - Alice, - David, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - srcChainId, - etherValue - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc1155Vault.canonicalToBridged(srcChainId, address(ctoken1155)); - - // Alice bridged over 2 items and etherValue to David - assertEq(ERC1155(deployedContract).balanceOf(David, 1), 2); - assertEq(David.balance, etherValue); 
- } - - function test_1155Vault_onMessageRecalled_1155() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 2; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - - vm.prank(Alice, Alice); - IBridge.Message memory message = erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(Alice, 1), 8); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 2); - - bridge.recallMessage(message, bytes("")); - - // Alice got back her NFTs, and vault has 0 - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - } - - function test_1155Vault_receiveTokens_multiple_1155() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - assertEq(ctoken1155.balanceOf(Alice, 2), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 2), 0); - - uint256[] memory tokenIds = new uint256[](2); - tokenIds[0] = 1; - tokenIds[1] = 2; - - uint256[] memory amounts = new uint256[](2); - amounts[0] = 2; - amounts[1] = 5; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(Alice, 1), 8); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 2); - - assertEq(ctoken1155.balanceOf(Alice, 2), 5); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 2), 5); - - BaseNFTVault.CanonicalNFT memory ctoken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(ctoken1155), - symbol: "", - name: "" - }); - - uint64 srcChainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - ctoken, - Alice, - Alice, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - srcChainId, - 0 - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc1155Vault.canonicalToBridged(srcChainId, address(ctoken1155)); - - // Alice bridged over 2 items - assertEq(ERC1155(deployedContract).balanceOf(Alice, 1), 2); - assertEq(ERC1155(deployedContract).balanceOf(Alice, 2), 5); - } - - function test_1155Vault_bridge_back_but_owner_is_different_now_1155() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 1; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 1); - 
- // This canonicalToken is basically need to be exact same as the - // sendToken() puts together - // - here is just mocking putting it together. - BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(ctoken1155), - symbol: "TT", - name: "TT" - }); - - uint64 chainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - canonicalToken, - Alice, - Alice, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - chainId, - 0 - ); - // Query canonicalToBridged - address deployedContract = - destChainErc1155Vault.canonicalToBridged(chainId, address(ctoken1155)); - - // Alice bridged over 1 from tokenId 1 - assertEq(ERC1155(deployedContract).balanceOf(Alice, 1), 1); - - // Transfer the asset to Bob, and Bob can receive it back on canonical - // chain - vm.prank(Alice, Alice); - ERC1155(deployedContract).safeTransferFrom(Alice, Bob, 1, 1, ""); - - assertEq(ERC1155(deployedContract).balanceOf(Bob, 1), 1); - assertEq(ERC1155(deployedContract).balanceOf(Alice, 1), 0); - - vm.prank(Bob, Bob); - ERC1155(deployedContract).setApprovalForAll(address(destChainErc1155Vault), true); - - sendOpts = BaseNFTVault.BridgeTransferOp( - chainId, - address(0), - Bob, - GAS_LIMIT, - address(deployedContract), - GAS_LIMIT, - tokenIds, - amounts - ); - - vm.prank(Bob, Bob); - destChainErc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - vm.chainId(chainId); - - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 1); - - destChainIdBridge.setERC1155Vault(address(erc1155Vault)); - - vm.prank(Carol, Carol); - addressManager.setAddress(uint64(block.chainid), "bridge", address(destChainIdBridge)); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - canonicalToken, - Bob, - Bob, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - chainId, - 0 - ); - - assertEq(ctoken1155.balanceOf(Bob, 1), 1); - } - - function test_1155Vault_bridge_back_but_original_owner_cannot_claim_it_anymore_if_sold_1155() - public - { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 1; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 1); - - // This canonicalToken is basically need to be exact same as the - // sendToken() puts together - // - here is just mocking putting it together. 
- BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(ctoken1155), - symbol: "TT", - name: "TT" - }); - - uint64 chainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - canonicalToken, - Alice, - Alice, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - chainId, - 0 - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc1155Vault.canonicalToBridged(chainId, address(ctoken1155)); - // Alice bridged over 1 from tokenId 1 - assertEq(ERC1155(deployedContract).balanceOf(Alice, 1), 1); - - // Transfer the asset to Bob, and Bob can receive it back on canonical - // chain - vm.prank(Alice, Alice); - ERC1155(deployedContract).safeTransferFrom(Alice, Bob, 1, 1, ""); - - assertEq(ERC1155(deployedContract).balanceOf(Bob, 1), 1); - assertEq(ERC1155(deployedContract).balanceOf(Alice, 1), 0); - - vm.prank(Bob, Bob); - ERC1155(deployedContract).setApprovalForAll(address(destChainErc1155Vault), true); - - sendOpts = BaseNFTVault.BridgeTransferOp( - chainId, - address(0), - Alice, - GAS_LIMIT, - address(deployedContract), - GAS_LIMIT, - tokenIds, - amounts - ); - - vm.prank(Alice, Alice); - vm.expectRevert("ERC1155: caller is not token owner or approved"); - destChainErc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - } - - function test_1155Vault_upgrade_bridged_tokens_1155() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 2; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(Alice, 1), 8); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 2); - - BaseNFTVault.CanonicalNFT memory ctoken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(ctoken1155), - symbol: "", - name: "" - }); - - uint64 srcChainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - ctoken, - Alice, - Alice, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - srcChainId, - 0 - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc1155Vault.canonicalToBridged(srcChainId, address(ctoken1155)); - - try UpdatedBridgedERC1155(deployedContract).helloWorld() { - fail(); - } catch { } - - // Upgrade the implementation of that contract - // so that it supports now the 'helloWorld' call - UpdatedBridgedERC1155 newBridgedContract = new UpdatedBridgedERC1155(); - vm.prank(Carol, Carol); - BridgedERC1155(payable(deployedContract)).upgradeTo(address(newBridgedContract)); - - try UpdatedBridgedERC1155(deployedContract).helloWorld() { } - catch { - fail(); - } - } - - function test_1155Vault_shall_not_be_able_to_burn_arbitrarily() public { - vm.prank(Alice, Alice); - ctoken1155.setApprovalForAll(address(erc1155Vault), true); - - assertEq(ctoken1155.balanceOf(Alice, 1), 10); - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 0); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory 
amounts = new uint256[](1); - amounts[0] = 1; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(ctoken1155), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ctoken1155.balanceOf(address(erc1155Vault), 1), 1); - - // This canonicalToken is basically need to be exact same as the - // sendToken() puts together - // - here is just mocking putting it together. - BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(ctoken1155), - symbol: "TT", - name: "TT" - }); - - uint64 chainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC1155ToERC1155Vault( - canonicalToken, - Alice, - Alice, - tokenIds, - amounts, - bytes32(0), - address(erc1155Vault), - chainId, - 0 - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc1155Vault.canonicalToBridged(chainId, address(ctoken1155)); - // Alice bridged over 1 from tokenId 1 - assertEq(ERC1155(deployedContract).balanceOf(Alice, 1), 1); - - sendOpts = BaseNFTVault.BridgeTransferOp( - chainId, - address(0), - Alice, - GAS_LIMIT, - address(deployedContract), - GAS_LIMIT, - tokenIds, - amounts - ); - - // Alice hasn't approved the vault yet! - vm.prank(Alice, Alice); - vm.expectRevert("ERC1155: caller is not token owner or approved"); - destChainErc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - // Also Vault cannot burn tokens it does not own (even if the priv key compromised) - vm.prank(address(destChainErc1155Vault), address(destChainErc1155Vault)); - vm.expectRevert("ERC1155: burn amount exceeds balance"); - BridgedERC1155(deployedContract).burn(1, 20); - - // After setApprovalForAll() ERC1155Vault can transfer and burn - vm.prank(Alice, Alice); - ERC1155(deployedContract).setApprovalForAll(address(destChainErc1155Vault), true); - vm.prank(Alice, Alice); - destChainErc1155Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - } -} diff --git a/packages/protocol/test/tokenvault/ERC20Vault.t.sol b/packages/protocol/test/tokenvault/ERC20Vault.t.sol deleted file mode 100644 index 98f1c9ddc12b..000000000000 --- a/packages/protocol/test/tokenvault/ERC20Vault.t.sol +++ /dev/null @@ -1,681 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "../TaikoTest.sol"; - -// PrankDestBridge lets us simulate a transaction to the ERC20Vault -// from a named Bridge, without having to test/run through the real Bridge code, -// outside the scope of the unit tests in the ERC20Vault. 
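The ERC20 variant below follows the same idea; the message body it hands to the vault is just the ABI-encoded canonical-token metadata plus the transfer details, mirroring what the vault decodes in onMessageInvocation. A short sketch of the delivery step, using the names from the mock below:

    // Sketch only: encode the payload the vault expects and invoke it while
    // this contract is treated as the registered bridge.
    bytes memory payload = abi.encode(canonicalToken, from, to, amount);
    destERC20Vault.onMessageInvocation{ value: mockLibInvokeMsgValue }(payload);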
-contract PrankDestBridge { - ERC20Vault destERC20Vault; - TContext ctx; - - struct TContext { - bytes32 msgHash; // messageHash - address sender; - uint64 srcChainId; - } - - constructor(ERC20Vault _erc20Vault) { - destERC20Vault = _erc20Vault; - } - - function setERC20Vault(address addr) public { - destERC20Vault = ERC20Vault(addr); - } - - function context() public view returns (TContext memory) { - return ctx; - } - - function sendReceiveERC20ToERC20Vault( - ERC20Vault.CanonicalERC20 calldata canonicalToken, - address from, - address to, - uint64 amount, - bytes32 msgHash, - address srcChainERC20Vault, - uint64 srcChainId, - uint256 mockLibInvokeMsgValue - ) - public - { - ctx.sender = srcChainERC20Vault; - ctx.msgHash = msgHash; - ctx.srcChainId = srcChainId; - - // We need this in order to 'mock' the LibBridgeInvoke's - // (success,retVal) = - // message.to.call{ value: message.value, gas: gasLimit - // }(message.data); - // The problem (with foundry) is that this way it is not able to deploy - // a contract most probably due to some deployment address nonce issue. (Seems a known - // issue). - destERC20Vault.onMessageInvocation{ value: mockLibInvokeMsgValue }( - abi.encode(canonicalToken, from, to, amount) - ); - - ctx.sender = address(0); - ctx.msgHash = bytes32(0); - ctx.srcChainId = 0; - } -} - -contract UpdatedBridgedERC20 is BridgedERC20 { - function helloWorld() public pure returns (string memory) { - return "helloworld"; - } -} - -contract TestERC20Vault is TaikoTest { - TaikoToken tko; - AddressManager addressManager; - Bridge bridge; - ERC20Vault erc20Vault; - ERC20Vault destChainIdERC20Vault; - PrankDestBridge destChainIdBridge; - SkipProofCheckSignal mockProofSignalService; - FreeMintERC20 erc20; - FreeMintERC20 weirdNamedToken; - uint64 destChainId = 7; - uint64 srcChainId = uint64(block.chainid); - - BridgedERC20 usdc; - BridgedERC20 usdt; - BridgedERC20 stETH; - - function setUp() public { - vm.startPrank(Carol); - vm.deal(Alice, 1 ether); - vm.deal(Carol, 1 ether); - vm.deal(Bob, 1 ether); - - addressManager = AddressManager( - deployProxy({ - name: "address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (address(0))) - }) - ); - - tko = TaikoToken( - deployProxy({ - name: "taiko_token", - impl: address(new TaikoToken()), - data: abi.encodeCall(TaikoToken.init, (address(0), address(this))) - }) - ); - - addressManager.setAddress(uint64(block.chainid), "taiko_token", address(tko)); - - erc20Vault = ERC20Vault( - deployProxy({ - name: "erc20_vault", - impl: address(new ERC20Vault()), - data: abi.encodeCall(ERC20Vault.init, (address(0), address(addressManager))) - }) - ); - - destChainIdERC20Vault = ERC20Vault( - deployProxy({ - name: "erc20_vault", - impl: address(new ERC20Vault()), - data: abi.encodeCall(ERC20Vault.init, (address(0), address(addressManager))) - }) - ); - - erc20 = new FreeMintERC20("ERC20", "ERC20"); - erc20.mint(Alice); - - weirdNamedToken = new FreeMintERC20("", "123456abcdefgh"); - weirdNamedToken.mint(Alice); - - bridge = Bridge( - payable( - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: abi.encodeCall(Bridge.init, (address(0), address(addressManager))), - registerTo: address(addressManager) - }) - ) - ); - - destChainIdBridge = new PrankDestBridge(erc20Vault); - vm.deal(address(destChainIdBridge), 100 ether); - - mockProofSignalService = SkipProofCheckSignal( - deployProxy({ - name: "signal_service", - impl: address(new SkipProofCheckSignal()), - data: 
abi.encodeCall(SignalService.init, (address(0), address(addressManager))) - }) - ); - - addressManager.setAddress( - uint64(block.chainid), "signal_service", address(mockProofSignalService) - ); - - addressManager.setAddress(destChainId, "signal_service", address(mockProofSignalService)); - - addressManager.setAddress(uint64(block.chainid), "erc20_vault", address(erc20Vault)); - - addressManager.setAddress(destChainId, "erc20_vault", address(destChainIdERC20Vault)); - - addressManager.setAddress(destChainId, "bridge", address(destChainIdBridge)); - - address bridgedERC20 = address(new BridgedERC20()); - - addressManager.setAddress(destChainId, "bridged_erc20", bridgedERC20); - - addressManager.setAddress(uint64(block.chainid), "bridged_erc20", bridgedERC20); - - usdc = BridgedERC20( - deployProxy({ - name: "usdc", - impl: address(new BridgedERC20()), - data: abi.encodeCall( - BridgedERC20.init, - (address(0), address(addressManager), randAddress(), 100, 18, "USDC", "USDC coin") - ) - }) - ); - - usdt = BridgedERC20( - deployProxy({ - name: "usdt", - impl: address(new BridgedERC20()), - data: abi.encodeCall( - BridgedERC20.init, - (address(0), address(addressManager), randAddress(), 100, 18, "USDT", "USDT coin") - ) - }) - ); - - stETH = BridgedERC20( - deployProxy({ - name: "stETH", - impl: address(new BridgedERC20()), - data: abi.encodeCall( - BridgedERC20.init, - ( - address(0), - address(addressManager), - randAddress(), - 100, - 18, - "stETH", - "Lido Staked ETH" - ) - ) - }) - ); - vm.stopPrank(); - } - - function test_20Vault_send_erc20_revert_if_allowance_not_set() public { - vm.startPrank(Alice); - vm.expectRevert(BaseVault.VAULT_INSUFFICIENT_FEE.selector); - erc20Vault.sendToken( - ERC20Vault.BridgeTransferOp( - destChainId, address(0), Bob, 1, address(erc20), 1_000_000, 1 wei - ) - ); - } - - function test_20Vault_send_erc20_no_processing_fee() public { - vm.startPrank(Alice); - - uint64 amount = 2 wei; - erc20.approve(address(erc20Vault), amount); - - uint256 aliceBalanceBefore = erc20.balanceOf(Alice); - uint256 erc20VaultBalanceBefore = erc20.balanceOf(address(erc20Vault)); - - erc20Vault.sendToken( - ERC20Vault.BridgeTransferOp( - destChainId, address(0), Bob, 0, address(erc20), 1_000_000, amount - ) - ); - - uint256 aliceBalanceAfter = erc20.balanceOf(Alice); - uint256 erc20VaultBalanceAfter = erc20.balanceOf(address(erc20Vault)); - - assertEq(aliceBalanceBefore - aliceBalanceAfter, amount); - assertEq(erc20VaultBalanceAfter - erc20VaultBalanceBefore, amount); - } - - function test_20Vault_send_erc20_processing_fee_reverts_if_msg_value_too_low() public { - vm.startPrank(Alice); - - uint64 amount = 2 wei; - erc20.approve(address(erc20Vault), amount); - - vm.expectRevert(); - erc20Vault.sendToken( - ERC20Vault.BridgeTransferOp( - destChainId, address(0), Bob, amount - 1, address(erc20), 1_000_000, amount - ) - ); - } - - function test_20Vault_send_erc20_processing_fee() public { - vm.startPrank(Alice); - - uint64 amount = 2 wei; - erc20.approve(address(erc20Vault), amount); - - uint256 aliceBalanceBefore = erc20.balanceOf(Alice); - uint256 erc20VaultBalanceBefore = erc20.balanceOf(address(erc20Vault)); - - erc20Vault.sendToken{ value: amount }( - ERC20Vault.BridgeTransferOp( - destChainId, - address(0), - Bob, - amount - 1, - address(erc20), - 1_000_000, - amount - 1 // value: (msg.value - fee) - ) - ); - - uint256 aliceBalanceAfter = erc20.balanceOf(Alice); - uint256 erc20VaultBalanceAfter = erc20.balanceOf(address(erc20Vault)); - - assertEq(aliceBalanceBefore - 
aliceBalanceAfter, 1); - assertEq(erc20VaultBalanceAfter - erc20VaultBalanceBefore, 1); - } - - function test_20Vault_send_erc20_reverts_invalid_amount() public { - vm.startPrank(Alice); - - uint64 amount = 0; - - vm.expectRevert(ERC20Vault.VAULT_INVALID_AMOUNT.selector); - erc20Vault.sendToken( - ERC20Vault.BridgeTransferOp( - destChainId, address(0), Bob, 0, address(erc20), 1_000_000, amount - ) - ); - } - - function test_20Vault_send_erc20_reverts_invalid_token_address() public { - vm.startPrank(Alice); - - uint64 amount = 1; - - vm.expectRevert(ERC20Vault.VAULT_INVALID_TOKEN.selector); - erc20Vault.sendToken( - ERC20Vault.BridgeTransferOp( - destChainId, address(0), Bob, 0, address(0), 1_000_000, amount - ) - ); - } - - function test_20Vault_receive_erc20_canonical_to_dest_chain_transfers_from_canonical_token() - public - { - vm.startPrank(Alice); - - vm.chainId(destChainId); - - erc20.mint(address(erc20Vault)); - - uint64 amount = 1; - address to = Bob; - - uint256 erc20VaultBalanceBefore = erc20.balanceOf(address(erc20Vault)); - uint256 toBalanceBefore = erc20.balanceOf(to); - - destChainIdBridge.sendReceiveERC20ToERC20Vault( - erc20ToCanonicalERC20(destChainId), - Alice, - to, - amount, - bytes32(0), - address(erc20Vault), - srcChainId, - 0 - ); - - uint256 erc20VaultBalanceAfter = erc20.balanceOf(address(erc20Vault)); - assertEq(erc20VaultBalanceBefore - erc20VaultBalanceAfter, amount); - - uint256 toBalanceAfter = erc20.balanceOf(to); - assertEq(toBalanceAfter - toBalanceBefore, amount); - } - - function test_20Vault_receiveTokens_erc20_with_ether_to_dave() public { - vm.startPrank(Alice); - - vm.chainId(destChainId); - - erc20.mint(address(erc20Vault)); - - uint64 amount = 1; - uint256 etherAmount = 0.1 ether; - address to = David; - - uint256 erc20VaultBalanceBefore = erc20.balanceOf(address(erc20Vault)); - uint256 toBalanceBefore = erc20.balanceOf(to); - - destChainIdBridge.sendReceiveERC20ToERC20Vault( - erc20ToCanonicalERC20(destChainId), - Alice, - to, - amount, - bytes32(0), - address(erc20Vault), - srcChainId, - etherAmount - ); - - uint256 erc20VaultBalanceAfter = erc20.balanceOf(address(erc20Vault)); - assertEq(erc20VaultBalanceBefore - erc20VaultBalanceAfter, amount); - - uint256 toBalanceAfter = erc20.balanceOf(to); - assertEq(toBalanceAfter - toBalanceBefore, amount); - assertEq(David.balance, etherAmount); - } - - function test_20Vault_receive_erc20_non_canonical_to_dest_chain_deploys_new_bridged_token_and_mints( - ) - public - { - vm.startPrank(Alice); - - vm.chainId(destChainId); - - uint64 amount = 1; - - destChainIdBridge.setERC20Vault(address(destChainIdERC20Vault)); - - address bridgedAddressBefore = - destChainIdERC20Vault.canonicalToBridged(srcChainId, address(erc20)); - assertEq(bridgedAddressBefore == address(0), true); - - destChainIdBridge.sendReceiveERC20ToERC20Vault( - erc20ToCanonicalERC20(srcChainId), - Alice, - Bob, - amount, - bytes32(0), - address(erc20Vault), - srcChainId, - 0 - ); - - address bridgedAddressAfter = - destChainIdERC20Vault.canonicalToBridged(srcChainId, address(erc20)); - assertEq(bridgedAddressAfter != address(0), true); - BridgedERC20 bridgedERC20 = BridgedERC20(bridgedAddressAfter); - - assertEq(bridgedERC20.name(), "ERC20"); - assertEq(bridgedERC20.symbol(), "ERC20"); - assertEq(bridgedERC20.balanceOf(Bob), amount); - } - - function erc20ToCanonicalERC20(uint64 chainId) - internal - view - returns (ERC20Vault.CanonicalERC20 memory) - { - return ERC20Vault.CanonicalERC20({ - chainId: chainId, - addr: address(erc20), - decimals: 
erc20.decimals(), - symbol: erc20.symbol(), - name: erc20.name() - }); - } - - function noNameErc20(uint64 chainId) internal view returns (ERC20Vault.CanonicalERC20 memory) { - return ERC20Vault.CanonicalERC20({ - chainId: chainId, - addr: address(weirdNamedToken), - decimals: weirdNamedToken.decimals(), - symbol: weirdNamedToken.symbol(), - name: weirdNamedToken.name() - }); - } - - function test_20Vault_upgrade_bridged_tokens_20() public { - vm.startPrank(Alice); - - vm.chainId(destChainId); - - uint64 amount = 1; - - destChainIdBridge.setERC20Vault(address(destChainIdERC20Vault)); - - address bridgedAddressBefore = - destChainIdERC20Vault.canonicalToBridged(srcChainId, address(erc20)); - assertEq(bridgedAddressBefore == address(0), true); - - destChainIdBridge.sendReceiveERC20ToERC20Vault( - erc20ToCanonicalERC20(srcChainId), - Alice, - Bob, - amount, - bytes32(0), - address(erc20Vault), - srcChainId, - 0 - ); - - address bridgedAddressAfter = - destChainIdERC20Vault.canonicalToBridged(srcChainId, address(erc20)); - assertEq(bridgedAddressAfter != address(0), true); - - try UpdatedBridgedERC20(bridgedAddressAfter).helloWorld() { - fail(); - } catch { - // It should not yet support this function call - } - - // Upgrade the implementation of that contract - // so that it supports now the 'helloWorld' call - UpdatedBridgedERC20 newBridgedContract = new UpdatedBridgedERC20(); - vm.stopPrank(); - vm.prank(Carol, Carol); - BridgedERC20(payable(bridgedAddressAfter)).upgradeTo(address(newBridgedContract)); - - vm.prank(Alice, Alice); - try UpdatedBridgedERC20(bridgedAddressAfter).helloWorld() { - // It should support now this function call - } catch { - fail(); - } - } - - function test_20Vault_onMessageRecalled_20() public { - vm.startPrank(Alice); - - uint64 amount = 2 wei; - erc20.approve(address(erc20Vault), amount); - - uint256 aliceBalanceBefore = erc20.balanceOf(Alice); - uint256 erc20VaultBalanceBefore = erc20.balanceOf(address(erc20Vault)); - - IBridge.Message memory _messageToSimulateFail = erc20Vault.sendToken( - ERC20Vault.BridgeTransferOp( - destChainId, address(0), Bob, 0, address(erc20), 1_000_000, amount - ) - ); - - uint256 aliceBalanceAfter = erc20.balanceOf(Alice); - uint256 erc20VaultBalanceAfter = erc20.balanceOf(address(erc20Vault)); - - assertEq(aliceBalanceBefore - aliceBalanceAfter, amount); - assertEq(erc20VaultBalanceAfter - erc20VaultBalanceBefore, amount); - - // No need to imitate that it is failed because we have a mock SignalService - bridge.recallMessage(_messageToSimulateFail, bytes("")); - - uint256 aliceBalanceAfterRecall = erc20.balanceOf(Alice); - uint256 erc20VaultBalanceAfterRecall = erc20.balanceOf(address(erc20Vault)); - - // Release -> original balance - assertEq(aliceBalanceAfterRecall, aliceBalanceBefore); - assertEq(erc20VaultBalanceAfterRecall, erc20VaultBalanceBefore); - } - - function test_20Vault_change_bridged_token() public { - // A mock canonical "token" - address canonicalRandomToken = vm.addr(102); - - vm.warp(block.timestamp + 91 days); - - vm.startPrank(Carol); - - erc20Vault.changeBridgedToken( - ERC20Vault.CanonicalERC20({ - chainId: 1, - addr: address(erc20), - decimals: 18, - symbol: "ERC20TT", - name: "ERC20 Test token" - }), - address(usdc) - ); - - assertEq(erc20Vault.canonicalToBridged(1, address(erc20)), address(usdc)); - - vm.expectRevert(ERC20Vault.VAULT_LAST_MIGRATION_TOO_CLOSE.selector); - erc20Vault.changeBridgedToken( - ERC20Vault.CanonicalERC20({ - chainId: 1, - addr: address(erc20), - decimals: 18, - symbol: "ERC20TT", - 
name: "ERC20 Test token" - }), - address(usdt) - ); - - vm.warp(block.timestamp + 91 days); - - vm.expectRevert(ERC20Vault.VAULT_CTOKEN_MISMATCH.selector); - erc20Vault.changeBridgedToken( - ERC20Vault.CanonicalERC20({ - chainId: 1, - addr: address(erc20), - decimals: 18, - symbol: "ERC20TT_WRONG_NAME", - name: "ERC20 Test token" - }), - address(usdt) - ); - - erc20Vault.changeBridgedToken( - ERC20Vault.CanonicalERC20({ - chainId: 1, - addr: address(erc20), - decimals: 18, - symbol: "ERC20TT", - name: "ERC20 Test token" - }), - address(usdt) - ); - - assertEq(erc20Vault.canonicalToBridged(1, address(erc20)), address(usdt)); - - erc20Vault.changeBridgedToken( - ERC20Vault.CanonicalERC20({ - chainId: 1, - addr: canonicalRandomToken, - decimals: 18, - symbol: "ERC20TT2", - name: "ERC20 Test token2" - }), - address(stETH) - ); - - vm.warp(block.timestamp + 91 days); - - // usdc is already blacklisted! - vm.expectRevert(ERC20Vault.VAULT_BTOKEN_BLACKLISTED.selector); - erc20Vault.changeBridgedToken( - ERC20Vault.CanonicalERC20({ - chainId: 1, - addr: address(erc20), - decimals: 18, - symbol: "ERC20TT", - name: "ERC20 Test token" - }), - address(usdc) - ); - - // invalid btoken - vm.expectRevert(ERC20Vault.VAULT_INVALID_CTOKEN.selector); - erc20Vault.changeBridgedToken( - ERC20Vault.CanonicalERC20({ - chainId: uint64(block.chainid), - addr: address(erc20), - decimals: 18, - symbol: "ERC20TT", - name: "ERC20 Test token" - }), - address(usdc) - ); - - // We cannot use stETH for erc20 (as it is used in connection with another token) - vm.expectRevert(ERC20Vault.VAULT_INVALID_NEW_BTOKEN.selector); - erc20Vault.changeBridgedToken( - ERC20Vault.CanonicalERC20({ - chainId: 1, - addr: address(erc20), - decimals: 18, - symbol: "ERC20TT", - name: "ERC20 Test token" - }), - address(stETH) - ); - - vm.stopPrank(); - } - - function test_20Vault_to_string() public { - vm.startPrank(Alice); - - (, bytes memory symbolData) = - address(weirdNamedToken).staticcall(abi.encodeCall(INameSymbol.symbol, ())); - (, bytes memory nameData) = - address(weirdNamedToken).staticcall(abi.encodeCall(INameSymbol.name, ())); - - string memory decodedSymbol = LibBytes.toString(symbolData); - string memory decodedName = LibBytes.toString(nameData); - - assertEq(decodedSymbol, "123456abcdefgh"); - assertEq(decodedName, ""); - - vm.stopPrank(); - } - - function test_20Vault_deploy_erc20_with_no_name() public { - vm.startPrank(Alice); - - vm.chainId(destChainId); - - uint64 amount = 1; - - destChainIdBridge.setERC20Vault(address(destChainIdERC20Vault)); - - address bridgedAddressBefore = - destChainIdERC20Vault.canonicalToBridged(srcChainId, address(erc20)); - assertEq(bridgedAddressBefore == address(0), true); - - // Token with empty name succeeds - destChainIdBridge.sendReceiveERC20ToERC20Vault( - noNameErc20(srcChainId), - Alice, - Bob, - amount, - bytes32(0), - address(erc20Vault), - srcChainId, - 0 - ); - } -} diff --git a/packages/protocol/test/tokenvault/ERC721Vault.t.sol b/packages/protocol/test/tokenvault/ERC721Vault.t.sol deleted file mode 100644 index c43ed5fde0b0..000000000000 --- a/packages/protocol/test/tokenvault/ERC721Vault.t.sol +++ /dev/null @@ -1,918 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.24; - -import "@openzeppelin/contracts/token/ERC721/ERC721.sol"; -import "../TaikoTest.sol"; - -contract TestTokenERC721 is ERC721 { - string _baseTokenURI; - uint256 minted; - - constructor(string memory baseURI) ERC721("TT", "TT") { - setBaseURI(baseURI); - } - - function setBaseURI(string memory baseURI) 
internal { - _baseTokenURI = baseURI; - } - - function _baseURI() internal view virtual override returns (string memory) { - return _baseTokenURI; - } - - function mint(uint256 amount) public { - for (uint256 i; i < amount; ++i) { - _safeMint(msg.sender, minted + i); - } - minted += amount; - } -} - -// PrankDestBridge lets us simulate a transaction to the erc721Vault -// from a named Bridge, without having to test/run through the real Bridge code, -// outside the scope of the unit tests in the erc721Vault. -contract PrankDestBridge { - ERC721Vault destERC721Vault; - - struct BridgeContext { - bytes32 msgHash; - address sender; - uint64 chainId; - } - - BridgeContext ctx; - - constructor(ERC721Vault _erc721Vault) { - destERC721Vault = _erc721Vault; - } - - function setERC721Vault(address addr) public { - destERC721Vault = ERC721Vault(addr); - } - - function sendMessage(IBridge.Message memory message) - external - payable - returns (bytes32 msgHash, IBridge.Message memory _message) - { - // Dummy return value - return (keccak256(abi.encode(message.id)), _message); - } - - function context() public view returns (BridgeContext memory) { - return ctx; - } - - function sendReceiveERC721ToERC721Vault( - BaseNFTVault.CanonicalNFT calldata canonicalToken, - address from, - address to, - uint256[] memory tokenIds, - bytes32 msgHash, - address srcChainerc721Vault, - uint64 chainId, - uint256 mockLibInvokeMsgValue - ) - public - { - ctx.sender = srcChainerc721Vault; - ctx.msgHash = msgHash; - ctx.chainId = chainId; - - // We need this in order to 'mock' the LibBridgeInvoke's - // (success,retVal) = - // message.to.call{ value: message.value, gas: gasLimit - // }(message.data); - // The problem (with foundry) is that this way it is not able to deploy - // a contract - // most probably due to some deployment address nonce issue. (Seems a - // known issue). 
- destERC721Vault.onMessageInvocation{ value: mockLibInvokeMsgValue }( - abi.encode(canonicalToken, from, to, tokenIds) - ); - - ctx.sender = address(0); - ctx.msgHash = bytes32(0); - ctx.chainId = 0; - } -} - -contract UpdatedBridgedERC721 is BridgedERC721 { - function helloWorld() public pure returns (string memory) { - return "helloworld"; - } -} - -contract ERC721VaultTest is TaikoTest { - uint32 private constant GAS_LIMIT = 2_000_000; - - AddressManager addressManager; - BadReceiver badReceiver; - Bridge bridge; - Bridge destChainBridge; - PrankDestBridge destChainIdBridge; - SkipProofCheckSignal mockProofSignalService; - ERC721Vault erc721Vault; - ERC721Vault destChainErc721Vault; - TestTokenERC721 canonicalToken721; - SignalService signalService; - uint64 destChainId = 19_389; - - function setUp() public { - vm.startPrank(Carol); - vm.deal(Alice, 100 ether); - vm.deal(Carol, 100 ether); - vm.deal(Bob, 100 ether); - - addressManager = AddressManager( - deployProxy({ - name: "address_manager", - impl: address(new AddressManager()), - data: abi.encodeCall(AddressManager.init, (address(0))) - }) - ); - - bridge = Bridge( - payable( - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: abi.encodeCall(Bridge.init, (address(0), address(addressManager))), - registerTo: address(addressManager) - }) - ) - ); - - destChainBridge = Bridge( - payable( - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: abi.encodeCall(Bridge.init, (address(0), address(addressManager))), - registerTo: address(addressManager) - }) - ) - ); - - signalService = SignalService( - deployProxy({ - name: "signal_service", - impl: address(new SignalService()), - data: abi.encodeCall(SignalService.init, (address(0), address(addressManager))) - }) - ); - - erc721Vault = ERC721Vault( - deployProxy({ - name: "erc721_vault", - impl: address(new ERC721Vault()), - data: abi.encodeCall(ERC721Vault.init, (address(0), address(addressManager))) - }) - ); - - destChainErc721Vault = ERC721Vault( - deployProxy({ - name: "erc721_vault", - impl: address(new ERC721Vault()), - data: abi.encodeCall(ERC721Vault.init, (address(0), address(addressManager))) - }) - ); - - destChainIdBridge = new PrankDestBridge(destChainErc721Vault); - vm.deal(address(destChainIdBridge), 100 ether); - - mockProofSignalService = SkipProofCheckSignal( - deployProxy({ - name: "signal_service", - impl: address(new SkipProofCheckSignal()), - data: abi.encodeCall(SignalService.init, (address(0), address(addressManager))) - }) - ); - - addressManager.setAddress( - uint64(block.chainid), "signal_service", address(mockProofSignalService) - ); - - addressManager.setAddress(destChainId, "signal_service", address(mockProofSignalService)); - - addressManager.setAddress(uint64(block.chainid), "bridge", address(bridge)); - - addressManager.setAddress(destChainId, "bridge", address(destChainIdBridge)); - - addressManager.setAddress(uint64(block.chainid), "erc721_vault", address(erc721Vault)); - - addressManager.setAddress(destChainId, "erc721_vault", address(destChainErc721Vault)); - // Below 2-2 registrations (mock) are needed bc of - // LibBridgeRecall.sol's - // resolve address - addressManager.setAddress(destChainId, "erc1155_vault", address(erc721Vault)); - addressManager.setAddress(destChainId, "erc20_vault", address(erc721Vault)); - addressManager.setAddress(uint64(block.chainid), "erc1155_vault", address(erc721Vault)); - addressManager.setAddress(uint64(block.chainid), "erc20_vault", address(erc721Vault)); - - address 
bridgedERC721 = address(new BridgedERC721()); - - addressManager.setAddress(destChainId, "bridged_erc721", bridgedERC721); - addressManager.setAddress(uint64(block.chainid), "bridged_erc721", bridgedERC721); - - vm.stopPrank(); - - vm.startPrank(Alice); - canonicalToken721 = new TestTokenERC721("http://example.host.com/"); - canonicalToken721.mint(10); - vm.stopPrank(); - } - - function getPreDeterminedDataBytes() internal pure returns (bytes memory) { - return - hex"a9976baf00000000000000000000000000000000000000000000000000000000000000800000000000000000000000007e5f4552091a69125d5dfcb7b8c2659029395bdf0000000000000000000000007e5f4552091a69125d5dfcb7b8c2659029395bdf00000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000007a69000000000000000000000000f2e246bb76df876cef8b38ae84130f4f55de395b000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000254540000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002545400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001"; - } - - function test_721Vault_sendToken_721() public { - vm.prank(Alice, Alice); - canonicalToken721.approve(address(erc721Vault), 1); - - assertEq(canonicalToken721.ownerOf(1), Alice); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 0; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(canonicalToken721), - GAS_LIMIT, - tokenIds, - amounts // With ERC721 still need to specify 1 - ); - vm.prank(Alice, Alice); - erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(ERC721(canonicalToken721).ownerOf(1), address(erc721Vault)); - } - - function test_721Vault_sendToken_with_invalid_token_address() public { - vm.prank(Alice, Alice); - canonicalToken721.approve(address(erc721Vault), 1); - - assertEq(canonicalToken721.ownerOf(1), Alice); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 0; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, address(0), Alice, GAS_LIMIT, address(0), GAS_LIMIT, tokenIds, amounts - ); - vm.prank(Alice, Alice); - vm.expectRevert(BaseNFTVault.VAULT_INVALID_TOKEN.selector); - erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - } - - function test_721Vault_sendToken_with_1_tokens_but_erc721_amount_1_invalid() public { - vm.prank(Alice, Alice); - canonicalToken721.approve(address(erc721Vault), 1); - - assertEq(canonicalToken721.ownerOf(1), Alice); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 1; - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(canonicalToken721), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - vm.expectRevert(BaseNFTVault.VAULT_INVALID_AMOUNT.selector); - erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - } - - function 
test_721Vault_receiveTokens_from_newly_deployed_bridged_contract_on_destination_chain_721( - ) - public - { - vm.prank(Alice, Alice); - canonicalToken721.approve(address(erc721Vault), 1); - - assertEq(canonicalToken721.ownerOf(1), Alice); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 0; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(canonicalToken721), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(canonicalToken721.ownerOf(1), address(erc721Vault)); - - BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(canonicalToken721), - symbol: "TT", - name: "TT" - }); - - uint64 chainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC721ToERC721Vault( - canonicalToken, Alice, Alice, tokenIds, bytes32(0), address(erc721Vault), chainId, 0 - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc721Vault.canonicalToBridged(chainId, address(canonicalToken721)); - - // Alice bridged over tokenId 1 - assertEq(ERC721(deployedContract).ownerOf(1), Alice); - } - - function test_721Vault_receiveTokens_but_mint_not_deploy_if_bridged_second_time_721() public { - vm.prank(Alice, Alice); - canonicalToken721.approve(address(erc721Vault), 1); - vm.prank(Alice, Alice); - canonicalToken721.approve(address(erc721Vault), 2); - - assertEq(canonicalToken721.ownerOf(1), Alice); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 0; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(canonicalToken721), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(canonicalToken721.ownerOf(1), address(erc721Vault)); - - // This canonicalToken is basically need to be exact same as the - // sendToken() puts together - // - here is just mocking putting it together. 
- BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(canonicalToken721), - symbol: "TT", - name: "TT" - }); - - uint64 chainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC721ToERC721Vault( - canonicalToken, Alice, Alice, tokenIds, bytes32(0), address(erc721Vault), chainId, 0 - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc721Vault.canonicalToBridged(chainId, address(canonicalToken721)); - - // Alice bridged over tokenId 1 - assertEq(ERC721(deployedContract).ownerOf(1), Alice); - - // Change back to 'L1' - vm.chainId(chainId); - - tokenIds[0] = 2; - - amounts[0] = 0; - - sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(canonicalToken721), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(canonicalToken721.ownerOf(2), address(erc721Vault)); - - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC721ToERC721Vault( - canonicalToken, Alice, Alice, tokenIds, bytes32(0), address(erc721Vault), chainId, 0 - ); - - // Query canonicalToBridged - address bridgedContract = - destChainErc721Vault.canonicalToBridged(chainId, address(canonicalToken721)); - - assertEq(bridgedContract, deployedContract); - } - - function test_721Vault_receiveTokens_erc721_with_ether_to_dave() public { - vm.prank(Alice, Alice); - canonicalToken721.approve(address(erc721Vault), 1); - - assertEq(canonicalToken721.ownerOf(1), Alice); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 0; - - uint256 etherValue = 0.1 ether; - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - David, - GAS_LIMIT, - address(canonicalToken721), - GAS_LIMIT, - tokenIds, - amounts - ); - vm.prank(Alice, Alice); - erc721Vault.sendToken{ value: etherValue }(sendOpts); - - assertEq(canonicalToken721.ownerOf(1), address(erc721Vault)); - - BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({ - chainId: 31_337, - addr: address(canonicalToken721), - symbol: "TT", - name: "TT" - }); - - uint64 chainId = uint64(block.chainid); - vm.chainId(destChainId); - - destChainIdBridge.sendReceiveERC721ToERC721Vault( - canonicalToken, - Alice, - David, - tokenIds, - bytes32(0), - address(erc721Vault), - chainId, - etherValue - ); - - // Query canonicalToBridged - address deployedContract = - destChainErc721Vault.canonicalToBridged(chainId, address(canonicalToken721)); - - // Alice bridged over tokenId 1 and etherValue to David - assertEq(ERC721(deployedContract).ownerOf(1), David); - assertEq(etherValue, David.balance); - } - - function test_721Vault_onMessageRecalled_721() public { - vm.prank(Alice, Alice); - canonicalToken721.approve(address(erc721Vault), 1); - - assertEq(canonicalToken721.ownerOf(1), Alice); - - uint256[] memory tokenIds = new uint256[](1); - tokenIds[0] = 1; - - uint256[] memory amounts = new uint256[](1); - amounts[0] = 0; - - BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp( - destChainId, - address(0), - Alice, - GAS_LIMIT, - address(canonicalToken721), - GAS_LIMIT, - tokenIds, - amounts - ); - - vm.prank(Alice, Alice); - IBridge.Message memory message = erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts); - - assertEq(canonicalToken721.ownerOf(1), address(erc721Vault)); - - 
-        bridge.recallMessage(message, bytes(""));
-
-        // Alice got back her NFT
-        assertEq(canonicalToken721.ownerOf(1), Alice);
-    }
-
-    function test_721Vault_receiveTokens_multiple_721() public {
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 1);
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 2);
-
-        assertEq(canonicalToken721.ownerOf(1), Alice);
-        assertEq(canonicalToken721.ownerOf(2), Alice);
-
-        uint256[] memory tokenIds = new uint256[](2);
-        tokenIds[0] = 1;
-        tokenIds[1] = 2;
-
-        uint256[] memory amounts = new uint256[](2);
-        amounts[0] = 0;
-        amounts[1] = 0;
-
-        BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp(
-            destChainId,
-            address(0),
-            Alice,
-            GAS_LIMIT,
-            address(canonicalToken721),
-            GAS_LIMIT,
-            tokenIds,
-            amounts
-        );
-        vm.prank(Alice, Alice);
-        erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-
-        assertEq(canonicalToken721.ownerOf(1), address(erc721Vault));
-        assertEq(canonicalToken721.ownerOf(2), address(erc721Vault));
-
-        BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({
-            chainId: 31_337,
-            addr: address(canonicalToken721),
-            symbol: "TT",
-            name: "TT"
-        });
-
-        uint64 srcChainId = uint64(block.chainid);
-        vm.chainId(destChainId);
-
-        destChainIdBridge.sendReceiveERC721ToERC721Vault(
-            canonicalToken, Alice, Alice, tokenIds, bytes32(0), address(erc721Vault), srcChainId, 0
-        );
-
-        // Query canonicalToBridged
-        address deployedContract =
-            destChainErc721Vault.canonicalToBridged(srcChainId, address(canonicalToken721));
-
-        // Alice bridged over tokenId 1
-        assertEq(ERC721(deployedContract).ownerOf(1), Alice);
-        assertEq(ERC721(deployedContract).ownerOf(2), Alice);
-    }
-
-    function test_721Vault_bridge_back_but_owner_is_different_now_721() public {
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 1);
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 2);
-
-        assertEq(canonicalToken721.ownerOf(1), Alice);
-
-        uint256[] memory tokenIds = new uint256[](1);
-        tokenIds[0] = 1;
-
-        uint256[] memory amounts = new uint256[](1);
-        amounts[0] = 0;
-
-        BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp(
-            destChainId,
-            address(0),
-            Alice,
-            GAS_LIMIT,
-            address(canonicalToken721),
-            GAS_LIMIT,
-            tokenIds,
-            amounts
-        );
-        vm.prank(Alice, Alice);
-        erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-
-        assertEq(canonicalToken721.ownerOf(1), address(erc721Vault));
-
-        // This canonicalToken is basically need to be exact same as the
-        // sendToken() puts together
-        // - here is just mocking putting it together.
-        BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({
-            chainId: 31_337,
-            addr: address(canonicalToken721),
-            symbol: "TT",
-            name: "TT"
-        });
-
-        uint64 chainId = uint64(block.chainid);
-        vm.chainId(destChainId);
-
-        destChainIdBridge.sendReceiveERC721ToERC721Vault(
-            canonicalToken, Alice, Alice, tokenIds, bytes32(0), address(erc721Vault), chainId, 0
-        );
-
-        // Query canonicalToBridged
-        address deployedContract =
-            destChainErc721Vault.canonicalToBridged(chainId, address(canonicalToken721));
-
-        // Alice bridged over tokenId 1
-        assertEq(ERC721(deployedContract).ownerOf(1), Alice);
-
-        // Transfer the asset to Bob, and Bob can receive it back on canonical
-        // chain
-        vm.prank(Alice, Alice);
-        ERC721(deployedContract).transferFrom(Alice, Bob, 1);
-
-        assertEq(ERC721(deployedContract).ownerOf(1), Bob);
-
-        vm.prank(Bob, Bob);
-        ERC721(deployedContract).approve(address(destChainErc721Vault), 1);
-
-        sendOpts = BaseNFTVault.BridgeTransferOp(
-            chainId,
-            address(0),
-            Bob,
-            GAS_LIMIT,
-            address(deployedContract),
-            GAS_LIMIT,
-            tokenIds,
-            amounts
-        );
-
-        vm.prank(Bob, Bob);
-        destChainErc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-
-        vm.chainId(chainId);
-
-        assertEq(ERC721(canonicalToken721).ownerOf(1), address(erc721Vault));
-
-        destChainIdBridge.setERC721Vault(address(erc721Vault));
-
-        vm.prank(Carol, Carol);
-        addressManager.setAddress(uint64(block.chainid), "bridge", address(destChainIdBridge));
-
-        destChainIdBridge.sendReceiveERC721ToERC721Vault(
-            canonicalToken, Bob, Bob, tokenIds, bytes32(0), address(erc721Vault), chainId, 0
-        );
-
-        assertEq(canonicalToken721.ownerOf(1), Bob);
-    }
-
-    function test_721Vault_bridge_back_but_original_owner_cannot_claim_it_anymore_if_sold_721()
-        public
-    {
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 1);
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 2);
-
-        assertEq(canonicalToken721.ownerOf(1), Alice);
-
-        uint256[] memory tokenIds = new uint256[](1);
-        tokenIds[0] = 1;
-
-        uint256[] memory amounts = new uint256[](1);
-        amounts[0] = 0;
-
-        BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp(
-            destChainId,
-            address(0),
-            Alice,
-            GAS_LIMIT,
-            address(canonicalToken721),
-            GAS_LIMIT,
-            tokenIds,
-            amounts
-        );
-        vm.prank(Alice, Alice);
-        erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-
-        assertEq(canonicalToken721.ownerOf(1), address(erc721Vault));
-
-        // This canonicalToken is basically need to be exact same as the
-        // sendToken() puts together
-        // - here is just mocking putting it together.
-        BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({
-            chainId: 31_337,
-            addr: address(canonicalToken721),
-            symbol: "TT",
-            name: "TT"
-        });
-
-        uint64 chainId = uint64(block.chainid);
-        vm.chainId(destChainId);
-
-        destChainIdBridge.sendReceiveERC721ToERC721Vault(
-            canonicalToken, Alice, Alice, tokenIds, bytes32(0), address(erc721Vault), chainId, 0
-        );
-
-        // Query canonicalToBridged
-        address deployedContract =
-            destChainErc721Vault.canonicalToBridged(chainId, address(canonicalToken721));
-
-        // Alice bridged over tokenId 1
-        assertEq(ERC721(deployedContract).ownerOf(1), Alice);
-
-        // Transfer the asset to Bob, and Bob can receive it back on canonical
-        // chain
-        vm.prank(Alice, Alice);
-        ERC721(deployedContract).transferFrom(Alice, Bob, 1);
-
-        assertEq(ERC721(deployedContract).ownerOf(1), Bob);
-
-        vm.prank(Bob, Bob);
-        ERC721(deployedContract).approve(address(destChainErc721Vault), 1);
-
-        // Alice puts together a malicious bridging back message
-        sendOpts = BaseNFTVault.BridgeTransferOp(
-            chainId,
-            address(0),
-            Alice,
-            GAS_LIMIT,
-            address(deployedContract),
-            GAS_LIMIT,
-            tokenIds,
-            amounts
-        );
-
-        vm.prank(Alice, Alice);
-        vm.expectRevert("ERC721: transfer from incorrect owner");
-        destChainErc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-    }
-
-    function test_721Vault_upgrade_bridged_tokens_721() public {
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 1);
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 2);
-
-        assertEq(canonicalToken721.ownerOf(1), Alice);
-
-        uint256[] memory tokenIds = new uint256[](1);
-        tokenIds[0] = 1;
-
-        uint256[] memory amounts = new uint256[](1);
-        amounts[0] = 0;
-
-        BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp(
-            destChainId,
-            address(0),
-            Alice,
-            GAS_LIMIT,
-            address(canonicalToken721),
-            GAS_LIMIT,
-            tokenIds,
-            amounts
-        );
-        vm.prank(Alice, Alice);
-        erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-
-        assertEq(canonicalToken721.ownerOf(1), address(erc721Vault));
-
-        // This canonicalToken is basically need to be exact same as the
-        // sendToken() puts together
-        // - here is just mocking putting it together.
-        BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({
-            chainId: 31_337,
-            addr: address(canonicalToken721),
-            symbol: "TT",
-            name: "TT"
-        });
-
-        uint64 chainId = uint64(block.chainid);
-        vm.chainId(destChainId);
-
-        destChainIdBridge.sendReceiveERC721ToERC721Vault(
-            canonicalToken, Alice, Alice, tokenIds, bytes32(0), address(erc721Vault), chainId, 0
-        );
-
-        // Query canonicalToBridged
-        address deployedContract =
-            destChainErc721Vault.canonicalToBridged(chainId, address(canonicalToken721));
-
-        try UpdatedBridgedERC721(deployedContract).helloWorld() {
-            fail();
-        } catch {
-            // It should not yet support this function call
-        }
-
-        // Upgrade the implementation of that contract
-        // so that it supports now the 'helloWorld' call
-        UpdatedBridgedERC721 newBridgedContract = new UpdatedBridgedERC721();
-        vm.prank(Carol, Carol);
-        BridgedERC721(payable(deployedContract)).upgradeTo(address(newBridgedContract));
-
-        try UpdatedBridgedERC721(deployedContract).helloWorld() {
-            // It should support now this function call
-        } catch {
-            fail();
-        }
-    }
-
-    function test_721Vault_shall_not_be_able_to_burn_arbitrarily() public {
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 1);
-        vm.prank(Alice, Alice);
-        canonicalToken721.approve(address(erc721Vault), 2);
-
-        assertEq(canonicalToken721.ownerOf(1), Alice);
-
-        uint256[] memory tokenIds = new uint256[](1);
-        tokenIds[0] = 1;
-
-        uint256[] memory amounts = new uint256[](1);
-        amounts[0] = 0;
-
-        BaseNFTVault.BridgeTransferOp memory sendOpts = BaseNFTVault.BridgeTransferOp(
-            destChainId,
-            address(0),
-            Alice,
-            GAS_LIMIT,
-            address(canonicalToken721),
-            GAS_LIMIT,
-            tokenIds,
-            amounts
-        );
-        vm.prank(Alice, Alice);
-        erc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-
-        assertEq(canonicalToken721.ownerOf(1), address(erc721Vault));
-
-        // This canonicalToken is basically need to be exact same as the
-        // sendToken() puts together
-        // - here is just mocking putting it together.
-        BaseNFTVault.CanonicalNFT memory canonicalToken = BaseNFTVault.CanonicalNFT({
-            chainId: 31_337,
-            addr: address(canonicalToken721),
-            symbol: "TT",
-            name: "TT"
-        });
-
-        uint64 chainId = uint64(block.chainid);
-        vm.chainId(destChainId);
-
-        destChainIdBridge.sendReceiveERC721ToERC721Vault(
-            canonicalToken, Alice, Alice, tokenIds, bytes32(0), address(erc721Vault), chainId, 0
-        );
-
-        // Query canonicalToBridged
-        address deployedContract =
-            destChainErc721Vault.canonicalToBridged(chainId, address(canonicalToken721));
-
-        // Alice bridged over tokenId 1
-        assertEq(ERC721(deployedContract).ownerOf(1), Alice);
-
-        // Alice tries to bridge back message
-        sendOpts = BaseNFTVault.BridgeTransferOp(
-            chainId,
-            address(0),
-            Alice,
-            GAS_LIMIT,
-            address(deployedContract),
-            GAS_LIMIT,
-            tokenIds,
-            amounts
-        );
-
-        // Alice hasn't approved the vault yet!
-        vm.prank(Alice, Alice);
-        vm.expectRevert("ERC721: caller is not token owner or approved");
-        destChainErc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-
-        // Also Vault cannot burn tokens it does not own (even if the priv key compromised)
-        vm.prank(address(destChainErc721Vault), address(destChainErc721Vault));
-        vm.expectRevert(BridgedERC721.BTOKEN_INVALID_BURN.selector);
-        BridgedERC721(deployedContract).burn(1);
-
-        // After approve() ERC721Vault can transfer and burn
-        vm.prank(Alice, Alice);
-        ERC721(deployedContract).approve(address(destChainErc721Vault), 1);
-        vm.prank(Alice, Alice);
-        destChainErc721Vault.sendToken{ value: GAS_LIMIT }(sendOpts);
-    }
-}