From fdeb4d7c761f15b9c85258c74cc3ead1652d5fd7 Mon Sep 17 00:00:00 2001
From: Matthias Seitz
Date: Fri, 16 Sep 2022 02:22:11 +0200
Subject: [PATCH 1/3] chore(deps): bump ethers (#3231)

---
 Cargo.lock | 43 ++++++++++++-------------------------------
 1 file changed, 12 insertions(+), 31 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 3a99d7b465a3..64ce64185fa1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -120,7 +120,6 @@ dependencies = [
  "memory-db",
  "parking_lot 0.12.0",
  "pretty_assertions",
- "revm_precompiles 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde",
  "serde_json",
  "tempfile",
@@ -1628,7 +1627,7 @@ dependencies = [
 [[package]]
 name = "ethers"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
 dependencies = [
  "ethers-addressbook",
  "ethers-contract",
@@ -1643,7 +1642,7 @@ dependencies = [
 [[package]]
 name = "ethers-addressbook"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
 dependencies = [
  "ethers-core",
  "once_cell",
@@ -1654,7 +1653,7 @@ dependencies = [
 [[package]]
 name = "ethers-contract"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
 dependencies = [
  "ethers-contract-abigen",
  "ethers-contract-derive",
@@ -1672,7 +1671,7 @@ dependencies = [
 [[package]]
 name = "ethers-contract-abigen"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
 dependencies = [
  "Inflector",
  "cfg-if 1.0.0",
@@ -1695,7 +1694,7 @@ dependencies = [
 [[package]]
 name = "ethers-contract-derive"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
 dependencies = [
  "ethers-contract-abigen",
  "ethers-core",
@@ -1709,7 +1708,7 @@ dependencies = [
 [[package]]
 name = "ethers-core"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
 dependencies = [
  "arrayvec 0.7.2",
  "bytes",
@@ -1740,7 +1739,7 @@ dependencies = [
 [[package]]
 name = "ethers-etherscan"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
 dependencies = [
  "ethers-core",
  "getrandom 0.2.6",
@@ -1756,7 +1755,7 @@ dependencies = [
 [[package]]
 name = "ethers-middleware"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
 dependencies = [
  "async-trait",
  "auto_impl 0.5.0",
@@ -1781,7 +1780,7 @@ dependencies = [
 [[package]]
 name = "ethers-providers"
 version = "0.17.0"
-source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112"
+source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c"
"git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c" dependencies = [ "async-trait", "auto_impl 1.0.1", @@ -1818,7 +1817,7 @@ dependencies = [ [[package]] name = "ethers-signers" version = "0.17.0" -source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112" +source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c" dependencies = [ "async-trait", "coins-bip32", @@ -1841,7 +1840,7 @@ dependencies = [ [[package]] name = "ethers-solc" version = "0.17.0" -source = "git+https://github.com/gakonst/ethers-rs#6b4007f6193ffa8a685a802eec7ff7be9431d112" +source = "git+https://github.com/gakonst/ethers-rs#9773a76dd422ed8c625ac994f0ffbb5f7c20fc1c" dependencies = [ "cfg-if 1.0.0", "dunce", @@ -4497,30 +4496,12 @@ dependencies = [ "hex", "num_enum", "primitive-types", - "revm_precompiles 1.1.1 (git+https://github.com/bluealloy/revm)", + "revm_precompiles", "rlp", "serde", "sha3", ] -[[package]] -name = "revm_precompiles" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00e68901326fe20437526cb6d64a2898d2976383b7d222329dfce1717902da50" -dependencies = [ - "bytes", - "hashbrown 0.12.0", - "num", - "once_cell", - "primitive-types", - "ripemd", - "secp256k1", - "sha2 0.10.5", - "sha3", - "substrate-bn", -] - [[package]] name = "revm_precompiles" version = "1.1.1" From 7fdd4e033b9c83150c9d1833982bd0d406ab5f81 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 16 Sep 2022 16:02:23 +0200 Subject: [PATCH 2/3] fix: windows build (#3233) --- anvil/server/src/ipc.rs | 2 +- anvil/src/server/mod.rs | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/anvil/server/src/ipc.rs b/anvil/server/src/ipc.rs index d12b84ca253f..c30953aebd13 100644 --- a/anvil/server/src/ipc.rs +++ b/anvil/server/src/ipc.rs @@ -34,7 +34,7 @@ impl IpcEndpoint { /// This establishes the ipc endpoint, converts the incoming connections into handled eth /// connections, See [`PubSubConnection`] that should be spawned #[tracing::instrument(target = "ipc", skip_all)] - pub fn incoming(self) -> io::Result + Unpin>> { + pub fn incoming(self) -> io::Result>> { let IpcEndpoint { handler, endpoint } = self; trace!( endpoint=?endpoint.path(), "starting ipc server" ); diff --git a/anvil/src/server/mod.rs b/anvil/src/server/mod.rs index 38420db238fa..ef98e8b3d20e 100644 --- a/anvil/src/server/mod.rs +++ b/anvil/src/server/mod.rs @@ -35,9 +35,10 @@ pub fn try_spawn_ipc( let path = path.into(); let handler = PubSubEthRpcHandler::new(api); let ipc = IpcEndpoint::new(handler, path); - let mut incoming = ipc.incoming()?; + let incoming = ipc.incoming()?; let task = tokio::task::spawn(async move { + tokio::pin!(incoming); while let Some(stream) = incoming.next().await { trace!(target: "ipc", "new ipc connection"); tokio::task::spawn(stream); From 903562382075b4ba24e5f4d40aeee620c53a7dde Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 16 Sep 2022 16:12:24 +0200 Subject: [PATCH 3/3] fix(evm): improve journaledstate handling on reverts (#3226) * fix: improve journalstate handling on revert * cleanup * forge fmt --- evm/src/executor/backend/mod.rs | 102 ++++++++++++++++++-------------- forge/tests/it/repros.rs | 6 ++ testdata/repros/Issue3220.t.sol | 42 +++++++++++++ 3 files changed, 106 insertions(+), 44 deletions(-) create mode 100644 testdata/repros/Issue3220.t.sol diff --git a/evm/src/executor/backend/mod.rs b/evm/src/executor/backend/mod.rs index 8c268190ea24..be63d63c8851 100644 --- 
+++ b/evm/src/executor/backend/mod.rs
@@ -27,7 +27,7 @@ mod fuzz;
 pub mod snapshot;
 pub use fuzz::FuzzBackendWrapper;
 mod diagnostic;
-use crate::executor::inspector::cheatcodes::util::with_journaled_account;
+
 pub use diagnostic::RevertDiagnostic;
 
 pub mod error;
@@ -504,8 +504,12 @@ impl Backend {
         self.storage(CHEATCODE_ADDRESS, index).map(|value| value == U256::one()).unwrap_or_default()
     }
 
-    /// when creating or switching forks, we update the AccountInfo of the contract
-    pub(crate) fn update_fork_db(&self, journaled_state: &mut JournaledState, fork: &mut Fork) {
+    /// When creating or switching forks, we update the AccountInfo of the contract
+    pub(crate) fn update_fork_db(
+        &self,
+        active_journaled_state: &mut JournaledState,
+        target_fork: &mut Fork,
+    ) {
         debug_assert!(
             self.inner.test_contract_address.is_some(),
             "Test contract address must be set"
@@ -513,23 +517,23 @@ impl Backend {
 
         self.update_fork_db_contracts(
             self.inner.persistent_accounts.iter().copied(),
-            journaled_state,
-            fork,
+            active_journaled_state,
+            target_fork,
         )
     }
 
-    /// Copies the state of all `accounts` from the currently active db into the given `fork`
+    /// Merges the state of all `accounts` from the currently active db into the given `fork`
     pub(crate) fn update_fork_db_contracts(
         &self,
         accounts: impl IntoIterator<Item = Address>,
-        journaled_state: &mut JournaledState,
-        fork: &mut Fork,
+        active_journaled_state: &mut JournaledState,
+        target_fork: &mut Fork,
     ) {
         if let Some((_, fork_idx)) = self.active_fork_ids.as_ref() {
             let active = self.inner.get_fork(*fork_idx);
-            clone_data(accounts, &active.db, journaled_state, fork)
+            merge_account_data(accounts, &active.db, active_journaled_state, target_fork)
         } else {
-            clone_data(accounts, &self.mem_db, journaled_state, fork)
+            merge_account_data(accounts, &self.mem_db, active_journaled_state, target_fork)
         }
     }
 
@@ -713,12 +717,6 @@ impl DatabaseExt for Backend {
             if !fork.db.accounts.contains_key(&caller) {
                 // update the caller account which is required by the evm
                 fork.db.insert_account_info(caller, caller_account.clone());
-                let _ = with_journaled_account(
-                    &mut fork.journaled_state,
-                    &mut fork.db,
-                    caller,
-                    |_| (),
-                );
             }
             journaled_state.state.insert(caller, caller_account.into());
         }
@@ -767,7 +765,7 @@ impl DatabaseExt for Backend {
         &mut self,
         id: LocalForkId,
         env: &mut Env,
-        journaled_state: &mut JournaledState,
+        active_journaled_state: &mut JournaledState,
     ) -> eyre::Result<()> {
         trace!(?id, "select fork");
         if self.is_active_fork(id) {
@@ -787,7 +785,7 @@
         // If we're currently in forking mode we need to update the journaled_state to this point,
         // this ensures the changes performed while the fork was active are recorded
         if let Some(active) = self.active_fork_mut() {
-            active.journaled_state = journaled_state.clone();
+            active.journaled_state = active_journaled_state.clone();
 
             // if the Backend was launched in forking mode, then we also need to adjust the depth of
             // the `JournalState` at this point
@@ -801,17 +799,14 @@
                 // launched in forking mode there never is a `depth` that can be set for the
                 // `fork_init_journaled_state` instead we need to manually bump the depth to the
                 // current depth of the call _once_
-                target_fork.journaled_state.depth = journaled_state.depth;
+                target_fork.journaled_state.depth = active_journaled_state.depth;
 
                 // we also need to initialize and touch the caller
                 if let Some(acc) = caller_account {
-                    target_fork.db.insert_account_info(caller, acc);
-                    let _ = with_journaled_account(
-                        &mut target_fork.journaled_state,
-                        &mut target_fork.db,
-                        caller,
-                        |_| (),
-                    );
+                    if !target_fork.db.accounts.contains_key(&caller) {
+                        target_fork.db.insert_account_info(caller, acc.clone());
+                    }
+                    target_fork.journaled_state.state.insert(caller, acc.into());
                 }
             }
 
@@ -822,7 +817,7 @@
             // first fork is selected, we need to update it for all forks and use it as init state
            // for all future forks
             trace!("recording fork init journaled_state");
-            self.fork_init_journaled_state = journaled_state.clone();
+            self.fork_init_journaled_state = active_journaled_state.clone();
             self.prepare_init_journal_state()?;
         }
 
@@ -834,7 +829,7 @@
         // the current sender
         let caller = env.tx.caller;
         if !fork.journaled_state.state.contains_key(&caller) {
-            let caller_account = journaled_state
+            let caller_account = active_journaled_state
                 .state
                 .get(&env.tx.caller)
                 .map(|acc| acc.info.clone())
@@ -843,18 +838,17 @@
             if !fork.db.accounts.contains_key(&caller) {
                 // update the caller account which is required by the evm
                 fork.db.insert_account_info(caller, caller_account.clone());
-                let _ =
-                    with_journaled_account(&mut fork.journaled_state, &mut fork.db, caller, |_| ());
             }
             fork.journaled_state.state.insert(caller, caller_account.into());
         }
 
-        self.update_fork_db(journaled_state, &mut fork);
+        self.update_fork_db(active_journaled_state, &mut fork);
 
         self.inner.set_fork(idx, fork);
         self.active_fork_ids = Some((id, idx));
 
         // update the environment accordingly
         update_current_env_with_fork_env(env, fork_env);
+
         Ok(())
     }
@@ -892,7 +886,7 @@
             active.journaled_state = self.fork_init_journaled_state.clone();
             active.journaled_state.depth = journaled_state.depth;
             for addr in persistent_addrs {
-                clone_journaled_state_data(addr, journaled_state, &mut active.journaled_state);
+                merge_journaled_state_data(addr, journaled_state, &mut active.journaled_state);
             }
             *journaled_state = active.journaled_state.clone();
         }
@@ -1287,7 +1281,7 @@ impl BackendInner {
             // we initialize a _new_ `ForkDB` but keep the state of persistent accounts
             let mut new_db = ForkDB::new(backend);
             for addr in self.persistent_accounts.iter().copied() {
-                clone_db_account_data(addr, &active.db, &mut new_db);
+                merge_db_account_data(addr, &active.db, &mut new_db);
             }
             active.db = new_db;
         }
@@ -1373,44 +1367,64 @@ pub(crate) fn update_current_env_with_fork_env(current: &mut Env, fork: Env) {
 }
 
 /// Clones the data of the given `accounts` from the `active` database into the `fork_db`
-/// This includes the data held in storage (`CacheDB`) and kept in the `JournaledState`
-pub(crate) fn clone_data(
+/// This includes the data held in storage (`CacheDB`) and kept in the `JournaledState`.
+pub(crate) fn merge_account_data(
     accounts: impl IntoIterator<Item = Address>,
     active: &CacheDB,
     active_journaled_state: &mut JournaledState,
-    fork: &mut Fork,
+    target_fork: &mut Fork,
 ) {
     for addr in accounts.into_iter() {
-        clone_db_account_data(addr, active, &mut fork.db);
-        clone_journaled_state_data(addr, active_journaled_state, &mut fork.journaled_state);
+        merge_db_account_data(addr, active, &mut target_fork.db);
+        merge_journaled_state_data(addr, active_journaled_state, &mut target_fork.journaled_state);
     }
 
-    *active_journaled_state = fork.journaled_state.clone();
+    // need to mock empty journal entries in case the current checkpoint is higher than the existing
+    // journal entries
+    while active_journaled_state.journal.len() > target_fork.journaled_state.journal.len() {
+        target_fork.journaled_state.journal.push(Default::default());
+    }
+
+    *active_journaled_state = target_fork.journaled_state.clone();
 }
 
 /// Clones the account data from the `active_journaled_state` into the `fork_journaled_state`
-fn clone_journaled_state_data(
+fn merge_journaled_state_data(
     addr: Address,
     active_journaled_state: &JournaledState,
     fork_journaled_state: &mut JournaledState,
 ) {
-    if let Some(acc) = active_journaled_state.state.get(&addr).cloned() {
+    if let Some(mut acc) = active_journaled_state.state.get(&addr).cloned() {
         trace!(?addr, "updating journaled_state account data");
+        if let Some(fork_account) = fork_journaled_state.state.get_mut(&addr) {
+            // This will merge the fork's tracked storage with active storage and update values
+            fork_account.storage.extend(std::mem::take(&mut acc.storage));
+            // swap them so we can insert the account as whole in the next step
+            std::mem::swap(&mut fork_account.storage, &mut acc.storage);
+        }
         fork_journaled_state.state.insert(addr, acc);
     }
 }
 
 /// Clones the account data from the `active` db into the `ForkDB`
-fn clone_db_account_data(
+fn merge_db_account_data(
     addr: Address,
     active: &CacheDB,
     fork_db: &mut ForkDB,
 ) {
-    trace!(?addr, "cloning database data");
-    let acc = active.accounts.get(&addr).cloned().unwrap_or_default();
+    trace!(?addr, "merging database data");
+    let mut acc = active.accounts.get(&addr).cloned().unwrap_or_default();
     if let Some(code) = active.contracts.get(&acc.info.code_hash).cloned() {
         fork_db.contracts.insert(acc.info.code_hash, code);
     }
+
+    if let Some(fork_account) = fork_db.accounts.get_mut(&addr) {
+        // This will merge the fork's tracked storage with active storage and update values
+        fork_account.storage.extend(std::mem::take(&mut acc.storage));
+        // swap them so we can insert the account as whole in the next step
+        std::mem::swap(&mut fork_account.storage, &mut acc.storage);
+    }
+
     fork_db.accounts.insert(addr, acc);
 }
 
diff --git a/forge/tests/it/repros.rs b/forge/tests/it/repros.rs
index 88b98c52e4db..db0a39158a2e 100644
--- a/forge/tests/it/repros.rs
+++ b/forge/tests/it/repros.rs
@@ -128,3 +128,9 @@ fn test_issue_3223() {
         Address::from_str("0xF0959944122fb1ed4CfaBA645eA06EED30427BAA").unwrap()
     );
 }
+
+// <https://github.com/foundry-rs/foundry/issues/3220>
+#[test]
+fn test_issue_3220() {
+    test_repro!("Issue3220");
+}
diff --git a/testdata/repros/Issue3220.t.sol b/testdata/repros/Issue3220.t.sol
new file mode 100644
index 000000000000..21508f97fe1a
--- /dev/null
+++ b/testdata/repros/Issue3220.t.sol
@@ -0,0 +1,42 @@
+// SPDX-License-Identifier: Unlicense
+pragma solidity >=0.8.0;
+
+import "ds-test/test.sol";
+import "../cheats/Cheats.sol";
+
+// https://github.com/foundry-rs/foundry/issues/3220
+contract Issue3220Test is DSTest {
+    Cheats constant vm = Cheats(HEVM_ADDRESS);
+    uint256 fork1;
+    uint256 fork2;
+    uint256 counter;
+
+    function setUp() public {
+        fork1 = vm.createFork("rpcAlias", 7475589);
+        vm.selectFork(fork1);
+        fork2 = vm.createFork("rpcAlias", 12880747);
+    }
+
+    function testForkRevert() public {
+        vm.selectFork(fork2);
+        vm.selectFork(fork1);
+
+        // do a bunch of work to increase the revm checkpoint counter
+        for (uint256 i = 0; i < 10; i++) {
+            mockCount();
+        }
+
+        vm.selectFork(fork2);
+
+        vm.expectRevert("This fails");
+        doRevert();
+    }
+
+    function doRevert() public {
+        revert("This fails");
+    }
+
+    function mockCount() public {
+        counter += 1;
+    }
+}