From 7c60403c7bd6e51f16b47b96e6c1b1004c5b6ec8 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 1 Feb 2024 17:45:40 +0300 Subject: [PATCH 01/29] rewrite linking --- crates/forge/bin/cmd/script/build.rs | 191 ++++----- crates/forge/bin/cmd/script/cmd.rs | 8 +- crates/forge/src/link.rs | 556 +++++++-------------------- crates/forge/src/multi_runner.rs | 119 +++--- 4 files changed, 275 insertions(+), 599 deletions(-) diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index 27d691eeb745..f01c6e8f9e99 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -1,7 +1,7 @@ use super::*; use alloy_primitives::{Address, Bytes}; -use eyre::{Context, ContextCompat, Result}; -use forge::link::{link_with_nonce_or_address, PostLinkInput, ResolvedDependency}; +use eyre::{Context, ContextCompat, OptionExt, Result}; +use forge::link::{link_with_nonce_or_address, LinkOutput}; use foundry_cli::utils::get_cached_entry_by_name; use foundry_common::{ compact_to_contract, @@ -13,7 +13,7 @@ use foundry_compilers::{ cache::SolFilesCache, contracts::ArtifactContracts, info::ContractInfo, - ArtifactId, Project, ProjectCompileOutput, + Artifact, ArtifactId, Project, ProjectCompileOutput, }; use std::{collections::BTreeMap, str::FromStr}; @@ -60,32 +60,28 @@ impl ScriptArgs { }) .collect::>()?; + let target = self.find_target(&project, &contracts)?; + let mut output = self.link( project, - contracts, + &contracts, script_config.config.parsed_libraries()?, script_config.evm_opts.sender, script_config.sender_nonce, + target, )?; output.sources = sources; - script_config.target_contract = Some(output.target.clone()); + script_config.target_contract = Some(target.clone()); Ok(output) } - pub fn link( + pub fn find_target<'a>( &self, - project: Project, - contracts: ArtifactContracts, - libraries_addresses: Libraries, - sender: Address, - nonce: u64, - ) -> Result { - let mut run_dependencies = vec![]; - let mut 
contract = CompactContractBytecode::default(); - let mut highlevel_known_contracts = BTreeMap::new(); - + project: &Project, + contracts: &'a ArtifactContracts, + ) -> Result<&'a ArtifactId> { let mut target_fname = dunce::canonicalize(&self.path) .wrap_err("Couldn't convert contract path to absolute path.")? .strip_prefix(project.root()) @@ -101,109 +97,82 @@ impl ScriptArgs { true }; - let mut extra_info = ExtraLinkingInfo { - no_target_name, - target_fname: target_fname.clone(), - contract: &mut contract, - dependencies: &mut run_dependencies, - matched: false, - target_id: None, - }; - - // link_with_nonce_or_address expects absolute paths - let mut libs = libraries_addresses.clone(); - for (file, libraries) in libraries_addresses.libs.iter() { - if file.is_relative() { - let mut absolute_path = project.root().clone(); - absolute_path.push(file); - libs.libs.insert(absolute_path, libraries.clone()); - } - } - - link_with_nonce_or_address( - contracts.clone(), - &mut highlevel_known_contracts, - libs, - sender, - nonce, - &mut extra_info, - |post_link_input| { - let PostLinkInput { - contract, - known_contracts: highlevel_known_contracts, - id, - extra, - dependencies, - } = post_link_input; - - fn unique_deps(deps: Vec) -> Vec<(String, Bytes)> { - let mut filtered = Vec::new(); - let mut seen = HashSet::new(); - for dep in deps { - if !seen.insert(dep.id.clone()) { - continue - } - filtered.push((dep.id, dep.bytecode)); - } + let mut target = None; - filtered - } - - // if it's the target contract, grab the info - if extra.no_target_name { - // Match artifact source, and ignore interfaces - if id.source == std::path::Path::new(&extra.target_fname) && - contract.bytecode.as_ref().map_or(false, |b| b.object.bytes_len() > 0) - { - if extra.matched { - eyre::bail!("Multiple contracts in the target path. 
Please specify the contract name with `--tc ContractName`") - } - *extra.dependencies = unique_deps(dependencies); - *extra.contract = contract.clone(); - extra.matched = true; - extra.target_id = Some(id.clone()); - } - } else { - let (path, name) = extra - .target_fname - .rsplit_once(':') - .expect("The target specifier is malformed."); - let path = std::path::Path::new(path); - if path == id.source && name == id.name { - *extra.dependencies = unique_deps(dependencies); - *extra.contract = contract.clone(); - extra.matched = true; - extra.target_id = Some(id.clone()); + for (id, contract) in contracts.iter() { + if no_target_name { + // Match artifact source, and ignore interfaces + if id.source == std::path::Path::new(&target_fname) && + contract.bytecode.as_ref().map_or(false, |b| b.object.bytes_len() > 0) + { + if target.is_some() { + eyre::bail!("Multiple contracts in the target path. Please specify the contract name with `--tc ContractName`") } + target = Some(id); } - - if let Ok(tc) = ContractBytecode::from(contract).try_into() { - highlevel_known_contracts.insert(id, tc); + } else { + let (path, name) = + target_fname.rsplit_once(':').expect("The target specifier is malformed."); + let path = std::path::Path::new(path); + if path == id.source && name == id.name { + target = Some(id); } + } + } - Ok(()) - }, - project.root(), - )?; - - let target = extra_info - .target_id - .ok_or_else(|| eyre::eyre!("Could not find target contract: {}", target_fname))?; + target.ok_or_eyre(format!("Could not find target contract: {}", target_fname)) + } - let (new_libraries, predeploy_libraries): (Vec<_>, Vec<_>) = - run_dependencies.into_iter().unzip(); + pub fn link( + &self, + project: Project, + contracts: &ArtifactContracts, + libraries: Libraries, + sender: Address, + nonce: u64, + target: &ArtifactId, + ) -> Result { + let LinkOutput { libs_to_deploy, contracts, predeployed_libs } = + link_with_nonce_or_address(contracts, &libraries, sender, nonce, target)?; // 
Merge with user provided libraries - let mut new_libraries = Libraries::parse(&new_libraries)?; - for (file, libraries) in libraries_addresses.libs.into_iter() { - new_libraries.libs.entry(file).or_default().extend(libraries) + let mut new_libraries = Libraries { libs: BTreeMap::new() }; + for (id, address) in libs_to_deploy.iter().chain(predeployed_libs.iter()) { + new_libraries + .libs + .entry(id.source.clone()) + .or_default() + .insert(id.name.split('.').next().unwrap().to_owned(), address.to_string()); } + let predeploy_libraries = libs_to_deploy + .into_iter() + .map(|(id, _)| { + contracts + .get(id) + .unwrap() + .get_bytecode_bytes() + .unwrap_or_else(|| panic!("bytecode for {} is unlinked", id.name)) + .into_owned() + }) + .collect(); + + let contract = + contracts.get(target).ok_or_eyre("Target contract not found in artifacts")?.clone(); + + let highlevel_known_contracts = contracts + .iter() + .filter_map(|(id, contract)| { + ContractBytecodeSome::try_from(ContractBytecode::from(contract.clone())) + .ok() + .map(|tc| (id.clone(), tc)) + }) + .collect(); + Ok(BuildOutput { - target, contract, known_contracts: contracts, - highlevel_known_contracts: ArtifactContracts(highlevel_known_contracts), + highlevel_known_contracts, predeploy_libraries, sources: Default::default(), project, @@ -267,18 +236,8 @@ impl ScriptArgs { } } -struct ExtraLinkingInfo<'a> { - no_target_name: bool, - target_fname: String, - contract: &'a mut CompactContractBytecode, - dependencies: &'a mut Vec<(String, Bytes)>, - matched: bool, - target_id: Option, -} - pub struct BuildOutput { pub project: Project, - pub target: ArtifactId, pub contract: CompactContractBytecode, pub known_contracts: ArtifactContracts, pub highlevel_known_contracts: ArtifactContracts, diff --git a/crates/forge/bin/cmd/script/cmd.rs b/crates/forge/bin/cmd/script/cmd.rs index 244615b994ca..beacf1cb955b 100644 --- a/crates/forge/bin/cmd/script/cmd.rs +++ b/crates/forge/bin/cmd/script/cmd.rs @@ -272,15 +272,17 @@ 
impl ScriptArgs { } if self.verify { + let target = self.find_target(&project, &default_known_contracts)?; // We might have predeployed libraries from the broadcasting, so we need to // relink the contracts with them, since their mapping is // not included in the solc cache files. let BuildOutput { highlevel_known_contracts, .. } = self.link( project, - default_known_contracts, + &default_known_contracts, Libraries::parse(&deployment_sequence.libraries)?, script_config.config.sender, // irrelevant, since we're not creating any 0, // irrelevant, since we're not creating any + target, )?; verify.known_contracts = flatten_contracts(&highlevel_known_contracts, false); @@ -311,15 +313,17 @@ impl ScriptArgs { ) .await?; script_config.sender_nonce = nonce; + let target = self.find_target(&project, &default_known_contracts)?; let BuildOutput { libraries, contract, highlevel_known_contracts, predeploy_libraries, .. } = self.link( project, - default_known_contracts, + &default_known_contracts, script_config.config.parsed_libraries()?, new_sender, nonce, + target, )?; let mut txs = self.create_deploy_transactions( diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index 5eff7ee9ea4a..d4a00c4dc5ce 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -1,411 +1,130 @@ -use alloy_primitives::{Address, Bytes}; +use alloy_primitives::Address; use eyre::Result; -use foundry_compilers::{ - artifacts::{BytecodeObject, CompactBytecode, CompactContractBytecode, Libraries}, - contracts::ArtifactContracts, - ArtifactId, -}; +use foundry_compilers::{artifacts::Libraries, contracts::ArtifactContracts, ArtifactId}; +use semver::Version; use std::{ - collections::{BTreeMap, HashMap}, - fmt, - path::{Path, PathBuf}, + collections::{HashMap, HashSet}, str::FromStr, }; -/// Data passed to the post link handler of the linker for each linked artifact. 
-#[derive(Debug)] -pub struct PostLinkInput<'a, T, U> { - /// The fully linked bytecode of the artifact - pub contract: CompactContractBytecode, - /// All artifacts passed to the linker - pub known_contracts: &'a mut BTreeMap, - /// The ID of the artifact - pub id: ArtifactId, - /// Extra data passed to the handler, which can be used as a scratch space. - pub extra: &'a mut U, - /// Each dependency of the contract in their resolved form. - pub dependencies: Vec, -} - -/// Dependencies for an artifact. -#[derive(Debug)] -struct ArtifactDependencies { - /// All references to dependencies in the artifact's unlinked bytecode. - dependencies: Vec, - /// The ID of the artifact - artifact_id: ArtifactId, -} - -/// A dependency of an artifact. -#[derive(Debug)] -struct ArtifactDependency { - file: String, - key: String, - version: String, -} +fn find_artifact_id_by_library_path<'a>( + contracts: &'a ArtifactContracts, + file: &String, + name: &String, + version: Option<&Version>, +) -> &'a ArtifactId { + for id in contracts.keys() { + if let Some(version) = version { + if id.version != *version { + continue; + } + } + // name is either {LibName} or {LibName}.{version} + if id.name.split('.').next().unwrap() != name { + continue; + } -struct ArtifactCode { - code: CompactContractBytecode, - artifact_id: ArtifactId, -} + if !(id.source.ends_with(file)) { + continue; + } -impl std::fmt::Debug for ArtifactCode { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.artifact_id.fmt(f) + return id; } -} -#[derive(Debug)] -struct AllArtifactsBySlug { - /// all artifacts grouped by identifier - inner: BTreeMap, + panic!("artifact not found for library {file} {name}"); } -impl AllArtifactsBySlug { - /// Finds the code for the target of the artifact and the matching key. 
- fn find_code(&self, identifier: &String, version: &String) -> Option { - trace!(target: "forge::link", identifier, "fetching artifact by identifier"); - let code = self - .inner - .get(identifier) - .or(self.inner.get(&format!("{}.{}", identifier, version)))?; - - Some(code.code.clone()) +pub fn collect_dependencies<'a>( + target: &'a ArtifactId, + contracts: &'a ArtifactContracts, + deps: &mut HashSet<&'a ArtifactId>, +) { + let references = contracts.get(target).unwrap().all_link_references(); + for (file, libs) in &references { + for contract in libs.keys() { + let id = + find_artifact_id_by_library_path(contracts, file, contract, Some(&target.version)); + if deps.insert(id) { + collect_dependencies(id, contracts, deps); + } + } } } -#[derive(Debug)] -pub struct ResolvedDependency { - /// The address the linker resolved - pub address: Address, - /// The nonce used to resolve the dependency - pub nonce: u64, - pub id: String, - pub bytecode: Bytes, -} - -impl std::fmt::Display for ResolvedDependency { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{} @ {} (resolved with nonce {})", self.id, self.address, self.nonce) - } +pub struct LinkOutput<'a> { + pub contracts: ArtifactContracts, + pub predeployed_libs: Vec<(&'a ArtifactId, Address)>, + pub libs_to_deploy: Vec<(&'a ArtifactId, Address)>, } -/// Links the given artifacts with a link key constructor function, passing the result of each -/// linkage to the given callback. -/// -/// This function will recursively link all artifacts until none are unlinked. It does this by: -/// -/// 1. Using the specified predeployed library addresses (`deployed_library_addresses`) for known -/// libraries (specified by the user) 2. Otherwise, computing the address the library would live at -/// if deployed by `sender`, given a starting nonce of `nonce`. 
-/// -/// If the library was already deployed previously in step 2, the linker will re-use the previously -/// computed address instead of re-computing it. -/// -/// The linker will call `post_link` for each linked artifact, providing: -/// -/// 1. User-specified data (`extra`) -/// 2. The linked artifact's bytecode -/// 3. The ID of the artifact -/// 4. The dependencies necessary to deploy the contract -/// -/// # Note -/// -/// If you want to collect all dependencies of a set of contracts, you cannot just collect the -/// `dependencies` passed to the callback in a `Vec`, since the same library contract (with the -/// exact same address) might show up as a dependency for multiple contracts. -/// -/// Instead, you must deduplicate *and* preserve the deployment order by pushing the dependencies to -/// a `Vec` iff it has not been seen before. -/// -/// For an example of this, see [here](https://github.com/foundry-rs/foundry/blob/2308972dbc3a89c03488a05aceb3c428bb3e08c0/cli/src/cmd/forge/script/build.rs#L130-L151C9). -#[allow(clippy::too_many_arguments)] -pub fn link_with_nonce_or_address( - contracts: ArtifactContracts, - known_contracts: &mut BTreeMap, - deployed_library_addresses: Libraries, +pub fn link_with_nonce_or_address<'a>( + contracts: &'a ArtifactContracts, + deployed_library_addresses: &Libraries, sender: Address, - nonce: u64, - extra: &mut U, - post_link: impl Fn(PostLinkInput) -> eyre::Result<()>, - root: impl AsRef, -) -> Result<()> { - // create a mapping of fname => Vec<(fname, file, key)>, - let link_tree: BTreeMap = contracts - .iter() - .map(|(id, contract)| { - let key = id.identifier(); - let version = id.version.to_string(); - // Check if the version has metadata appended to it, which will be after the semver - // version with a `+` separator. If so, strip it off. 
- let version = match version.find('+') { - Some(idx) => (version[..idx]).to_string(), - None => version, - }; - let references = contract - .all_link_references() - .iter() - .flat_map(|(file, link)| link.keys().map(|key| (file.to_string(), key.to_string()))) - .map(|(file, key)| ArtifactDependency { - file, - key, - version: version.clone().to_owned(), - }) - .collect(); - - let references = - ArtifactDependencies { dependencies: references, artifact_id: id.clone() }; - (key, references) - }) - .collect(); - - let artifacts_by_slug = AllArtifactsBySlug { - inner: contracts - .iter() - .map(|(artifact_id, c)| { - ( - artifact_id.identifier(), - ArtifactCode { code: c.clone(), artifact_id: artifact_id.clone() }, - ) - }) - .collect(), - }; - - for (id, contract) in contracts.into_iter() { - let (abi, maybe_deployment_bytes, maybe_runtime) = ( - contract.abi.as_ref(), - contract.bytecode.as_ref(), - contract.deployed_bytecode.as_ref(), - ); - let mut internally_deployed_libraries = HashMap::new(); - - if let (Some(abi), Some(bytecode), Some(runtime)) = - (abi, maybe_deployment_bytes, maybe_runtime) - { - // we are going to mutate, but library contract addresses may change based on - // the test so we clone - let mut target_bytecode = bytecode.clone(); - let mut rt = runtime.clone(); - let mut target_bytecode_runtime = rt.bytecode.expect("No target runtime").clone(); - - // instantiate a vector that gets filled with library deployment bytecode - let mut dependencies = vec![]; - - match bytecode.object { - BytecodeObject::Unlinked(_) => { - trace!(target: "forge::link", target=id.identifier(), version=?id.version, "unlinked contract"); - - // link needed - recurse_link( - id.identifier(), - (&mut target_bytecode, &mut target_bytecode_runtime), - &artifacts_by_slug, - &link_tree, - &mut dependencies, - &mut internally_deployed_libraries, - &deployed_library_addresses, - &mut nonce.clone(), - sender, - root.as_ref(), - ); - } - BytecodeObject::Bytecode(ref bytes) => { - 
if bytes.as_ref().is_empty() { - // Handle case where bytecode bytes are empty - let tc = CompactContractBytecode { - abi: Some(abi.clone()), - bytecode: None, - deployed_bytecode: None, - }; - - let post_link_input = PostLinkInput { - contract: tc, - known_contracts, - id, - extra, - dependencies, - }; - - post_link(post_link_input)?; - continue - } - } + mut nonce: u64, + target: &'a ArtifactId, +) -> Result> { + let mut needed_libraries = HashSet::new(); + collect_dependencies(target, contracts, &mut needed_libraries); + + let mut predeployed_libs = HashMap::new(); + + // Populate predeployed libs firstly + for (path, libs) in &deployed_library_addresses.libs { + let path = path.to_string_lossy().to_string(); + for (name, address) in libs { + let artifact_id = find_artifact_id_by_library_path(contracts, &path, name, None); + if needed_libraries.contains(artifact_id) { + let address = Address::from_str(address)?; + predeployed_libs.insert(artifact_id, address); } + } + } - rt.bytecode = Some(target_bytecode_runtime); - let tc = CompactContractBytecode { - abi: Some(abi.clone()), - bytecode: Some(target_bytecode), - deployed_bytecode: Some(rt), - }; - - let post_link_input = - PostLinkInput { contract: tc, known_contracts, id, extra, dependencies }; + let mut libs_to_deploy = Vec::new(); - post_link(post_link_input)?; + for id in needed_libraries { + if !predeployed_libs.contains_key(id) { + libs_to_deploy.push((id, sender.create(nonce))); + nonce += 1; } } - Ok(()) -} -/// Recursively links bytecode given a target contract artifact name, the bytecode(s) to be linked, -/// a mapping of contract artifact name to bytecode, a dependency mapping, a mutable list that -/// will be filled with the predeploy libraries, initial nonce, and the sender. 
-#[allow(clippy::too_many_arguments)] -fn recurse_link<'a>( - // target name - target: String, - // to-be-modified/linked bytecode - target_bytecode: (&'a mut CompactBytecode, &'a mut CompactBytecode), - // All contract artifacts - artifacts: &'a AllArtifactsBySlug, - // fname => Vec<(fname, file, key)> - dependency_tree: &'a BTreeMap, - // library deployment vector (file:contract:address, bytecode) - deployment: &'a mut Vec, - // libraries we have already deployed during the linking process. - // the key is `file:contract` and the value is the address we computed - internally_deployed_libraries: &'a mut HashMap, - // deployed library addresses fname => address - deployed_library_addresses: &'a Libraries, - // nonce to start at - nonce: &mut u64, - // sender - sender: Address, - // project root path - root: impl AsRef, -) { - // check if we have dependencies - if let Some(dependencies) = dependency_tree.get(&target) { - trace!(target: "forge::link", ?target, "linking contract"); - - // for each dependency, try to link - dependencies.dependencies.iter().for_each(|dep| { - let ArtifactDependency { file, key, version } = dep; - let next_target = format!("{file}:{key}"); - let root = PathBuf::from(root.as_ref().to_str().unwrap()); - // get the dependency - trace!(target: "forge::link", dependency=next_target, file, key, version=?dependencies.artifact_id.version, "get dependency"); - let artifact = match artifacts - .find_code(&next_target, version) { - Some(artifact) => artifact, - None => { - // In some project setups, like JS-style workspaces, you might not have node_modules available at the root of the foundry project. - // In this case, imported dependencies from outside the root might not have their paths tripped correctly. - // Therefore, we fall back to a manual path join to locate the file. - let fallback_path = dunce::canonicalize(root.join(file)).unwrap_or_else(|e| panic!("No artifact for contract \"{next_target}\". 
Attempted to compose fallback path but got got error {e}")); - let fallback_path = fallback_path.to_str().unwrap_or("No artifact for contract \"{next_target}\". Attempted to compose fallback path but could not create valid string"); - let fallback_target = format!("{fallback_path}:{key}"); - - trace!(target: "forge::link", fallback_dependency=fallback_target, file, key, version=?dependencies.artifact_id.version, "get dependency with fallback path"); - - match artifacts.find_code(&fallback_target, version) { - Some(artifact) => artifact, - None => panic!("No artifact for contract {next_target}"), - }}, - }; - let mut next_target_bytecode = artifact - .bytecode - .unwrap_or_else(|| panic!("No bytecode for contract {next_target}")); - let mut next_target_runtime_bytecode = artifact - .deployed_bytecode - .expect("No target runtime bytecode") - .bytecode - .expect("No target runtime"); - - // make sure dependency is fully linked - if let Some(deps) = dependency_tree.get(&format!("{file}:{key}")) { - if !deps.dependencies.is_empty() { - trace!(target: "forge::link", dependency=next_target, file, key, version=?dependencies.artifact_id.version, "dependency has dependencies"); - - // actually link the nested dependencies to this dependency - recurse_link( - format!("{file}:{key}"), - (&mut next_target_bytecode, &mut next_target_runtime_bytecode), - artifacts, - dependency_tree, - deployment, - internally_deployed_libraries, - deployed_library_addresses, - nonce, - sender, - root, - ); - } - } + let predeployed_libs = predeployed_libs.into_iter().collect::>(); - let mut deployed_address = None; + // Link contracts + let contracts = contracts + .iter() + .map(|(id, contract)| { + let mut contract = contract.clone(); - if let Some(library_file) = deployed_library_addresses - .libs - .get(&PathBuf::from_str(file).expect("Invalid library path.")) - { - if let Some(address) = library_file.get(key) { - deployed_address = - Some(Address::from_str(address).expect("Invalid library 
address passed.")); + for (id, address) in libs_to_deploy.iter().chain(predeployed_libs.iter()) { + if let Some(bytecode) = contract.bytecode.as_mut() { + bytecode.link(id.source.to_string_lossy(), &id.name, *address); + } + if let Some(deployed_bytecode) = + contract.deployed_bytecode.as_mut().and_then(|b| b.bytecode.as_mut()) + { + deployed_bytecode.link(id.source.to_string_lossy(), &id.name, *address); } } + (id.clone(), contract) + }) + .collect::(); - let address = if let Some(deployed_address) = deployed_address { - trace!(target: "forge::link", dependency=next_target, file, key, "dependency has pre-defined address"); - - // the user specified the library address - deployed_address - } else if let Some((cached_nonce, deployed_address)) = internally_deployed_libraries.get(&format!("{file}:{key}")) { - trace!(target: "forge::link", dependency=next_target, file, key, "dependency was previously deployed"); - - // we previously deployed the library - let library = format!("{file}:{key}:0x{deployed_address:x}"); - - // push the dependency into the library deployment vector - deployment.push(ResolvedDependency { - id: library, - address: *deployed_address, - nonce: *cached_nonce, - bytecode: next_target_bytecode.object.into_bytes().unwrap_or_else(|| panic!("Bytecode should be linked for {next_target}")), - }); - *deployed_address - } else { - trace!(target: "forge::link", dependency=next_target, file, key, "dependency has to be deployed"); - - // we need to deploy the library - let used_nonce = *nonce; - let computed_address = sender.create(used_nonce); - *nonce += 1; - let library = format!("{file}:{key}:0x{computed_address:x}"); - - // push the dependency into the library deployment vector - deployment.push(ResolvedDependency { - id: library, - address: computed_address, - nonce: used_nonce, - bytecode: next_target_bytecode.object.into_bytes().unwrap_or_else(|| panic!("Bytecode should be linked for {next_target}")), - }); - - // remember this library for later - 
internally_deployed_libraries.insert(format!("{file}:{key}"), (used_nonce, computed_address)); - - computed_address - }; - - // link the dependency to the target - target_bytecode.0.link(file.clone(), key.clone(), address); - target_bytecode.1.link(file.clone(), key.clone(), address); - trace!(target: "forge::link", ?target, dependency=next_target, file, key, "linking dependency done"); - }); - } + Ok(LinkOutput { contracts, predeployed_libs, libs_to_deploy }) } #[cfg(test)] mod tests { + use std::path::PathBuf; + use super::*; - use foundry_common::ContractsByArtifact; use foundry_compilers::{Project, ProjectPathsConfig}; struct LinkerTest { contracts: ArtifactContracts, dependency_assertions: HashMap>, - project: Project, } impl LinkerTest { @@ -429,7 +148,7 @@ mod tests { .map(|(id, c)| (id, c.into_contract_bytecode())) .collect::(); - Self { contracts, dependency_assertions: HashMap::new(), project } + Self { contracts, dependency_assertions: HashMap::new() } } fn assert_dependencies( @@ -442,51 +161,58 @@ mod tests { } fn test_with_sender_and_nonce(self, sender: Address, initial_nonce: u64) { - let mut called_once = false; - link_with_nonce_or_address( - self.contracts, - &mut ContractsByArtifact::default(), - Default::default(), - sender, - initial_nonce, - &mut called_once, - |post_link_input| { - *post_link_input.extra = true; - let identifier = post_link_input.id.identifier(); - - // Skip ds-test as it always has no dependencies etc. 
(and the path is outside root so is not sanitized) - if identifier.contains("DSTest") { - return Ok(()) - } - - let assertions = self - .dependency_assertions - .get(&identifier) - .unwrap_or_else(|| panic!("Unexpected artifact: {identifier}")); - - assert_eq!( - post_link_input.dependencies.len(), - assertions.len(), - "artifact {identifier} has more/less dependencies than expected ({} vs {}): {:#?}", - post_link_input.dependencies.len(), - assertions.len(), - post_link_input.dependencies - ); - - for (expected, actual) in assertions.iter().zip(post_link_input.dependencies.iter()) { - let expected_lib_id = format!("{}:{:?}", expected.0, expected.2); - assert_eq!(expected_lib_id, actual.id, "unexpected dependency, expected: {}, got: {}", expected_lib_id, actual.id); - assert_eq!(actual.nonce, expected.1, "nonce wrong for dependency, expected: {}, got: {}", expected.1, actual.nonce); - assert_eq!(actual.address, expected.2, "address wrong for dependency, expected: {}, got: {}", expected.2, actual.address); - } - - Ok(()) - }, - self.project.root(), - ) - .expect("Linking failed"); + for id in self.contracts.keys() { + let identifier = id.identifier(); + + // Skip ds-test as it always has no dependencies etc. (and the path is outside root + // so is not sanitized) + if identifier.contains("DSTest") { + continue; + } - assert!(called_once, "linker did nothing"); + let LinkOutput { libs_to_deploy, .. 
} = link_with_nonce_or_address( + &self.contracts, + &Default::default(), + sender, + initial_nonce, + id, + ) + .expect("Linking failed"); + + let assertions = self + .dependency_assertions + .get(&identifier) + .unwrap_or_else(|| panic!("Unexpected artifact: {identifier}")); + + let expected_libs = + assertions.iter().map(|(identifier, _, _)| identifier).collect::>(); + + assert_eq!( + libs_to_deploy.len(), + expected_libs.len(), + "artifact {identifier} has more/less dependencies than expected ({} vs {}): {:#?}", + libs_to_deploy.len(), + assertions.len(), + libs_to_deploy + ); + + let identifiers = + libs_to_deploy.iter().map(|(id, _)| id.identifier()).collect::>(); + + for lib in expected_libs { + assert!(identifiers.contains(lib)); + } + + let unique_libs = + libs_to_deploy.iter().map(|(_, addr)| addr).collect::>(); + + assert_eq!( + unique_libs.len(), + libs_to_deploy.len(), + "not all libraries are unqiue: {:#?}", + libs_to_deploy + ); + } } } diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index b7c49e887627..b5455ef7714d 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -1,7 +1,7 @@ //! Forge test runner for multiple contracts. 
use crate::{ - link::{link_with_nonce_or_address, PostLinkInput, ResolvedDependency}, + link::{link_with_nonce_or_address, LinkOutput}, result::SuiteResult, ContractRunner, TestFilter, TestOptions, }; @@ -24,7 +24,8 @@ use foundry_evm::{ use rayon::prelude::*; use revm::primitives::SpecId; use std::{ - collections::{BTreeMap, HashSet}, + collections::BTreeMap, + fmt::Debug, iter::Iterator, path::Path, sync::{mpsc, Arc}, @@ -263,12 +264,12 @@ impl MultiContractRunnerBuilder { evm_opts: EvmOpts, ) -> Result where - A: ArtifactOutput, + A: ArtifactOutput + Debug, { + let output = output.with_stripped_file_prefixes(&root); // This is just the contracts compiled, but we need to merge this with the read cached // artifacts let contracts = output - .with_stripped_file_prefixes(&root) .into_artifacts() .map(|(i, c)| (i, c.into_contract_bytecode())) .collect::>(); @@ -281,72 +282,58 @@ impl MultiContractRunnerBuilder { // create a mapping of name => (abi, deployment code, Vec) let mut deployable_contracts = DeployableContracts::default(); - fn unique_deps(deps: Vec) -> Vec { - let mut filtered = Vec::new(); - let mut seen = HashSet::new(); - for dep in deps { - if !seen.insert(dep.id.clone()) { - continue - } - filtered.push(dep); + let artifact_contracts = ArtifactContracts::from_iter(contracts.clone()); + + for (id, contract) in contracts { + let abi = contract.abi.as_ref().expect("We should have an abi by now"); + + let LinkOutput { contracts, libs_to_deploy, .. } = link_with_nonce_or_address( + &artifact_contracts, + &Default::default(), + evm_opts.sender, + 1, + &id, + )?; + + let linked_contract = contracts.get(&id).unwrap().clone(); + + // get bytes if deployable, else add to known contracts and return. + // interfaces and abstract contracts should be known to enable fuzzing of their ABI + // but they should not be deployable and their source code should be skipped by the + // debugger and linker. 
+ let Some(bytecode) = linked_contract.bytecode.and_then(|b| b.object.into_bytes()) + else { + known_contracts.insert(id.clone(), (abi.clone(), vec![])); + continue; + }; + + // if it's a test, add it to deployable contracts + if abi.constructor.as_ref().map(|c| c.inputs.is_empty()).unwrap_or(true) && + abi.functions().any(|func| func.name.is_test() || func.name.is_invariant_test()) + { + deployable_contracts.insert( + id.clone(), + ( + abi.clone(), + bytecode, + libs_to_deploy + .into_iter() + .filter_map(|(id, _)| contracts.get(id).unwrap().bytecode.clone()) + .filter_map(|bcode| bcode.object.into_bytes()) + .collect::>(), + ), + ); } - filtered + contract + .deployed_bytecode + .and_then(|d_bcode| d_bcode.bytecode) + .and_then(|bcode| bcode.object.into_bytes()) + .and_then(|bytes| { + known_contracts.insert(id.clone(), (abi.clone(), bytes.to_vec())) + }); } - link_with_nonce_or_address( - ArtifactContracts::from_iter(contracts), - &mut known_contracts, - Default::default(), - evm_opts.sender, - 1, - &mut deployable_contracts, - |post_link_input| { - let PostLinkInput { - contract, - known_contracts, - id, - extra: deployable_contracts, - dependencies, - } = post_link_input; - let dependencies = unique_deps(dependencies); - - let abi = contract.abi.expect("We should have an abi by now"); - - // get bytes if deployable, else add to known contracts and return. - // interfaces and abstract contracts should be known to enable fuzzing of their ABI - // but they should not be deployable and their source code should be skipped by the - // debugger and linker. 
- let Some(bytecode) = contract.bytecode.and_then(|b| b.object.into_bytes()) else { - known_contracts.insert(id.clone(), (abi.clone(), vec![])); - return Ok(()) - }; - - // if it's a test, add it to deployable contracts - if abi.constructor.as_ref().map(|c| c.inputs.is_empty()).unwrap_or(true) && - abi.functions() - .any(|func| func.name.is_test() || func.name.is_invariant_test()) - { - deployable_contracts.insert( - id.clone(), - ( - abi.clone(), - bytecode, - dependencies.into_iter().map(|dep| dep.bytecode).collect::>(), - ), - ); - } - - contract - .deployed_bytecode - .and_then(|d_bcode| d_bcode.bytecode) - .and_then(|bcode| bcode.object.into_bytes()) - .and_then(|bytes| known_contracts.insert(id.clone(), (abi, bytes.to_vec()))); - Ok(()) - }, - root, - )?; - let execution_info = known_contracts.flatten(); Ok(MultiContractRunner { contracts: deployable_contracts, From a5b19338e791bddf2ed3fac1299f3e49439ce67e Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 7 Feb 2024 01:21:02 +0300 Subject: [PATCH 02/29] fix ci --- crates/forge/bin/cmd/script/build.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index f01c6e8f9e99..8ece1c2eb08e 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -160,6 +160,7 @@ impl ScriptArgs { let contract = contracts.get(target).ok_or_eyre("Target contract not found in artifacts")?.clone(); + // Collect all linked contracts let highlevel_known_contracts = contracts .iter() .filter_map(|(id, contract)| { @@ -167,6 +168,7 @@ impl ScriptArgs { .ok() .map(|tc| (id.clone(), tc)) }) + .filter(|(_, tc)| !tc.bytecode.object.is_unlinked()) .collect(); Ok(BuildOutput { From cc9eefa6aa55347e8e53eb67096127211a192231 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 7 Feb 2024 02:46:02 +0300 Subject: [PATCH 03/29] fix ci 2 --- crates/forge/bin/cmd/script/build.rs | 24 +++++++++++++----------- 
crates/forge/bin/cmd/script/cmd.rs | 8 ++++---- 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index 8ece1c2eb08e..091193aa93c2 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -60,11 +60,12 @@ impl ScriptArgs { }) .collect::>()?; - let target = self.find_target(&project, &contracts)?; + let target = self.find_target(&project, &contracts)?.clone(); + script_config.target_contract = Some(target.clone()); let mut output = self.link( project, - &contracts, + contracts, script_config.config.parsed_libraries()?, script_config.evm_opts.sender, script_config.sender_nonce, @@ -72,7 +73,6 @@ impl ScriptArgs { )?; output.sources = sources; - script_config.target_contract = Some(target.clone()); Ok(output) } @@ -126,14 +126,14 @@ impl ScriptArgs { pub fn link( &self, project: Project, - contracts: &ArtifactContracts, + contracts: ArtifactContracts, libraries: Libraries, sender: Address, nonce: u64, - target: &ArtifactId, + target: ArtifactId, ) -> Result { - let LinkOutput { libs_to_deploy, contracts, predeployed_libs } = - link_with_nonce_or_address(contracts, &libraries, sender, nonce, target)?; + let LinkOutput { libs_to_deploy, contracts: linked_contracts, predeployed_libs } = + link_with_nonce_or_address(&contracts, &libraries, sender, nonce, &target)?; // Merge with user provided libraries let mut new_libraries = Libraries { libs: BTreeMap::new() }; @@ -148,7 +148,7 @@ impl ScriptArgs { let predeploy_libraries = libs_to_deploy .into_iter() .map(|(id, _)| { - contracts + linked_contracts .get(id) .unwrap() .get_bytecode_bytes() @@ -157,11 +157,13 @@ impl ScriptArgs { }) .collect(); - let contract = - contracts.get(target).ok_or_eyre("Target contract not found in artifacts")?.clone(); + let contract = linked_contracts + .get(&target) + .ok_or_eyre("Target contract not found in artifacts")? 
+ .clone(); // Collect all linked contracts - let highlevel_known_contracts = contracts + let highlevel_known_contracts = linked_contracts .iter() .filter_map(|(id, contract)| { ContractBytecodeSome::try_from(ContractBytecode::from(contract.clone())) diff --git a/crates/forge/bin/cmd/script/cmd.rs b/crates/forge/bin/cmd/script/cmd.rs index beacf1cb955b..3c26f027be06 100644 --- a/crates/forge/bin/cmd/script/cmd.rs +++ b/crates/forge/bin/cmd/script/cmd.rs @@ -272,13 +272,13 @@ impl ScriptArgs { } if self.verify { - let target = self.find_target(&project, &default_known_contracts)?; + let target = self.find_target(&project, &default_known_contracts)?.clone(); // We might have predeployed libraries from the broadcasting, so we need to // relink the contracts with them, since their mapping is // not included in the solc cache files. let BuildOutput { highlevel_known_contracts, .. } = self.link( project, - &default_known_contracts, + default_known_contracts, Libraries::parse(&deployment_sequence.libraries)?, script_config.config.sender, // irrelevant, since we're not creating any 0, // irrelevant, since we're not creating any @@ -313,13 +313,13 @@ impl ScriptArgs { ) .await?; script_config.sender_nonce = nonce; - let target = self.find_target(&project, &default_known_contracts)?; + let target = self.find_target(&project, &default_known_contracts)?.clone(); let BuildOutput { libraries, contract, highlevel_known_contracts, predeploy_libraries, .. 
} = self.link( project, - &default_known_contracts, + default_known_contracts, script_config.config.parsed_libraries()?, new_sender, nonce, From 4b1b6fad385e857d2dc549415857370181632ba5 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 7 Feb 2024 03:22:01 +0300 Subject: [PATCH 04/29] fix ci 3 --- crates/forge/src/multi_runner.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index b5455ef7714d..8bd9fdff6e1b 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -297,11 +297,14 @@ impl MultiContractRunnerBuilder { let linked_contract = contracts.get(&id).unwrap().clone(); - // get bytes if deployable, else add to known contracts and return. + // get bytes if deployable, else add to known contracts and continue. // interfaces and abstract contracts should be known to enable fuzzing of their ABI // but they should not be deployable and their source code should be skipped by the // debugger and linker. 
- let Some(bytecode) = linked_contract.bytecode.and_then(|b| b.object.into_bytes()) + let Some(bytecode) = linked_contract + .bytecode + .and_then(|b| b.object.into_bytes()) + .filter(|b| !b.is_empty()) else { known_contracts.insert(id.clone(), (abi.clone(), vec![])); continue; From ec48668b7513e6815cf10c13bb2b4d82ef039594 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 7 Feb 2024 18:03:01 +0300 Subject: [PATCH 05/29] docs --- crates/forge/src/link.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index d4a00c4dc5ce..41484696ff42 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -7,6 +7,10 @@ use std::{ str::FromStr, }; +/// Finds an [ArtifactId] object in the given [ArtifactContracts] keys which corresponds to the +/// library path in the form of "./path/to/Lib.sol:Lib" +/// +/// Optionally accepts solc version, and if present, only compares artifacts with given version. fn find_artifact_id_by_library_path<'a>( contracts: &'a ArtifactContracts, file: &String, @@ -34,6 +38,7 @@ fn find_artifact_id_by_library_path<'a>( panic!("artifact not found for library {file} {name}"); } +/// Performs DFS on the graph of link references, and populates `deps` with all found libraries. pub fn collect_dependencies<'a>( target: &'a ArtifactId, contracts: &'a ArtifactContracts, @@ -51,9 +56,17 @@ pub fn collect_dependencies<'a>( } } +/// Output of the `link_with_nonce_or_address` pub struct LinkOutput<'a> { + /// [ArtifactContracts] object containing all artifacts linked with known libraries + /// It is guaranteed to contain `target` and all it's dependencies fully linked, and any other + /// contract may still be partially unlinked. pub contracts: ArtifactContracts, + /// Vector of libraries predeployed by user (basically another form of + /// `deployed_library_addresses`). 
pub predeployed_libs: Vec<(&'a ArtifactId, Address)>, + /// Vector of libraries that need to be deployed from sender address. + /// The order in which they appear in the vector is the order in which they should be deployed. pub libs_to_deploy: Vec<(&'a ArtifactId, Address)>, } From f900581359ebd46a106ceccaa71e4edad4900fdd Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 16:18:31 +0300 Subject: [PATCH 06/29] Refactor --- Cargo.lock | 3 +- Cargo.toml | 9 ++- crates/forge/bin/cmd/script/build.rs | 44 ++++------ crates/forge/bin/cmd/script/cmd.rs | 23 +++--- crates/forge/src/link.rs | 115 +++++++++++++++------------ crates/forge/src/multi_runner.rs | 15 +--- 6 files changed, 103 insertions(+), 106 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c7421accaecc..e6a36e337875 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3105,8 +3105,7 @@ dependencies = [ [[package]] name = "foundry-compilers" version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d88392f8b9848cfac5b11054b14e14268ae5361450bd45169df276f9af748c5" +source = "git+https://github.com/foundry-rs/compilers?branch=main#757652ef7fc2f90cfe490319d99dd1cf13f54564" dependencies = [ "alloy-json-abi", "alloy-primitives", diff --git a/Cargo.toml b/Cargo.toml index 9beebb39db30..3c9be6eeea4c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ resolver = "2" [workspace.package] version = "0.2.0" edition = "2021" -rust-version = "1.74" # Remember to update clippy.toml as well +rust-version = "1.74" # Remember to update clippy.toml as well authors = ["Foundry Contributors"] license = "MIT OR Apache-2.0" homepage = "https://github.com/foundry-rs/foundry" @@ -173,7 +173,10 @@ alloy-rlp = "0.3.3" solang-parser = "=0.3.3" ## misc -chrono = { version = "0.4", default-features = false, features = ["clock", "std"] } +chrono = { version = "0.4", default-features = false, features = [ + "clock", + "std", +] } color-eyre = "0.6" derive_more = "0.99" eyre = "0.6" 
@@ -226,3 +229,5 @@ revm = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-primitives = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-interpreter = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-precompile = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } + +foundry-compilers = { git = "https://github.com/foundry-rs/compilers", branch = "main" } diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index 091193aa93c2..98e2d2bb36c8 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -13,9 +13,9 @@ use foundry_compilers::{ cache::SolFilesCache, contracts::ArtifactContracts, info::ContractInfo, - Artifact, ArtifactId, Project, ProjectCompileOutput, + ArtifactId, Project, ProjectCompileOutput, }; -use std::{collections::BTreeMap, str::FromStr}; +use std::str::FromStr; impl ScriptArgs { /// Compiles the file or project and the verify metadata. 
@@ -63,10 +63,12 @@ impl ScriptArgs { let target = self.find_target(&project, &contracts)?.clone(); script_config.target_contract = Some(target.clone()); - let mut output = self.link( + let libraries = script_config.config.solc_settings()?.libraries; + + let mut output = self.link_script_target( project, contracts, - script_config.config.parsed_libraries()?, + libraries, script_config.evm_opts.sender, script_config.sender_nonce, target, @@ -123,7 +125,7 @@ impl ScriptArgs { target.ok_or_eyre(format!("Could not find target contract: {}", target_fname)) } - pub fn link( + pub fn link_script_target( &self, project: Project, contracts: ArtifactContracts, @@ -132,31 +134,13 @@ impl ScriptArgs { nonce: u64, target: ArtifactId, ) -> Result { - let LinkOutput { libs_to_deploy, contracts: linked_contracts, predeployed_libs } = - link_with_nonce_or_address(&contracts, &libraries, sender, nonce, &target)?; - - // Merge with user provided libraries - let mut new_libraries = Libraries { libs: BTreeMap::new() }; - for (id, address) in libs_to_deploy.iter().chain(predeployed_libs.iter()) { - new_libraries - .libs - .entry(id.source.clone()) - .or_default() - .insert(id.name.split('.').next().unwrap().to_owned(), address.to_string()); - } - - let predeploy_libraries = libs_to_deploy - .into_iter() - .map(|(id, _)| { - linked_contracts - .get(id) - .unwrap() - .get_bytecode_bytes() - .unwrap_or_else(|| panic!("bytecode for {} is unlinked", id.name)) - .into_owned() - }) - .collect(); + let LinkOutput { + libs_to_deploy: predeploy_libraries, + contracts: linked_contracts, + libraries, + } = link_with_nonce_or_address(&contracts, libraries, sender, nonce, &target)?; + // Get linked target artifact let contract = linked_contracts .get(&target) .ok_or_eyre("Target contract not found in artifacts")? 
@@ -180,7 +164,7 @@ impl ScriptArgs { predeploy_libraries, sources: Default::default(), project, - libraries: new_libraries, + libraries, }) } diff --git a/crates/forge/bin/cmd/script/cmd.rs b/crates/forge/bin/cmd/script/cmd.rs index 3c26f027be06..cb2f4972f7bb 100644 --- a/crates/forge/bin/cmd/script/cmd.rs +++ b/crates/forge/bin/cmd/script/cmd.rs @@ -276,14 +276,19 @@ impl ScriptArgs { // We might have predeployed libraries from the broadcasting, so we need to // relink the contracts with them, since their mapping is // not included in the solc cache files. - let BuildOutput { highlevel_known_contracts, .. } = self.link( - project, - default_known_contracts, - Libraries::parse(&deployment_sequence.libraries)?, - script_config.config.sender, // irrelevant, since we're not creating any - 0, // irrelevant, since we're not creating any - target, - )?; + let BuildOutput { highlevel_known_contracts, predeploy_libraries, .. } = self + .link_script_target( + project, + default_known_contracts, + Libraries::parse(&deployment_sequence.libraries)?, + script_config.config.sender, // irrelevant, since we're not creating any + 0, // irrelevant, since we're not creating any + target, + )?; + + if predeploy_libraries.len() > 0 { + eyre::bail!("Incomplete set of libraries in deployment artifact."); + } verify.known_contracts = flatten_contracts(&highlevel_known_contracts, false); @@ -317,7 +322,7 @@ impl ScriptArgs { let BuildOutput { libraries, contract, highlevel_known_contracts, predeploy_libraries, .. 
- } = self.link( + } = self.link_script_target( project, default_known_contracts, script_config.config.parsed_libraries()?, diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index 41484696ff42..e8b61a3398de 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -1,11 +1,8 @@ -use alloy_primitives::Address; +use alloy_primitives::{Address, Bytes}; use eyre::Result; -use foundry_compilers::{artifacts::Libraries, contracts::ArtifactContracts, ArtifactId}; +use foundry_compilers::{artifacts::Libraries, contracts::ArtifactContracts, Artifact, ArtifactId}; use semver::Version; -use std::{ - collections::{HashMap, HashSet}, - str::FromStr, -}; +use std::{collections::HashSet, path::PathBuf, str::FromStr}; /// Finds an [ArtifactId] object in the given [ArtifactContracts] keys which corresponds to the /// library path in the form of "./path/to/Lib.sol:Lib" @@ -28,7 +25,7 @@ fn find_artifact_id_by_library_path<'a>( continue; } - if !(id.source.ends_with(file)) { + if id.source != PathBuf::from(file) { continue; } @@ -56,76 +53,94 @@ pub fn collect_dependencies<'a>( } } +/// Links given artifacts with given library addresses. +/// +/// Artifacts returned by this function may still be partially unlinked if some of their +/// dependencies weren't present in `libraries`. 
+pub fn link(contracts: &ArtifactContracts, libraries: &Libraries) -> Result { + contracts + .iter() + .map(|(id, contract)| { + let mut contract = contract.clone(); + + for (file, libs) in &libraries.libs { + for (name, address) in libs { + let address = Address::from_str(address)?; + if let Some(bytecode) = contract.bytecode.as_mut() { + bytecode.link(file.to_string_lossy(), name, address); + } + if let Some(deployed_bytecode) = + contract.deployed_bytecode.as_mut().and_then(|b| b.bytecode.as_mut()) + { + deployed_bytecode.link(file.to_string_lossy(), name, address); + } + } + } + Ok((id.clone(), contract)) + }) + .collect() +} + /// Output of the `link_with_nonce_or_address` -pub struct LinkOutput<'a> { +pub struct LinkOutput { /// [ArtifactContracts] object containing all artifacts linked with known libraries /// It is guaranteed to contain `target` and all it's dependencies fully linked, and any other /// contract may still be partially unlinked. pub contracts: ArtifactContracts, /// Vector of libraries predeployed by user (basically another form of /// `deployed_library_addresses`). - pub predeployed_libs: Vec<(&'a ArtifactId, Address)>, + pub libraries: Libraries, /// Vector of libraries that need to be deployed from sender address. /// The order in which they appear in the vector is the order in which they should be deployed. - pub libs_to_deploy: Vec<(&'a ArtifactId, Address)>, + pub libs_to_deploy: Vec, } +/// Links given artifact with either given library addresses or address computed from sender and +/// nonce. +/// +/// Current linking implementation assumes that all link_references keys are matching the format in +/// which sources appear in [ArtifactId] keys. +/// +/// You should make sure to strip paths via `with_stripped_file_prefixes` for both +/// `libraries` and `contracts` *before* invoking linker. 
+/// +/// This function will always link target contract completely, however, in case of paths format +/// mismatch it may not recognize some of the predeployed libraries, leading to more deployments +/// than needed. pub fn link_with_nonce_or_address<'a>( contracts: &'a ArtifactContracts, - deployed_library_addresses: &Libraries, + mut libraries: Libraries, sender: Address, mut nonce: u64, target: &'a ArtifactId, -) -> Result> { +) -> Result { let mut needed_libraries = HashSet::new(); collect_dependencies(target, contracts, &mut needed_libraries); - let mut predeployed_libs = HashMap::new(); - - // Populate predeployed libs firstly - for (path, libs) in &deployed_library_addresses.libs { - let path = path.to_string_lossy().to_string(); - for (name, address) in libs { - let artifact_id = find_artifact_id_by_library_path(contracts, &path, name, None); - if needed_libraries.contains(artifact_id) { - let address = Address::from_str(address)?; - predeployed_libs.insert(artifact_id, address); - } - } - } - let mut libs_to_deploy = Vec::new(); + // If `libraries` does not contain needed dependency, compute its address and add to + // `libs_to_deploy`. 
for id in needed_libraries { - if !predeployed_libs.contains_key(id) { - libs_to_deploy.push((id, sender.create(nonce))); + let lib_name = id.name.split(".").next().unwrap().to_owned(); + libraries.libs.entry(id.source.clone()).or_default().entry(lib_name).or_insert_with(|| { + let address = sender.create(nonce); + libs_to_deploy.push((id, address)); nonce += 1; - } - } - let predeployed_libs = predeployed_libs.into_iter().collect::>(); + address.to_checksum(None) + }); + } // Link contracts - let contracts = contracts - .iter() - .map(|(id, contract)| { - let mut contract = contract.clone(); + let contracts = link(contracts, &libraries)?; - for (id, address) in libs_to_deploy.iter().chain(predeployed_libs.iter()) { - if let Some(bytecode) = contract.bytecode.as_mut() { - bytecode.link(id.source.to_string_lossy(), &id.name, *address); - } - if let Some(deployed_bytecode) = - contract.deployed_bytecode.as_mut().and_then(|b| b.bytecode.as_mut()) - { - deployed_bytecode.link(id.source.to_string_lossy(), &id.name, *address); - } - } - (id.clone(), contract) - }) - .collect::(); + let libs_to_deploy = libs_to_deploy + .into_iter() + .map(|(id, _)| contracts.get(id).unwrap().get_bytecode_bytes().unwrap().into_owned()) + .collect(); - Ok(LinkOutput { contracts, predeployed_libs, libs_to_deploy }) + Ok(LinkOutput { contracts, libraries, libs_to_deploy }) } #[cfg(test)] @@ -185,7 +200,7 @@ mod tests { let LinkOutput { libs_to_deploy, .. } = link_with_nonce_or_address( &self.contracts, - &Default::default(), + Default::default(), sender, initial_nonce, id, diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 8bd9fdff6e1b..df99deb8a8e2 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -289,7 +289,7 @@ impl MultiContractRunnerBuilder { let LinkOutput { contracts, libs_to_deploy, .. 
} = link_with_nonce_or_address( &artifact_contracts, - &Default::default(), + Default::default(), evm_opts.sender, 1, &id, @@ -314,18 +314,7 @@ impl MultiContractRunnerBuilder { if abi.constructor.as_ref().map(|c| c.inputs.is_empty()).unwrap_or(true) && abi.functions().any(|func| func.name.is_test() || func.name.is_invariant_test()) { - deployable_contracts.insert( - id.clone(), - ( - abi.clone(), - bytecode, - libs_to_deploy - .into_iter() - .filter_map(|(id, _)| contracts.get(id).unwrap().bytecode.clone()) - .filter_map(|bcode| bcode.object.into_bytes()) - .collect::>(), - ), - ); + deployable_contracts.insert(id.clone(), (abi.clone(), bytecode, libs_to_deploy)); } contract From 243eeee3e4f574499c8ddbc46d9b3ff6d9b2baf0 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 16:21:37 +0300 Subject: [PATCH 07/29] fix --- Cargo.lock | 3 ++- Cargo.toml | 9 ++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e6a36e337875..c7421accaecc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3105,7 +3105,8 @@ dependencies = [ [[package]] name = "foundry-compilers" version = "0.3.2" -source = "git+https://github.com/foundry-rs/compilers?branch=main#757652ef7fc2f90cfe490319d99dd1cf13f54564" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d88392f8b9848cfac5b11054b14e14268ae5361450bd45169df276f9af748c5" dependencies = [ "alloy-json-abi", "alloy-primitives", diff --git a/Cargo.toml b/Cargo.toml index 3c9be6eeea4c..9beebb39db30 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ resolver = "2" [workspace.package] version = "0.2.0" edition = "2021" -rust-version = "1.74" # Remember to update clippy.toml as well +rust-version = "1.74" # Remember to update clippy.toml as well authors = ["Foundry Contributors"] license = "MIT OR Apache-2.0" homepage = "https://github.com/foundry-rs/foundry" @@ -173,10 +173,7 @@ alloy-rlp = "0.3.3" solang-parser = "=0.3.3" ## misc -chrono = { version = 
"0.4", default-features = false, features = [ - "clock", - "std", -] } +chrono = { version = "0.4", default-features = false, features = ["clock", "std"] } color-eyre = "0.6" derive_more = "0.99" eyre = "0.6" @@ -229,5 +226,3 @@ revm = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-primitives = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-interpreter = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-precompile = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } - -foundry-compilers = { git = "https://github.com/foundry-rs/compilers", branch = "main" } From 68143ede98591920afd3c16f1dda237a10ad3a59 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 16:45:27 +0300 Subject: [PATCH 08/29] fix tests --- crates/forge/bin/cmd/script/cmd.rs | 2 +- crates/forge/src/link.rs | 25 ++++++++++--------------- 2 files changed, 11 insertions(+), 16 deletions(-) diff --git a/crates/forge/bin/cmd/script/cmd.rs b/crates/forge/bin/cmd/script/cmd.rs index cb2f4972f7bb..7d76df1bf677 100644 --- a/crates/forge/bin/cmd/script/cmd.rs +++ b/crates/forge/bin/cmd/script/cmd.rs @@ -286,7 +286,7 @@ impl ScriptArgs { target, )?; - if predeploy_libraries.len() > 0 { + if !predeploy_libraries.is_empty() { eyre::bail!("Incomplete set of libraries in deployment artifact."); } diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index e8b61a3398de..1255e8131b48 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -122,7 +122,7 @@ pub fn link_with_nonce_or_address<'a>( // If `libraries` does not contain needed dependency, compute its address and add to // `libs_to_deploy`. 
for id in needed_libraries { - let lib_name = id.name.split(".").next().unwrap().to_owned(); + let lib_name = id.name.split('.').next().unwrap().to_owned(); libraries.libs.entry(id.source.clone()).or_default().entry(lib_name).or_insert_with(|| { let address = sender.create(nonce); libs_to_deploy.push((id, address)); @@ -145,7 +145,7 @@ pub fn link_with_nonce_or_address<'a>( #[cfg(test)] mod tests { - use std::path::PathBuf; + use std::{collections::HashMap, path::PathBuf}; use super::*; use foundry_compilers::{Project, ProjectPathsConfig}; @@ -198,7 +198,7 @@ mod tests { continue; } - let LinkOutput { libs_to_deploy, .. } = link_with_nonce_or_address( + let LinkOutput { libs_to_deploy, libraries, .. } = link_with_nonce_or_address( &self.contracts, Default::default(), sender, @@ -224,22 +224,17 @@ mod tests { libs_to_deploy ); - let identifiers = - libs_to_deploy.iter().map(|(id, _)| id.identifier()).collect::>(); + let identifiers = libraries + .libs + .iter() + .flat_map(|(file, libs)| { + libs.iter().map(|(name, _)| format!("{}:{}", file.to_string_lossy(), name)) + }) + .collect::>(); for lib in expected_libs { assert!(identifiers.contains(lib)); } - - let unique_libs = - libs_to_deploy.iter().map(|(_, addr)| addr).collect::>(); - - assert_eq!( - unique_libs.len(), - libs_to_deploy.len(), - "not all libraries are unqiue: {:#?}", - libs_to_deploy - ); } } } From 89d1a9d27c2acfac3d885efe5e858286f6e9cbf3 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 17:29:46 +0300 Subject: [PATCH 09/29] handle root path --- Cargo.lock | 3 +- Cargo.toml | 2 + crates/forge/bin/cmd/script/build.rs | 9 +++- crates/forge/bin/cmd/script/cmd.rs | 4 +- crates/forge/src/link.rs | 70 +++++++++++++++++----------- crates/forge/src/multi_runner.rs | 1 + 6 files changed, 58 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c7421accaecc..e6a36e337875 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3105,8 +3105,7 @@ dependencies = [ [[package]] name = 
"foundry-compilers" version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d88392f8b9848cfac5b11054b14e14268ae5361450bd45169df276f9af748c5" +source = "git+https://github.com/foundry-rs/compilers?branch=main#757652ef7fc2f90cfe490319d99dd1cf13f54564" dependencies = [ "alloy-json-abi", "alloy-primitives", diff --git a/Cargo.toml b/Cargo.toml index 9beebb39db30..d3325d3865f9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -226,3 +226,5 @@ revm = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-primitives = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-interpreter = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-precompile = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } + +foundry-compilers = { git = "https://github.com/foundry-rs/compilers", branch = "main" } \ No newline at end of file diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index 98e2d2bb36c8..54f0073e4b1c 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -138,7 +138,14 @@ impl ScriptArgs { libs_to_deploy: predeploy_libraries, contracts: linked_contracts, libraries, - } = link_with_nonce_or_address(&contracts, libraries, sender, nonce, &target)?; + } = link_with_nonce_or_address( + &contracts, + libraries, + sender, + nonce, + &target, + project.root(), + )?; // Get linked target artifact let contract = linked_contracts diff --git a/crates/forge/bin/cmd/script/cmd.rs b/crates/forge/bin/cmd/script/cmd.rs index 7d76df1bf677..cbc257eb8060 100644 --- a/crates/forge/bin/cmd/script/cmd.rs +++ b/crates/forge/bin/cmd/script/cmd.rs @@ -320,12 +320,14 @@ impl ScriptArgs { script_config.sender_nonce = nonce; let target = self.find_target(&project, &default_known_contracts)?.clone(); + let libraries = script_config.config.solc_settings()?.libraries; + let BuildOutput { libraries, contract, 
highlevel_known_contracts, predeploy_libraries, .. } = self.link_script_target( project, default_known_contracts, - script_config.config.parsed_libraries()?, + libraries, new_sender, nonce, target, diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index 1255e8131b48..aeb5d9cda68b 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -4,6 +4,18 @@ use foundry_compilers::{artifacts::Libraries, contracts::ArtifactContracts, Arti use semver::Version; use std::{collections::HashSet, path::PathBuf, str::FromStr}; +/// Helper method to convert [ArtifactId] to the format in which libraries are stored in [Libraries] +/// object. +/// +/// Strips project root path from source file path. +fn convert_artifact_id_to_lib_path(id: &ArtifactId, root_path: &PathBuf) -> (PathBuf, String) { + let path = id.source.strip_prefix(root_path).unwrap_or(&id.source); + // name is either {LibName} or {LibName}.{version} + let name = id.name.split('.').next().unwrap(); + + (path.to_path_buf(), name.to_owned()) +} + /// Finds an [ArtifactId] object in the given [ArtifactContracts] keys which corresponds to the /// library path in the form of "./path/to/Lib.sol:Lib" /// @@ -13,6 +25,7 @@ fn find_artifact_id_by_library_path<'a>( file: &String, name: &String, version: Option<&Version>, + root_path: &PathBuf, ) -> &'a ArtifactId { for id in contracts.keys() { if let Some(version) = version { @@ -20,16 +33,11 @@ fn find_artifact_id_by_library_path<'a>( continue; } } - // name is either {LibName} or {LibName}.{version} - if id.name.split('.').next().unwrap() != name { - continue; - } + let (artifact_path, artifact_name) = convert_artifact_id_to_lib_path(id, root_path); - if id.source != PathBuf::from(file) { - continue; + if artifact_name == *name && artifact_path == PathBuf::from(file) { + return id; } - - return id; } panic!("artifact not found for library {file} {name}"); @@ -40,14 +48,20 @@ pub fn collect_dependencies<'a>( target: &'a ArtifactId, contracts: &'a 
ArtifactContracts,
     deps: &mut HashSet<&'a ArtifactId>,
+    root_path: &PathBuf,
 ) {
     let references = contracts.get(target).unwrap().all_link_references();
     for (file, libs) in &references {
         for contract in libs.keys() {
-            let id =
-                find_artifact_id_by_library_path(contracts, file, contract, Some(&target.version));
+            let id = find_artifact_id_by_library_path(
+                contracts,
+                file,
+                contract,
+                Some(&target.version),
+                root_path,
+            );
             if deps.insert(id) {
-                collect_dependencies(id, contracts, deps);
+                collect_dependencies(id, contracts, deps, root_path);
             }
         }
     }
@@ -87,8 +101,8 @@ pub struct LinkOutput {
     /// It is guaranteed to contain `target` and all it's dependencies fully linked, and any other
     /// contract may still be partially unlinked.
     pub contracts: ArtifactContracts,
-    /// Vector of libraries predeployed by user (basically another form of
-    /// `deployed_library_addresses`).
+    /// Resulting library addresses. Contains both user-provided and newly deployed libraries.
+    /// It will always contain library paths with stripped path prefixes.
     pub libraries: Libraries,
     /// Vector of libraries that need to be deployed from sender address.
     /// The order in which they appear in the vector is the order in which they should be deployed.
@@ -98,32 +112,31 @@
 /// Links given artifact with either given library addresses or address computed from sender and
 /// nonce.
 ///
-/// Current linking implementation assumes that all link_references keys are matching the format in
-/// which sources appear in [ArtifactId] keys.
-///
-/// You should make sure to strip paths via `with_stripped_file_prefixes` for both
-/// `libraries` and `contracts` *before* invoking linker.
-///
-/// This function will always link target contract completely, however, in case of paths format
-/// mismatch it may not recognize some of the predeployed libraries, leading to more deployments
-/// than needed.
+/// Each key in `libraries` should either be an absolute path or relative to the project root.
+/// All remappings should be resolved.
 pub fn link_with_nonce_or_address<'a>(
     contracts: &'a ArtifactContracts,
-    mut libraries: Libraries,
+    libraries: Libraries,
     sender: Address,
     mut nonce: u64,
     target: &'a ArtifactId,
+    root_path: &PathBuf,
 ) -> Result {
+    // Library paths in `link_references` keys are always stripped, so we have to strip
+    // user-provided paths to be able to match them correctly.
+    let mut libraries = libraries.with_stripped_file_prefixes(root_path);
+
     let mut needed_libraries = HashSet::new();
-    collect_dependencies(target, contracts, &mut needed_libraries);
+    collect_dependencies(target, contracts, &mut needed_libraries, root_path);
 
     let mut libs_to_deploy = Vec::new();
 
     // If `libraries` does not contain needed dependency, compute its address and add to
     // `libs_to_deploy`.
     for id in needed_libraries {
-        let lib_name = id.name.split('.').next().unwrap().to_owned();
-        libraries.libs.entry(id.source.clone()).or_default().entry(lib_name).or_insert_with(|| {
+        let (lib_path, lib_name) = convert_artifact_id_to_lib_path(id, root_path);
+
+        libraries.libs.entry(lib_path).or_default().entry(lib_name).or_insert_with(|| {
             let address = sender.create(nonce);
             libs_to_deploy.push((id, address));
             nonce += 1;
@@ -135,6 +148,7 @@
     // Link contracts
     let contracts = link(contracts, &libraries)?;
 
+    // Collect bytecodes for `libs_to_deploy`, as we have them linked now.
let libs_to_deploy = libs_to_deploy .into_iter() .map(|(id, _)| contracts.get(id).unwrap().get_bytecode_bytes().unwrap().into_owned()) @@ -151,6 +165,7 @@ mod tests { use foundry_compilers::{Project, ProjectPathsConfig}; struct LinkerTest { + project: Project, contracts: ArtifactContracts, dependency_assertions: HashMap>, } @@ -176,7 +191,7 @@ mod tests { .map(|(id, c)| (id, c.into_contract_bytecode())) .collect::(); - Self { contracts, dependency_assertions: HashMap::new() } + Self { project, contracts, dependency_assertions: HashMap::new() } } fn assert_dependencies( @@ -204,6 +219,7 @@ mod tests { sender, initial_nonce, id, + self.project.root(), ) .expect("Linking failed"); diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index df99deb8a8e2..2111fccefae5 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -293,6 +293,7 @@ impl MultiContractRunnerBuilder { evm_opts.sender, 1, &id, + &root.as_ref().to_path_buf(), )?; let linked_contract = contracts.get(&id).unwrap().clone(); From 9e0aa35c3ceffcd37da40871eb4ea7e3fd6d1dbd Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 17:53:46 +0300 Subject: [PATCH 10/29] tests --- crates/forge/src/link.rs | 111 +++++++-------------------------------- 1 file changed, 20 insertions(+), 91 deletions(-) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index aeb5d9cda68b..a7b82f9ff6c6 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -2,7 +2,11 @@ use alloy_primitives::{Address, Bytes}; use eyre::Result; use foundry_compilers::{artifacts::Libraries, contracts::ArtifactContracts, Artifact, ArtifactId}; use semver::Version; -use std::{collections::HashSet, path::PathBuf, str::FromStr}; +use std::{ + collections::{BTreeSet, HashSet}, + path::PathBuf, + str::FromStr, +}; /// Helper method to convert [ArtifactId] to the format in which libraries are stored in [Libraries] /// object. 
@@ -47,7 +51,7 @@ fn find_artifact_id_by_library_path<'a>( pub fn collect_dependencies<'a>( target: &'a ArtifactId, contracts: &'a ArtifactContracts, - deps: &mut HashSet<&'a ArtifactId>, + deps: &mut BTreeSet<&'a ArtifactId>, root_path: &PathBuf, ) { let references = contracts.get(target).unwrap().all_link_references(); @@ -126,7 +130,7 @@ pub fn link_with_nonce_or_address<'a>( // user-provided paths to be able to match them correctly. let mut libraries = libraries.with_stripped_file_prefixes(root_path); - let mut needed_libraries = HashSet::new(); + let mut needed_libraries = BTreeSet::new(); collect_dependencies(target, contracts, &mut needed_libraries, root_path); let mut libs_to_deploy = Vec::new(); @@ -167,7 +171,7 @@ mod tests { struct LinkerTest { project: Project, contracts: ArtifactContracts, - dependency_assertions: HashMap>, + dependency_assertions: HashMap>, } impl LinkerTest { @@ -197,7 +201,7 @@ mod tests { fn assert_dependencies( mut self, artifact_id: String, - deps: Vec<(String, u64, Address)>, + deps: Vec<(String, Address)>, ) -> Self { self.dependency_assertions.insert(artifact_id, deps); self @@ -228,28 +232,24 @@ mod tests { .get(&identifier) .unwrap_or_else(|| panic!("Unexpected artifact: {identifier}")); - let expected_libs = - assertions.iter().map(|(identifier, _, _)| identifier).collect::>(); - assert_eq!( libs_to_deploy.len(), - expected_libs.len(), + assertions.len(), "artifact {identifier} has more/less dependencies than expected ({} vs {}): {:#?}", libs_to_deploy.len(), assertions.len(), libs_to_deploy ); - let identifiers = libraries - .libs - .iter() - .flat_map(|(file, libs)| { - libs.iter().map(|(name, _)| format!("{}:{}", file.to_string_lossy(), name)) - }) - .collect::>(); - - for lib in expected_libs { - assert!(identifiers.contains(lib)); + for (dep_identifier, address) in assertions { + let (file, name) = dep_identifier.split_once(":").unwrap(); + if let Some(lib_address) = + 
libraries.libs.get(&PathBuf::from(file)).and_then(|libs| libs.get(name)) + { + assert_eq!(*lib_address, address.to_string(), "incorrect library address for dependency {dep_identifier} of {identifier}"); + } else { + panic!("Library not found") + } } } } @@ -263,7 +263,6 @@ mod tests { "simple/Simple.t.sol:LibraryConsumer".to_string(), vec![( "simple/Simple.t.sol:Lib".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), )], ) @@ -271,7 +270,6 @@ mod tests { "simple/Simple.t.sol:SimpleLibraryLinkingTest".to_string(), vec![( "simple/Simple.t.sol:Lib".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), )], ) @@ -286,7 +284,6 @@ mod tests { "nested/Nested.t.sol:NestedLib".to_string(), vec![( "nested/Nested.t.sol:Lib".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), )], ) @@ -297,18 +294,11 @@ mod tests { // have the same address and nonce. ( "nested/Nested.t.sol:Lib".to_string(), - 1, - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( - "nested/Nested.t.sol:Lib".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), ), ( "nested/Nested.t.sol:NestedLib".to_string(), - 2, - Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), + Address::from_str("0x47e9Fbef8C83A1714F1951F142132E6e90F5fa5D").unwrap(), ), ], ) @@ -317,17 +307,10 @@ mod tests { vec![ ( "nested/Nested.t.sol:Lib".to_string(), - 1, - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( - "nested/Nested.t.sol:Lib".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), ), ( "nested/Nested.t.sol:NestedLib".to_string(), - 2, Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), ), ], @@ -355,7 +338,6 @@ mod tests { "duplicate/Duplicate.t.sol:C".to_string(), vec![( "duplicate/Duplicate.t.sol:A".to_string(), - 1, 
Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), )], ) @@ -363,7 +345,6 @@ mod tests { "duplicate/Duplicate.t.sol:D".to_string(), vec![( "duplicate/Duplicate.t.sol:B".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), )], ) @@ -372,12 +353,10 @@ mod tests { vec![ ( "duplicate/Duplicate.t.sol:A".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), ), ( "duplicate/Duplicate.t.sol:C".to_string(), - 2, Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), ), ], @@ -387,47 +366,22 @@ mod tests { vec![ ( "duplicate/Duplicate.t.sol:A".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), ), ( "duplicate/Duplicate.t.sol:B".to_string(), - 2, Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), ), - ( - "duplicate/Duplicate.t.sol:A".to_string(), - 1, - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), ( "duplicate/Duplicate.t.sol:C".to_string(), - 3, Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca").unwrap(), ), - ( - "duplicate/Duplicate.t.sol:B".to_string(), - 2, - Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), - ), ( "duplicate/Duplicate.t.sol:D".to_string(), - 4, Address::from_str("0x47c5e40890bce4a473a49d7501808b9633f29782").unwrap(), ), - ( - "duplicate/Duplicate.t.sol:A".to_string(), - 1, - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:C".to_string(), - 3, - Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca").unwrap(), - ), ( "duplicate/Duplicate.t.sol:E".to_string(), - 5, Address::from_str("0x29b2440db4a256b0c1e6d3b4cdcaa68e2440a08f").unwrap(), ), ], @@ -437,47 +391,22 @@ mod tests { vec![ ( "duplicate/Duplicate.t.sol:A".to_string(), - 1, Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), ), ( 
"duplicate/Duplicate.t.sol:B".to_string(), - 2, Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), ), - ( - "duplicate/Duplicate.t.sol:A".to_string(), - 1, - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), ( "duplicate/Duplicate.t.sol:C".to_string(), - 3, Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca").unwrap(), ), - ( - "duplicate/Duplicate.t.sol:B".to_string(), - 2, - Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), - ), ( "duplicate/Duplicate.t.sol:D".to_string(), - 4, Address::from_str("0x47c5e40890bce4a473a49d7501808b9633f29782").unwrap(), ), - ( - "duplicate/Duplicate.t.sol:A".to_string(), - 1, - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:C".to_string(), - 3, - Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca").unwrap(), - ), ( "duplicate/Duplicate.t.sol:E".to_string(), - 5, Address::from_str("0x29b2440db4a256b0c1e6d3b4cdcaa68e2440a08f").unwrap(), ), ], From 3d720c57aef2424a69684b819628a9474124bef3 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 17:57:55 +0300 Subject: [PATCH 11/29] clippy --- crates/forge/src/link.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index a7b82f9ff6c6..b8383e3946e3 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -2,11 +2,7 @@ use alloy_primitives::{Address, Bytes}; use eyre::Result; use foundry_compilers::{artifacts::Libraries, contracts::ArtifactContracts, Artifact, ArtifactId}; use semver::Version; -use std::{ - collections::{BTreeSet, HashSet}, - path::PathBuf, - str::FromStr, -}; +use std::{collections::BTreeSet, path::PathBuf, str::FromStr}; /// Helper method to convert [ArtifactId] to the format in which libraries are stored in [Libraries] /// object. 
From a12e87e942e4a78470cc8b95714b7b43e77e5d68 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 19:29:21 +0300 Subject: [PATCH 12/29] tests --- crates/forge/src/link.rs | 325 +++++++++++++++++++++------------------ 1 file changed, 177 insertions(+), 148 deletions(-) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index b8383e3946e3..c8e43e69a572 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -171,7 +171,7 @@ mod tests { } impl LinkerTest { - fn new(path: impl Into) -> Self { + fn new(path: impl Into, strip_prefixes: bool) -> Self { let path = path.into(); let paths = ProjectPathsConfig::builder() .root("../../testdata/linking") @@ -183,10 +183,14 @@ mod tests { let project = Project::builder().paths(paths).ephemeral().no_artifacts().build().unwrap(); - let contracts = project - .compile() - .unwrap() - .with_stripped_file_prefixes(project.root()) + + let mut contracts = project.compile().unwrap(); + + if strip_prefixes { + contracts = contracts.with_stripped_file_prefixes(project.root()); + } + + let contracts = contracts .into_artifacts() .map(|(id, c)| (id, c.into_contract_bytecode())) .collect::(); @@ -205,7 +209,15 @@ mod tests { fn test_with_sender_and_nonce(self, sender: Address, initial_nonce: u64) { for id in self.contracts.keys() { - let identifier = id.identifier(); + // If we didn't strip paths, artifacts will have absolute paths. + // That's expected and we want to ensure that only `libraries` object has relative + // paths, artifacts should be kept as is. + let source = id + .source + .strip_prefix(self.project.root()) + .unwrap_or(&id.source) + .to_string_lossy(); + let identifier = format!("{source}:{}", id.name); // Skip ds-test as it always has no dependencies etc. 
(and the path is outside root // so is not sanitized) @@ -238,7 +250,7 @@ mod tests { ); for (dep_identifier, address) in assertions { - let (file, name) = dep_identifier.split_once(":").unwrap(); + let (file, name) = dep_identifier.split_once(':').unwrap(); if let Some(lib_address) = libraries.libs.get(&PathBuf::from(file)).and_then(|libs| libs.get(name)) { @@ -251,162 +263,179 @@ mod tests { } } + fn link_test(path: impl Into, test_fn: impl Fn(LinkerTest)) { + let path = path.into(); + test_fn(LinkerTest::new(path.clone(), true)); + test_fn(LinkerTest::new(path, false)); + } + #[test] fn link_simple() { - LinkerTest::new("../../testdata/linking/simple") - .assert_dependencies("simple/Simple.t.sol:Lib".to_string(), vec![]) - .assert_dependencies( - "simple/Simple.t.sol:LibraryConsumer".to_string(), - vec![( - "simple/Simple.t.sol:Lib".to_string(), - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - )], - ) - .assert_dependencies( - "simple/Simple.t.sol:SimpleLibraryLinkingTest".to_string(), - vec![( - "simple/Simple.t.sol:Lib".to_string(), - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - )], - ) - .test_with_sender_and_nonce(Address::default(), 1); + link_test("../../testdata/linking/simple", |linker| { + linker + .assert_dependencies("simple/Simple.t.sol:Lib".to_string(), vec![]) + .assert_dependencies( + "simple/Simple.t.sol:LibraryConsumer".to_string(), + vec![( + "simple/Simple.t.sol:Lib".to_string(), + Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), + )], + ) + .assert_dependencies( + "simple/Simple.t.sol:SimpleLibraryLinkingTest".to_string(), + vec![( + "simple/Simple.t.sol:Lib".to_string(), + Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), + )], + ) + .test_with_sender_and_nonce(Address::default(), 1); + }); } #[test] fn link_nested() { - LinkerTest::new("../../testdata/linking/nested") - .assert_dependencies("nested/Nested.t.sol:Lib".to_string(), 
vec![]) - .assert_dependencies( - "nested/Nested.t.sol:NestedLib".to_string(), - vec![( - "nested/Nested.t.sol:Lib".to_string(), - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - )], - ) - .assert_dependencies( - "nested/Nested.t.sol:LibraryConsumer".to_string(), - vec![ - // Lib shows up here twice, because the linker sees it twice, but it should - // have the same address and nonce. - ( + link_test("../../testdata/linking/nested", |linker| { + linker + .assert_dependencies("nested/Nested.t.sol:Lib".to_string(), vec![]) + .assert_dependencies( + "nested/Nested.t.sol:NestedLib".to_string(), + vec![( "nested/Nested.t.sol:Lib".to_string(), Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( - "nested/Nested.t.sol:NestedLib".to_string(), - Address::from_str("0x47e9Fbef8C83A1714F1951F142132E6e90F5fa5D").unwrap(), - ), - ], - ) - .assert_dependencies( - "nested/Nested.t.sol:NestedLibraryLinkingTest".to_string(), - vec![ - ( - "nested/Nested.t.sol:Lib".to_string(), - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( - "nested/Nested.t.sol:NestedLib".to_string(), - Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), - ), - ], - ) - .test_with_sender_and_nonce(Address::default(), 1); + )], + ) + .assert_dependencies( + "nested/Nested.t.sol:LibraryConsumer".to_string(), + vec![ + // Lib shows up here twice, because the linker sees it twice, but it should + // have the same address and nonce. 
+ ( + "nested/Nested.t.sol:Lib".to_string(), + Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3") + .unwrap(), + ), + ( + "nested/Nested.t.sol:NestedLib".to_string(), + Address::from_str("0x47e9Fbef8C83A1714F1951F142132E6e90F5fa5D") + .unwrap(), + ), + ], + ) + .assert_dependencies( + "nested/Nested.t.sol:NestedLibraryLinkingTest".to_string(), + vec![ + ( + "nested/Nested.t.sol:Lib".to_string(), + Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3") + .unwrap(), + ), + ( + "nested/Nested.t.sol:NestedLib".to_string(), + Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d") + .unwrap(), + ), + ], + ) + .test_with_sender_and_nonce(Address::default(), 1); + }); } - /// This test ensures that complicated setups with many libraries, some of which are referenced - /// in more than one place, result in correct linking. - /// - /// Each `assert_dependencies` should be considered in isolation, i.e. read it as "if I wanted - /// to deploy this contract, I would have to deploy the dependencies in this order with this - /// nonce". - /// - /// A library may show up more than once, but it should *always* have the same nonce and address - /// with respect to the single `assert_dependencies` call. There should be no gaps in the nonce - /// otherwise, i.e. whenever a new dependency is encountered, the nonce should be a single - /// increment larger than the previous largest nonce. 
#[test] fn link_duplicate() { - LinkerTest::new("../../testdata/linking/duplicate") - .assert_dependencies("duplicate/Duplicate.t.sol:A".to_string(), vec![]) - .assert_dependencies("duplicate/Duplicate.t.sol:B".to_string(), vec![]) - .assert_dependencies( - "duplicate/Duplicate.t.sol:C".to_string(), - vec![( - "duplicate/Duplicate.t.sol:A".to_string(), - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - )], - ) - .assert_dependencies( - "duplicate/Duplicate.t.sol:D".to_string(), - vec![( - "duplicate/Duplicate.t.sol:B".to_string(), - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - )], - ) - .assert_dependencies( - "duplicate/Duplicate.t.sol:E".to_string(), - vec![ - ( + link_test("../../testdata/linking/duplicate", |linker| { + linker + .assert_dependencies("duplicate/Duplicate.t.sol:A".to_string(), vec![]) + .assert_dependencies("duplicate/Duplicate.t.sol:B".to_string(), vec![]) + .assert_dependencies( + "duplicate/Duplicate.t.sol:C".to_string(), + vec![( "duplicate/Duplicate.t.sol:A".to_string(), Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:C".to_string(), - Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), - ), - ], - ) - .assert_dependencies( - "duplicate/Duplicate.t.sol:LibraryConsumer".to_string(), - vec![ - ( - "duplicate/Duplicate.t.sol:A".to_string(), - Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( + )], + ) + .assert_dependencies( + "duplicate/Duplicate.t.sol:D".to_string(), + vec![( "duplicate/Duplicate.t.sol:B".to_string(), - Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:C".to_string(), - Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:D".to_string(), - Address::from_str("0x47c5e40890bce4a473a49d7501808b9633f29782").unwrap(), - ), - ( - 
"duplicate/Duplicate.t.sol:E".to_string(), - Address::from_str("0x29b2440db4a256b0c1e6d3b4cdcaa68e2440a08f").unwrap(), - ), - ], - ) - .assert_dependencies( - "duplicate/Duplicate.t.sol:DuplicateLibraryLinkingTest".to_string(), - vec![ - ( - "duplicate/Duplicate.t.sol:A".to_string(), Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:B".to_string(), - Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:C".to_string(), - Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:D".to_string(), - Address::from_str("0x47c5e40890bce4a473a49d7501808b9633f29782").unwrap(), - ), - ( - "duplicate/Duplicate.t.sol:E".to_string(), - Address::from_str("0x29b2440db4a256b0c1e6d3b4cdcaa68e2440a08f").unwrap(), - ), - ], - ) - .test_with_sender_and_nonce(Address::default(), 1); + )], + ) + .assert_dependencies( + "duplicate/Duplicate.t.sol:E".to_string(), + vec![ + ( + "duplicate/Duplicate.t.sol:A".to_string(), + Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:C".to_string(), + Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d") + .unwrap(), + ), + ], + ) + .assert_dependencies( + "duplicate/Duplicate.t.sol:LibraryConsumer".to_string(), + vec![ + ( + "duplicate/Duplicate.t.sol:A".to_string(), + Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:B".to_string(), + Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:C".to_string(), + Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:D".to_string(), + Address::from_str("0x47c5e40890bce4a473a49d7501808b9633f29782") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:E".to_string(), + 
Address::from_str("0x29b2440db4a256b0c1e6d3b4cdcaa68e2440a08f") + .unwrap(), + ), + ], + ) + .assert_dependencies( + "duplicate/Duplicate.t.sol:DuplicateLibraryLinkingTest".to_string(), + vec![ + ( + "duplicate/Duplicate.t.sol:A".to_string(), + Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:B".to_string(), + Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:C".to_string(), + Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:D".to_string(), + Address::from_str("0x47c5e40890bce4a473a49d7501808b9633f29782") + .unwrap(), + ), + ( + "duplicate/Duplicate.t.sol:E".to_string(), + Address::from_str("0x29b2440db4a256b0c1e6d3b4cdcaa68e2440a08f") + .unwrap(), + ), + ], + ) + .test_with_sender_and_nonce(Address::default(), 1); + }); } } From 824e0273a8bf5bbf13002ba4481ecae5109c83dd Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 19:46:49 +0300 Subject: [PATCH 13/29] Bump compilers --- Cargo.lock | 5 +++-- Cargo.toml | 11 ++++++----- crates/forge/bin/cmd/script/cmd.rs | 4 +++- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e6a36e337875..33933f736600 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3104,8 +3104,9 @@ dependencies = [ [[package]] name = "foundry-compilers" -version = "0.3.2" -source = "git+https://github.com/foundry-rs/compilers?branch=main#757652ef7fc2f90cfe490319d99dd1cf13f54564" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff5eea089774ef60f8f4aa28a24fcc903ff054ff210cada46a32145213aff71d" dependencies = [ "alloy-json-abi", "alloy-primitives", diff --git a/Cargo.toml b/Cargo.toml index d3325d3865f9..e8de5d168f4d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ resolver = "2" [workspace.package] version = "0.2.0" edition = "2021" -rust-version = 
"1.74" # Remember to update clippy.toml as well +rust-version = "1.74" # Remember to update clippy.toml as well authors = ["Foundry Contributors"] license = "MIT OR Apache-2.0" homepage = "https://github.com/foundry-rs/foundry" @@ -127,7 +127,7 @@ foundry-test-utils = { path = "crates/test-utils" } # solc & compilation utilities foundry-block-explorers = { version = "0.2.3", default-features = false } -foundry-compilers = { version = "0.3.2", default-features = false } +foundry-compilers = { version = "0.3.3", default-features = false } ## revm # no default features to avoid c-kzg @@ -173,7 +173,10 @@ alloy-rlp = "0.3.3" solang-parser = "=0.3.3" ## misc -chrono = { version = "0.4", default-features = false, features = ["clock", "std"] } +chrono = { version = "0.4", default-features = false, features = [ + "clock", + "std", +] } color-eyre = "0.6" derive_more = "0.99" eyre = "0.6" @@ -226,5 +229,3 @@ revm = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-primitives = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-interpreter = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } revm-precompile = { git = "https://github.com/bluealloy/revm", branch = "reth_freeze" } - -foundry-compilers = { git = "https://github.com/foundry-rs/compilers", branch = "main" } \ No newline at end of file diff --git a/crates/forge/bin/cmd/script/cmd.rs b/crates/forge/bin/cmd/script/cmd.rs index cbc257eb8060..f7ae24dbdb7d 100644 --- a/crates/forge/bin/cmd/script/cmd.rs +++ b/crates/forge/bin/cmd/script/cmd.rs @@ -273,6 +273,8 @@ impl ScriptArgs { if self.verify { let target = self.find_target(&project, &default_known_contracts)?.clone(); + let libraries = Libraries::parse(&deployment_sequence.libraries)? 
+ .with_stripped_file_prefixes(project.root()); // We might have predeployed libraries from the broadcasting, so we need to // relink the contracts with them, since their mapping is // not included in the solc cache files. @@ -280,7 +282,7 @@ impl ScriptArgs { .link_script_target( project, default_known_contracts, - Libraries::parse(&deployment_sequence.libraries)?, + libraries, script_config.config.sender, // irrelevant, since we're not creating any 0, // irrelevant, since we're not creating any target, From f84754367e8d9e4d8d8692bd70c670aeaf46b843 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 20:11:24 +0300 Subject: [PATCH 14/29] review fixes --- crates/forge/src/link.rs | 47 ++++++++++++++++++++------------ crates/forge/src/multi_runner.rs | 6 ++-- 2 files changed, 33 insertions(+), 20 deletions(-) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index c8e43e69a572..104e92b896bf 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -1,14 +1,18 @@ use alloy_primitives::{Address, Bytes}; -use eyre::Result; +use eyre::{OptionExt, Result}; use foundry_compilers::{artifacts::Libraries, contracts::ArtifactContracts, Artifact, ArtifactId}; use semver::Version; -use std::{collections::BTreeSet, path::PathBuf, str::FromStr}; +use std::{ + collections::BTreeSet, + path::{Path, PathBuf}, + str::FromStr, +}; /// Helper method to convert [ArtifactId] to the format in which libraries are stored in [Libraries] /// object. /// /// Strips project root path from source file path. 
-fn convert_artifact_id_to_lib_path(id: &ArtifactId, root_path: &PathBuf) -> (PathBuf, String) { +fn convert_artifact_id_to_lib_path(id: &ArtifactId, root_path: &Path) -> (PathBuf, String) { let path = id.source.strip_prefix(root_path).unwrap_or(&id.source); // name is either {LibName} or {LibName}.{version} let name = id.name.split('.').next().unwrap(); @@ -22,11 +26,11 @@ fn convert_artifact_id_to_lib_path(id: &ArtifactId, root_path: &PathBuf) -> (Pat /// Optionally accepts solc version, and if present, only compares artifacts with given version. fn find_artifact_id_by_library_path<'a>( contracts: &'a ArtifactContracts, - file: &String, - name: &String, + file: &str, + name: &str, version: Option<&Version>, - root_path: &PathBuf, -) -> &'a ArtifactId { + root_path: &Path, +) -> Option<&'a ArtifactId> { for id in contracts.keys() { if let Some(version) = version { if id.version != *version { @@ -35,12 +39,12 @@ fn find_artifact_id_by_library_path<'a>( } let (artifact_path, artifact_name) = convert_artifact_id_to_lib_path(id, root_path); - if artifact_name == *name && artifact_path == PathBuf::from(file) { - return id; + if artifact_name == *name && artifact_path == Path::new(file) { + return Some(id); } } - panic!("artifact not found for library {file} {name}"); + None } /// Performs DFS on the graph of link references, and populates `deps` with all found libraries. @@ -48,9 +52,12 @@ pub fn collect_dependencies<'a>( target: &'a ArtifactId, contracts: &'a ArtifactContracts, deps: &mut BTreeSet<&'a ArtifactId>, - root_path: &PathBuf, -) { - let references = contracts.get(target).unwrap().all_link_references(); + root_path: &Path, +) -> Result<()> { + let references = contracts + .get(target) + .ok_or_eyre("target artifact must be present at given artifacts set")? 
+ .all_link_references(); for (file, libs) in &references { for contract in libs.keys() { let id = find_artifact_id_by_library_path( @@ -59,12 +66,18 @@ pub fn collect_dependencies<'a>( contract, Some(&target.version), root_path, - ); + ) + .ok_or_eyre(format!( + "wasn't able to find artifact for library {} at {}", + file, contract + ))?; if deps.insert(id) { - collect_dependencies(id, contracts, deps, root_path); + collect_dependencies(id, contracts, deps, root_path)?; } } } + + Ok(()) } /// Links given artifacts with given library addresses. @@ -120,14 +133,14 @@ pub fn link_with_nonce_or_address<'a>( sender: Address, mut nonce: u64, target: &'a ArtifactId, - root_path: &PathBuf, + root_path: &Path, ) -> Result { // Library paths in `link_references` keys are always stripped, so we have to strip // user-provided paths to be able to match them correctly. let mut libraries = libraries.with_stripped_file_prefixes(root_path); let mut needed_libraries = BTreeSet::new(); - collect_dependencies(target, contracts, &mut needed_libraries, root_path); + collect_dependencies(target, contracts, &mut needed_libraries, root_path)?; let mut libs_to_deploy = Vec::new(); diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 2111fccefae5..6b3f210412e1 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -7,7 +7,7 @@ use crate::{ }; use alloy_json_abi::{Function, JsonAbi}; use alloy_primitives::{Address, Bytes, U256}; -use eyre::Result; +use eyre::{OptionExt, Result}; use foundry_common::{ContractsByArtifact, TestFunctionExt}; use foundry_compilers::{ artifacts::CompactContractBytecode, contracts::ArtifactContracts, Artifact, ArtifactId, @@ -285,7 +285,7 @@ impl MultiContractRunnerBuilder { let artifact_contracts = ArtifactContracts::from_iter(contracts.clone()); for (id, contract) in contracts { - let abi = contract.abi.as_ref().expect("We should have an abi by now"); + let abi = 
contract.abi.as_ref().ok_or_eyre("we should have an abi by now")?; let LinkOutput { contracts, libs_to_deploy, .. } = link_with_nonce_or_address( &artifact_contracts, @@ -293,7 +293,7 @@ impl MultiContractRunnerBuilder { evm_opts.sender, 1, &id, - &root.as_ref().to_path_buf(), + root.as_ref(), )?; let linked_contract = contracts.get(&id).unwrap().clone(); From 8884dc4ea07ced24f36e80af34e93969c58548e7 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 21:50:35 +0300 Subject: [PATCH 15/29] fix Cargo.toml --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index e8de5d168f4d..d67fe868bb6e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ resolver = "2" [workspace.package] version = "0.2.0" edition = "2021" -rust-version = "1.74" # Remember to update clippy.toml as well +rust-version = "1.74" # Remember to update clippy.toml as well authors = ["Foundry Contributors"] license = "MIT OR Apache-2.0" homepage = "https://github.com/foundry-rs/foundry" From 474b6087d22a29c9e201496f1bf3235ac2e83389 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 8 Feb 2024 21:53:20 +0300 Subject: [PATCH 16/29] docs --- crates/forge/bin/cmd/script/build.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index 54f0073e4b1c..814e51407d54 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -79,6 +79,7 @@ impl ScriptArgs { Ok(output) } + /// Tries to find artifact for the target script contract. pub fn find_target<'a>( &self, project: &Project, @@ -125,6 +126,11 @@ impl ScriptArgs { target.ok_or_eyre(format!("Could not find target contract: {}", target_fname)) } + /// Links script artifact with given libraries or library addresses computed from script sender + /// and nonce. 
+ /// + /// Populates [BuildOutput] with linked target contract, libraries, bytes of libs that need to + /// be predeployed and `highlevel_known_contracts` - set of known fully linked contracts pub fn link_script_target( &self, project: Project, From 834676ed9d77f2a5e8e4e75e02066d88b4cb571d Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 01:08:30 +0300 Subject: [PATCH 17/29] ok_or_eyre -> ok_or_else for dynamic errors --- crates/forge/bin/cmd/script/build.rs | 2 +- crates/forge/src/link.rs | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index 814e51407d54..e570c4dc8ff7 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -123,7 +123,7 @@ impl ScriptArgs { } } - target.ok_or_eyre(format!("Could not find target contract: {}", target_fname)) + target.ok_or_else(|| eyre::eyre!("Could not find target contract: {}", target_fname)) } /// Links script artifact with given libraries or library addresses computed from script sender diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index 104e92b896bf..a15e3762931f 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -67,10 +67,9 @@ pub fn collect_dependencies<'a>( Some(&target.version), root_path, ) - .ok_or_eyre(format!( - "wasn't able to find artifact for library {} at {}", - file, contract - ))?; + .ok_or_else(|| { + eyre::eyre!("wasn't able to find artifact for library {} at {}", file, contract) + })?; if deps.insert(id) { collect_dependencies(id, contracts, deps, root_path)?; } From 2a37680261d5fa646938c7d9655f0395361ec062 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 02:54:34 +0300 Subject: [PATCH 18/29] refactor --- crates/forge/bin/cmd/script/build.rs | 67 ++--- crates/forge/bin/cmd/script/cmd.rs | 90 +++---- crates/forge/bin/cmd/script/executor.rs | 15 +- crates/forge/bin/cmd/script/mod.rs | 2 +- 
crates/forge/src/link.rs | 309 ++++++++++++------------ crates/forge/src/multi_runner.rs | 14 +- 6 files changed, 223 insertions(+), 274 deletions(-) diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index e570c4dc8ff7..a5fc8f75851b 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -1,7 +1,7 @@ use super::*; use alloy_primitives::{Address, Bytes}; -use eyre::{Context, ContextCompat, OptionExt, Result}; -use forge::link::{link_with_nonce_or_address, LinkOutput}; +use eyre::{Context, ContextCompat, Result}; +use forge::link::{LinkOutput, Linker}; use foundry_cli::utils::get_cached_entry_by_name; use foundry_common::{ compact_to_contract, @@ -9,7 +9,7 @@ use foundry_common::{ fs, }; use foundry_compilers::{ - artifacts::{CompactContractBytecode, ContractBytecode, ContractBytecodeSome, Libraries}, + artifacts::{ContractBytecode, ContractBytecodeSome, Libraries}, cache::SolFilesCache, contracts::ArtifactContracts, info::ContractInfo, @@ -64,19 +64,27 @@ impl ScriptArgs { script_config.target_contract = Some(target.clone()); let libraries = script_config.config.solc_settings()?.libraries; + let linker = Linker::new(project.root(), contracts); - let mut output = self.link_script_target( - project, - contracts, + let (highlevel_known_contracts, libraries, predeploy_libraries) = self.link_script_target( + &linker, libraries, script_config.evm_opts.sender, script_config.sender_nonce, - target, + target.clone(), )?; - output.sources = sources; + let contract = highlevel_known_contracts.get(&target).unwrap(); - Ok(output) + Ok(BuildOutput { + project, + linker, + contract: contract.clone(), + highlevel_known_contracts, + libraries, + predeploy_libraries, + sources, + }) } /// Tries to find artifact for the target script contract. 
@@ -133,34 +141,17 @@ impl ScriptArgs { /// be predeployed and `highlevel_known_contracts` - set of known fully linked contracts pub fn link_script_target( &self, - project: Project, - contracts: ArtifactContracts, + linker: &Linker, libraries: Libraries, sender: Address, nonce: u64, target: ArtifactId, - ) -> Result { - let LinkOutput { - libs_to_deploy: predeploy_libraries, - contracts: linked_contracts, - libraries, - } = link_with_nonce_or_address( - &contracts, - libraries, - sender, - nonce, - &target, - project.root(), - )?; - - // Get linked target artifact - let contract = linked_contracts - .get(&target) - .ok_or_eyre("Target contract not found in artifacts")? - .clone(); + ) -> Result<(ArtifactContracts, Libraries, Vec)> { + let LinkOutput { libs_to_deploy, contracts, libraries } = + linker.link_with_nonce_or_address(libraries, sender, nonce, &target)?; // Collect all linked contracts - let highlevel_known_contracts = linked_contracts + let highlevel_known_contracts = contracts .iter() .filter_map(|(id, contract)| { ContractBytecodeSome::try_from(ContractBytecode::from(contract.clone())) @@ -170,15 +161,7 @@ impl ScriptArgs { .filter(|(_, tc)| !tc.bytecode.object.is_unlinked()) .collect(); - Ok(BuildOutput { - contract, - known_contracts: contracts, - highlevel_known_contracts, - predeploy_libraries, - sources: Default::default(), - project, - libraries, - }) + Ok((highlevel_known_contracts, libraries, libs_to_deploy)) } pub fn get_project_and_output( @@ -239,8 +222,8 @@ impl ScriptArgs { pub struct BuildOutput { pub project: Project, - pub contract: CompactContractBytecode, - pub known_contracts: ArtifactContracts, + pub contract: ContractBytecodeSome, + pub linker: Linker, pub highlevel_known_contracts: ArtifactContracts, pub libraries: Libraries, pub predeploy_libraries: Vec, diff --git a/crates/forge/bin/cmd/script/cmd.rs b/crates/forge/bin/cmd/script/cmd.rs index f7ae24dbdb7d..ebb9f97aa592 100644 --- a/crates/forge/bin/cmd/script/cmd.rs +++ 
b/crates/forge/bin/cmd/script/cmd.rs @@ -3,7 +3,8 @@ use alloy_primitives::Bytes; use ethers_providers::Middleware; use ethers_signers::Signer; -use eyre::Result; +use eyre::{OptionExt, Result}; +use forge::link::Linker; use foundry_cli::utils::LoadConfig; use foundry_common::{ contracts::flatten_contracts, provider::ethers::try_get_http_provider, types::ToAlloy, @@ -51,11 +52,10 @@ impl ScriptArgs { ); let BuildOutput { - project, contract, mut highlevel_known_contracts, predeploy_libraries, - known_contracts: default_known_contracts, + linker, sources, mut libraries, .. @@ -70,16 +70,7 @@ impl ScriptArgs { self.execute(&mut script_config, contract, sender, &predeploy_libraries).await?; if self.resume || (self.verify && !self.broadcast) { - return self - .resume_deployment( - script_config, - project, - default_known_contracts, - libraries, - result, - verify, - ) - .await; + return self.resume_deployment(script_config, linker, libraries, result, verify).await; } let known_contracts = flatten_contracts(&highlevel_known_contracts, true); @@ -96,13 +87,7 @@ impl ScriptArgs { } if let Some((new_traces, updated_libraries, updated_contracts)) = self - .maybe_prepare_libraries( - &mut script_config, - project, - default_known_contracts, - predeploy_libraries, - &mut result, - ) + .maybe_prepare_libraries(&mut script_config, linker, predeploy_libraries, &mut result) .await? { decoder = new_traces; @@ -128,8 +113,7 @@ impl ScriptArgs { async fn maybe_prepare_libraries( &mut self, script_config: &mut ScriptConfig, - project: Project, - default_known_contracts: ArtifactContracts, + linker: Linker, predeploy_libraries: Vec, result: &mut ScriptResult, ) -> Result> { @@ -139,15 +123,8 @@ impl ScriptArgs { &predeploy_libraries, )? { // We have a new sender, so we need to relink all the predeployed libraries. 
- let (libraries, highlevel_known_contracts) = self - .rerun_with_new_deployer( - project, - script_config, - new_sender, - result, - default_known_contracts, - ) - .await?; + let (libraries, highlevel_known_contracts) = + self.rerun_with_new_deployer(script_config, new_sender, result, linker).await?; // redo traces for the new addresses let new_traces = self.decode_traces( @@ -184,8 +161,7 @@ impl ScriptArgs { async fn resume_deployment( &mut self, script_config: ScriptConfig, - project: Project, - default_known_contracts: ArtifactContracts, + linker: Linker, libraries: Libraries, result: ScriptResult, verify: VerifyBundle, @@ -207,8 +183,7 @@ impl ScriptArgs { } self.resume_single_deployment( script_config, - project, - default_known_contracts, + linker, result, verify, ) @@ -222,8 +197,7 @@ impl ScriptArgs { async fn resume_single_deployment( &mut self, script_config: ScriptConfig, - project: Project, - default_known_contracts: ArtifactContracts, + linker: Linker, result: ScriptResult, mut verify: VerifyBundle, ) -> Result<()> { @@ -272,21 +246,19 @@ impl ScriptArgs { } if self.verify { - let target = self.find_target(&project, &default_known_contracts)?.clone(); + let target = script_config.target_contract(); let libraries = Libraries::parse(&deployment_sequence.libraries)? - .with_stripped_file_prefixes(project.root()); + .with_stripped_file_prefixes(linker.root.as_path()); // We might have predeployed libraries from the broadcasting, so we need to // relink the contracts with them, since their mapping is // not included in the solc cache files. - let BuildOutput { highlevel_known_contracts, predeploy_libraries, .. 
} = self - .link_script_target( - project, - default_known_contracts, - libraries, - script_config.config.sender, // irrelevant, since we're not creating any - 0, // irrelevant, since we're not creating any - target, - )?; + let (highlevel_known_contracts, _, predeploy_libraries) = self.link_script_target( + &linker, + libraries, + script_config.config.sender, // irrelevant, since we're not creating any + 0, // irrelevant, since we're not creating any + target.clone(), + )?; if !predeploy_libraries.is_empty() { eyre::bail!("Incomplete set of libraries in deployment artifact."); @@ -303,11 +275,10 @@ impl ScriptArgs { /// Reruns the execution with a new sender and relinks the libraries accordingly async fn rerun_with_new_deployer( &mut self, - project: Project, script_config: &mut ScriptConfig, new_sender: Address, first_run_result: &mut ScriptResult, - default_known_contracts: ArtifactContracts, + linker: Linker, ) -> Result<(Libraries, ArtifactContracts)> { // if we had a new sender that requires relinking, we need to // get the nonce mainnet for accurate addresses for predeploy libs @@ -320,20 +291,17 @@ impl ScriptArgs { ) .await?; script_config.sender_nonce = nonce; - let target = self.find_target(&project, &default_known_contracts)?.clone(); + let target = script_config.target_contract(); let libraries = script_config.config.solc_settings()?.libraries; - let BuildOutput { - libraries, contract, highlevel_known_contracts, predeploy_libraries, .. - } = self.link_script_target( - project, - default_known_contracts, - libraries, - new_sender, - nonce, - target, - )?; + let (highlevel_known_contracts, libraries, predeploy_libraries) = + self.link_script_target(&linker, libraries, new_sender, nonce, target.clone())?; + + let contract = highlevel_known_contracts + .get(target) + .ok_or_eyre("target not found in linked artifacts")? 
+ .clone(); let mut txs = self.create_deploy_transactions( new_sender, diff --git a/crates/forge/bin/cmd/script/executor.rs b/crates/forge/bin/cmd/script/executor.rs index bea7af8cb2c7..03ce698bb8a6 100644 --- a/crates/forge/bin/cmd/script/executor.rs +++ b/crates/forge/bin/cmd/script/executor.rs @@ -14,7 +14,6 @@ use forge::{ }; use foundry_cli::utils::{ensure_clean_constructor, needs_setup}; use foundry_common::{provider::ethers::RpcUrl, shell}; -use foundry_compilers::artifacts::CompactContractBytecode; use futures::future::join_all; use parking_lot::RwLock; use std::{collections::VecDeque, sync::Arc}; @@ -25,21 +24,17 @@ impl ScriptArgs { pub async fn execute( &self, script_config: &mut ScriptConfig, - contract: CompactContractBytecode, + contract: ContractBytecodeSome, sender: Address, predeploy_libraries: &[Bytes], ) -> Result { trace!(target: "script", "start executing script"); - let CompactContractBytecode { abi, bytecode, .. } = contract; + let ContractBytecodeSome { abi, bytecode, .. } = contract; - let abi = abi.ok_or_else(|| eyre::eyre!("no ABI found for contract"))?; - let bytecode = bytecode - .ok_or_else(|| eyre::eyre!("no bytecode found for contract"))? 
- .into_bytes() - .ok_or_else(|| { - eyre::eyre!("expected fully linked bytecode, found unlinked bytecode") - })?; + let bytecode = bytecode.into_bytes().ok_or_else(|| { + eyre::eyre!("expected fully linked bytecode, found unlinked bytecode") + })?; ensure_clean_constructor(&abi)?; diff --git a/crates/forge/bin/cmd/script/mod.rs b/crates/forge/bin/cmd/script/mod.rs index ede0b19fd388..302f300d7c42 100644 --- a/crates/forge/bin/cmd/script/mod.rs +++ b/crates/forge/bin/cmd/script/mod.rs @@ -32,7 +32,7 @@ use foundry_common::{ use foundry_compilers::{ artifacts::{ContractBytecodeSome, Libraries}, contracts::ArtifactContracts, - ArtifactId, Project, + ArtifactId, }; use foundry_config::{ figment, diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index a15e3762931f..ecb7afbcb58f 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -8,103 +8,11 @@ use std::{ str::FromStr, }; -/// Helper method to convert [ArtifactId] to the format in which libraries are stored in [Libraries] -/// object. -/// -/// Strips project root path from source file path. -fn convert_artifact_id_to_lib_path(id: &ArtifactId, root_path: &Path) -> (PathBuf, String) { - let path = id.source.strip_prefix(root_path).unwrap_or(&id.source); - // name is either {LibName} or {LibName}.{version} - let name = id.name.split('.').next().unwrap(); - - (path.to_path_buf(), name.to_owned()) -} - -/// Finds an [ArtifactId] object in the given [ArtifactContracts] keys which corresponds to the -/// library path in the form of "./path/to/Lib.sol:Lib" -/// -/// Optionally accepts solc version, and if present, only compares artifacts with given version. 
-fn find_artifact_id_by_library_path<'a>( - contracts: &'a ArtifactContracts, - file: &str, - name: &str, - version: Option<&Version>, - root_path: &Path, -) -> Option<&'a ArtifactId> { - for id in contracts.keys() { - if let Some(version) = version { - if id.version != *version { - continue; - } - } - let (artifact_path, artifact_name) = convert_artifact_id_to_lib_path(id, root_path); - - if artifact_name == *name && artifact_path == Path::new(file) { - return Some(id); - } - } - - None -} - -/// Performs DFS on the graph of link references, and populates `deps` with all found libraries. -pub fn collect_dependencies<'a>( - target: &'a ArtifactId, - contracts: &'a ArtifactContracts, - deps: &mut BTreeSet<&'a ArtifactId>, - root_path: &Path, -) -> Result<()> { - let references = contracts - .get(target) - .ok_or_eyre("target artifact must be present at given artifacts set")? - .all_link_references(); - for (file, libs) in &references { - for contract in libs.keys() { - let id = find_artifact_id_by_library_path( - contracts, - file, - contract, - Some(&target.version), - root_path, - ) - .ok_or_else(|| { - eyre::eyre!("wasn't able to find artifact for library {} at {}", file, contract) - })?; - if deps.insert(id) { - collect_dependencies(id, contracts, deps, root_path)?; - } - } - } - - Ok(()) -} - -/// Links given artifacts with given library addresses. -/// -/// Artifacts returned by this function may still be partially unlinked if some of their -/// dependencies weren't present in `libraries`. 
-pub fn link(contracts: &ArtifactContracts, libraries: &Libraries) -> Result { - contracts - .iter() - .map(|(id, contract)| { - let mut contract = contract.clone(); - - for (file, libs) in &libraries.libs { - for (name, address) in libs { - let address = Address::from_str(address)?; - if let Some(bytecode) = contract.bytecode.as_mut() { - bytecode.link(file.to_string_lossy(), name, address); - } - if let Some(deployed_bytecode) = - contract.deployed_bytecode.as_mut().and_then(|b| b.bytecode.as_mut()) - { - deployed_bytecode.link(file.to_string_lossy(), name, address); - } - } - } - Ok((id.clone(), contract)) - }) - .collect() +pub struct Linker { + /// Root of the project, used to determine whether artifact/library path can be stripped. + pub root: PathBuf, + /// Compilation artifacts. + contracts: ArtifactContracts, } /// Output of the `link_with_nonce_or_address` @@ -121,52 +29,154 @@ pub struct LinkOutput { pub libs_to_deploy: Vec, } -/// Links given artifact with either given library addresses or address computed from sender and -/// nonce. -/// -/// Each key in `libraries` should either be a global path or relative to project root. All -/// remappings should be resolved. -pub fn link_with_nonce_or_address<'a>( - contracts: &'a ArtifactContracts, - libraries: Libraries, - sender: Address, - mut nonce: u64, - target: &'a ArtifactId, - root_path: &Path, -) -> Result { - // Library paths in `link_references` keys are always stripped, so we have to strip - // user-provided paths to be able to match them correctly. - let mut libraries = libraries.with_stripped_file_prefixes(root_path); - - let mut needed_libraries = BTreeSet::new(); - collect_dependencies(target, contracts, &mut needed_libraries, root_path)?; - - let mut libs_to_deploy = Vec::new(); - - // If `libraries` does not contain needed dependency, compute its address and add to - // `libs_to_deploy`. 
- for id in needed_libraries { - let (lib_path, lib_name) = convert_artifact_id_to_lib_path(id, root_path); - - libraries.libs.entry(lib_path).or_default().entry(lib_name).or_insert_with(|| { - let address = sender.create(nonce); - libs_to_deploy.push((id, address)); - nonce += 1; - - address.to_checksum(None) - }); +impl Linker { + pub fn new(root: impl Into, contracts: ArtifactContracts) -> Self { + Linker { root: root.into(), contracts } } - // Link contracts - let contracts = link(contracts, &libraries)?; + /// Helper method to convert [ArtifactId] to the format in which libraries are stored in + /// [Libraries] object. + /// + /// Strips project root path from source file path. + fn convert_artifact_id_to_lib_path(&self, id: &ArtifactId) -> (PathBuf, String) { + let path = id.source.strip_prefix(self.root.as_path()).unwrap_or(&id.source); + // name is either {LibName} or {LibName}.{version} + let name = id.name.split('.').next().unwrap(); - // Collect bytecodes for `libs_to_deploy`, as we have them linked now. - let libs_to_deploy = libs_to_deploy - .into_iter() - .map(|(id, _)| contracts.get(id).unwrap().get_bytecode_bytes().unwrap().into_owned()) - .collect(); + (path.to_path_buf(), name.to_owned()) + } - Ok(LinkOutput { contracts, libraries, libs_to_deploy }) + /// Finds an [ArtifactId] object in the given [ArtifactContracts] keys which corresponds to the + /// library path in the form of "./path/to/Lib.sol:Lib" + /// + /// Optionally accepts solc version, and if present, only compares artifacts with given version. 
+ fn find_artifact_id_by_library_path<'a>( + &'a self, + file: &str, + name: &str, + version: Option<&Version>, + ) -> Option<&'a ArtifactId> { + for id in self.contracts.keys() { + if let Some(version) = version { + if id.version != *version { + continue; + } + } + let (artifact_path, artifact_name) = self.convert_artifact_id_to_lib_path(id); + + if artifact_name == *name && artifact_path == Path::new(file) { + return Some(id); + } + } + + None + } + + /// Performs DFS on the graph of link references, and populates `deps` with all found libraries. + fn collect_dependencies<'a>( + &'a self, + target: &'a ArtifactId, + deps: &mut BTreeSet<&'a ArtifactId>, + ) -> Result<()> { + let references = self + .contracts + .get(target) + .ok_or_eyre("target artifact must be present at given artifacts set")? + .all_link_references(); + for (file, libs) in &references { + for contract in libs.keys() { + let id = self + .find_artifact_id_by_library_path(file, contract, Some(&target.version)) + .ok_or_else(|| { + eyre::eyre!( + "wasn't able to find artifact for library {} at {}", + file, + contract + ) + })?; + if deps.insert(id) { + self.collect_dependencies(id, deps)?; + } + } + } + + Ok(()) + } + + /// Links given artifact with either given library addresses or address computed from sender and + /// nonce. + /// + /// Each key in `libraries` should either be a global path or relative to project root. All + /// remappings should be resolved. + pub fn link_with_nonce_or_address<'a>( + &'a self, + libraries: Libraries, + sender: Address, + mut nonce: u64, + target: &'a ArtifactId, + ) -> Result { + // Library paths in `link_references` keys are always stripped, so we have to strip + // user-provided paths to be able to match them correctly. 
+ let mut libraries = libraries.with_stripped_file_prefixes(self.root.as_path()); + + let mut needed_libraries = BTreeSet::new(); + self.collect_dependencies(target, &mut needed_libraries)?; + + let mut libs_to_deploy = Vec::new(); + + // If `libraries` does not contain needed dependency, compute its address and add to + // `libs_to_deploy`. + for id in needed_libraries { + let (lib_path, lib_name) = self.convert_artifact_id_to_lib_path(id); + + libraries.libs.entry(lib_path).or_default().entry(lib_name).or_insert_with(|| { + let address = sender.create(nonce); + libs_to_deploy.push((id, address)); + nonce += 1; + + address.to_checksum(None) + }); + } + + // Link contracts + let contracts = self.link(&libraries)?; + + // Collect bytecodes for `libs_to_deploy`, as we have them linked now. + let libs_to_deploy = libs_to_deploy + .into_iter() + .map(|(id, _)| contracts.get(id).unwrap().get_bytecode_bytes().unwrap().into_owned()) + .collect(); + + Ok(LinkOutput { contracts, libraries, libs_to_deploy }) + } + + /// Links given artifacts with given library addresses. + /// + /// Artifacts returned by this function may still be partially unlinked if some of their + /// dependencies weren't present in `libraries`. 
+ pub fn link(&self, libraries: &Libraries) -> Result { + self.contracts + .iter() + .map(|(id, contract)| { + let mut contract = contract.clone(); + + for (file, libs) in &libraries.libs { + for (name, address) in libs { + let address = Address::from_str(address)?; + if let Some(bytecode) = contract.bytecode.as_mut() { + bytecode.link(file.to_string_lossy(), name, address); + } + if let Some(deployed_bytecode) = + contract.deployed_bytecode.as_mut().and_then(|b| b.bytecode.as_mut()) + { + deployed_bytecode.link(file.to_string_lossy(), name, address); + } + } + } + Ok((id.clone(), contract)) + }) + .collect() + } } #[cfg(test)] @@ -178,7 +188,7 @@ mod tests { struct LinkerTest { project: Project, - contracts: ArtifactContracts, + linker: Linker, dependency_assertions: HashMap>, } @@ -207,7 +217,9 @@ mod tests { .map(|(id, c)| (id, c.into_contract_bytecode())) .collect::(); - Self { project, contracts, dependency_assertions: HashMap::new() } + let linker = Linker::new(project.root(), contracts); + + Self { project, linker, dependency_assertions: HashMap::new() } } fn assert_dependencies( @@ -220,7 +232,7 @@ mod tests { } fn test_with_sender_and_nonce(self, sender: Address, initial_nonce: u64) { - for id in self.contracts.keys() { + for id in self.linker.contracts.keys() { // If we didn't strip paths, artifacts will have absolute paths. // That's expected and we want to ensure that only `libraries` object has relative // paths, artifacts should be kept as is. @@ -237,15 +249,10 @@ mod tests { continue; } - let LinkOutput { libs_to_deploy, libraries, .. } = link_with_nonce_or_address( - &self.contracts, - Default::default(), - sender, - initial_nonce, - id, - self.project.root(), - ) - .expect("Linking failed"); + let LinkOutput { libs_to_deploy, libraries, .. 
} = self + .linker + .link_with_nonce_or_address(Default::default(), sender, initial_nonce, id) + .expect("Linking failed"); let assertions = self .dependency_assertions diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 6b3f210412e1..6101c8a11ce1 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -1,7 +1,7 @@ //! Forge test runner for multiple contracts. use crate::{ - link::{link_with_nonce_or_address, LinkOutput}, + link::{LinkOutput, Linker}, result::SuiteResult, ContractRunner, TestFilter, TestOptions, }; @@ -284,17 +284,13 @@ impl MultiContractRunnerBuilder { let artifact_contracts = ArtifactContracts::from_iter(contracts.clone()); + let linker = Linker::new(root.as_ref(), artifact_contracts); + for (id, contract) in contracts { let abi = contract.abi.as_ref().ok_or_eyre("we should have an abi by now")?; - let LinkOutput { contracts, libs_to_deploy, .. } = link_with_nonce_or_address( - &artifact_contracts, - Default::default(), - evm_opts.sender, - 1, - &id, - root.as_ref(), - )?; + let LinkOutput { contracts, libs_to_deploy, .. 
} = + linker.link_with_nonce_or_address(Default::default(), evm_opts.sender, 1, &id)?; let linked_contract = contracts.get(&id).unwrap().clone(); From e7c59a2cfe24971f72ff0b3b36bbeaf1d4383b7d Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 03:34:03 +0300 Subject: [PATCH 19/29] filter empty bytecode in scripts --- crates/forge/bin/cmd/script/build.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs index a5fc8f75851b..286290bc4f9d 100644 --- a/crates/forge/bin/cmd/script/build.rs +++ b/crates/forge/bin/cmd/script/build.rs @@ -150,7 +150,7 @@ impl ScriptArgs { let LinkOutput { libs_to_deploy, contracts, libraries } = linker.link_with_nonce_or_address(libraries, sender, nonce, &target)?; - // Collect all linked contracts + // Collect all linked contracts with non-empty bytecode let highlevel_known_contracts = contracts .iter() .filter_map(|(id, contract)| { @@ -158,7 +158,7 @@ impl ScriptArgs { .ok() .map(|tc| (id.clone(), tc)) }) - .filter(|(_, tc)| !tc.bytecode.object.is_unlinked()) + .filter(|(_, tc)| tc.bytecode.object.is_non_empty_bytecode()) .collect(); Ok((highlevel_known_contracts, libraries, libs_to_deploy)) From 61bcf0cdb53e895b2731f916f5ec341e523569e6 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 03:52:36 +0300 Subject: [PATCH 20/29] fix known_contracts for tests --- crates/forge/src/multi_runner.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 6101c8a11ce1..1e4d181c88a5 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -314,7 +314,7 @@ impl MultiContractRunnerBuilder { deployable_contracts.insert(id.clone(), (abi.clone(), bytecode, libs_to_deploy)); } - contract + linked_contract .deployed_bytecode .and_then(|d_bcode| d_bcode.bytecode) .and_then(|bcode| bcode.object.into_bytes()) From 
4c6062bea269aefb431a09964ee187523c2918fe Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 18:04:16 +0300 Subject: [PATCH 21/29] get_bytecode_bytes --- crates/forge/src/multi_runner.rs | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 1e4d181c88a5..4e7b02cd8484 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -299,8 +299,8 @@ impl MultiContractRunnerBuilder { // but they should not be deployable and their source code should be skipped by the // debugger and linker. let Some(bytecode) = linked_contract - .bytecode - .and_then(|b| b.object.into_bytes()) + .get_bytecode_bytes() + .map(|b| b.into_owned()) .filter(|b| !b.is_empty()) else { known_contracts.insert(id.clone(), (abi.clone(), vec![])); @@ -314,13 +314,9 @@ impl MultiContractRunnerBuilder { deployable_contracts.insert(id.clone(), (abi.clone(), bytecode, libs_to_deploy)); } - linked_contract - .deployed_bytecode - .and_then(|d_bcode| d_bcode.bytecode) - .and_then(|bcode| bcode.object.into_bytes()) - .and_then(|bytes| { - known_contracts.insert(id.clone(), (abi.clone(), bytes.to_vec())) - }); + linked_contract.get_deployed_bytecode_bytes().map(|b| b.into_owned()).and_then( + |bytes| known_contracts.insert(id.clone(), (abi.clone(), bytes.to_vec())), + ); } let execution_info = known_contracts.flatten(); From e5d808e1be9f1f09e87da3aed843b670981968eb Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 18:17:51 +0300 Subject: [PATCH 22/29] cycle lib deps --- crates/forge/src/link.rs | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index ecb7afbcb58f..1fd9b8c5e8ac 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -457,4 +457,42 @@ mod tests { .test_with_sender_and_nonce(Address::default(), 1); }); } + + #[test] + fn 
link_cycle() { + link_test("../../testdata/linking/cycle", |linker| { + linker + .assert_dependencies( + "cycle/Cycle.t.sol:Foo".to_string(), + vec![ + ( + "cycle/Cycle.t.sol:Foo".to_string(), + Address::from_str("0x47e9Fbef8C83A1714F1951F142132E6e90F5fa5D") + .unwrap(), + ), + ( + "cycle/Cycle.t.sol:Bar".to_string(), + Address::from_str("0x5a443704dd4B594B382c22a083e2BD3090A6feF3") + .unwrap(), + ), + ], + ) + .assert_dependencies( + "cycle/Cycle.t.sol:Bar".to_string(), + vec![ + ( + "cycle/Cycle.t.sol:Foo".to_string(), + Address::from_str("0x47e9Fbef8C83A1714F1951F142132E6e90F5fa5D") + .unwrap(), + ), + ( + "cycle/Cycle.t.sol:Bar".to_string(), + Address::from_str("0x5a443704dd4B594B382c22a083e2BD3090A6feF3") + .unwrap(), + ), + ], + ) + .test_with_sender_and_nonce(Address::default(), 1); + }); + } } From 25e47d1f7e6d8f17b3b6b643294b01346e2995d8 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 18:36:16 +0300 Subject: [PATCH 23/29] add doc about cyclic dependencies --- crates/forge/src/link.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index 1fd9b8c5e8ac..8d31e98b5579 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -108,6 +108,10 @@ impl Linker { /// /// Each key in `libraries` should either be a global path or relative to project root. All /// remappings should be resolved. + /// + /// When calling for `target` being an external library itself, you should check that `target` + /// does not appear in `libs_to_deploy` to avoid deploying it twice. It may happen in cases + /// when there is a dependency cycle including `target`. 
pub fn link_with_nonce_or_address<'a>( &'a self, libraries: Libraries, From 20aad6be1ce2c275d2510e0a00dfba50621b5949 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 19:06:19 +0300 Subject: [PATCH 24/29] add missed test file --- testdata/linking/cycle/Cycle.t.sol | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 testdata/linking/cycle/Cycle.t.sol diff --git a/testdata/linking/cycle/Cycle.t.sol b/testdata/linking/cycle/Cycle.t.sol new file mode 100644 index 000000000000..424bc001fbab --- /dev/null +++ b/testdata/linking/cycle/Cycle.t.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity 0.8.18; + +library Foo { + function foo() external { + Bar.bar(); + } + + function flum() external {} +} + +library Bar { + function bar() external { + Foo.flum(); + } +} From c1a4f5d4312b19e476482d5d688bd2b012c96480 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 20:59:36 +0300 Subject: [PATCH 25/29] Update crates/forge/src/link.rs Co-authored-by: Bjerg --- crates/forge/src/link.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index 8d31e98b5579..4dc8570f3c8c 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -21,7 +21,7 @@ pub struct LinkOutput { /// It is guaranteed to contain `target` and all it's dependencies fully linked, and any other /// contract may still be partially unlinked. pub contracts: ArtifactContracts, - /// Resulted library addresses. Contains both user-provided and newly deployed libraries. + /// Resolved library addresses. Contains both user-provided and newly deployed libraries. /// It will always contain library paths with stripped path prefixes. pub libraries: Libraries, /// Vector of libraries that need to be deployed from sender address. 
From 8926dc50c4d808266692e9c21b35d26a51f3a82f Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 22:35:55 +0300 Subject: [PATCH 26/29] LinkerError --- crates/forge/src/link.rs | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index 4dc8570f3c8c..a3660ef96b70 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -1,5 +1,4 @@ use alloy_primitives::{Address, Bytes}; -use eyre::{OptionExt, Result}; use foundry_compilers::{artifacts::Libraries, contracts::ArtifactContracts, Artifact, ArtifactId}; use semver::Version; use std::{ @@ -8,6 +7,17 @@ use std::{ str::FromStr, }; +/// Errors that can occur during linking. +#[derive(Debug, thiserror::Error)] +pub enum LinkerError { + #[error("wasn't able to find artifact for library {name} at {file}")] + MissingLibraryArtifact { file: String, name: String }, + #[error("target artifact is not present in provided artifacts set")] + MissingTargetArtifact, + #[error(transparent)] + InvalidAddress(
::Err), +} + pub struct Linker { /// Root of the project, used to determine whether artifact/library path can be stripped. pub root: PathBuf, @@ -77,22 +87,19 @@ impl Linker { &'a self, target: &'a ArtifactId, deps: &mut BTreeSet<&'a ArtifactId>, - ) -> Result<()> { + ) -> Result<(), LinkerError> { let references = self .contracts .get(target) - .ok_or_eyre("target artifact must be present at given artifacts set")? + .ok_or(LinkerError::MissingTargetArtifact)? .all_link_references(); for (file, libs) in &references { for contract in libs.keys() { let id = self .find_artifact_id_by_library_path(file, contract, Some(&target.version)) - .ok_or_else(|| { - eyre::eyre!( - "wasn't able to find artifact for library {} at {}", - file, - contract - ) + .ok_or_else(|| LinkerError::MissingLibraryArtifact { + file: file.to_string(), + name: contract.to_string(), })?; if deps.insert(id) { self.collect_dependencies(id, deps)?; @@ -118,7 +125,7 @@ impl Linker { sender: Address, mut nonce: u64, target: &'a ArtifactId, - ) -> Result { + ) -> Result { // Library paths in `link_references` keys are always stripped, so we have to strip // user-provided paths to be able to match them correctly. let mut libraries = libraries.with_stripped_file_prefixes(self.root.as_path()); @@ -158,7 +165,7 @@ impl Linker { /// /// Artifacts returned by this function may still be partially unlinked if some of their /// dependencies weren't present in `libraries`. 
- pub fn link(&self, libraries: &Libraries) -> Result { + pub fn link(&self, libraries: &Libraries) -> Result { self.contracts .iter() .map(|(id, contract)| { @@ -166,7 +173,8 @@ impl Linker { for (file, libs) in &libraries.libs { for (name, address) in libs { - let address = Address::from_str(address)?; + let address = Address::from_str(address) + .map_err(|err| LinkerError::InvalidAddress(err))?; if let Some(bytecode) = contract.bytecode.as_mut() { bytecode.link(file.to_string_lossy(), name, address); } From 66b99091833e82808778f4a6957d02310bab0214 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 9 Feb 2024 22:57:11 +0300 Subject: [PATCH 27/29] clippy --- crates/forge/src/link.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/forge/src/link.rs b/crates/forge/src/link.rs index a3660ef96b70..97ab5f88a460 100644 --- a/crates/forge/src/link.rs +++ b/crates/forge/src/link.rs @@ -173,8 +173,8 @@ impl Linker { for (file, libs) in &libraries.libs { for (name, address) in libs { - let address = Address::from_str(address) - .map_err(|err| LinkerError::InvalidAddress(err))?; + let address = + Address::from_str(address).map_err(LinkerError::InvalidAddress)?; if let Some(bytecode) = contract.bytecode.as_mut() { bytecode.link(file.to_string_lossy(), name, address); } From 1114126d4d8f0badc60063f0f45dbcba2060e234 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 13 Feb 2024 00:43:19 +0300 Subject: [PATCH 28/29] small fix --- crates/forge/src/multi_runner.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 4e7b02cd8484..9490e55a95a0 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -10,7 +10,7 @@ use alloy_primitives::{Address, Bytes, U256}; use eyre::{OptionExt, Result}; use foundry_common::{ContractsByArtifact, TestFunctionExt}; use foundry_compilers::{ - artifacts::CompactContractBytecode, 
contracts::ArtifactContracts, Artifact, ArtifactId, + artifacts::CompactContractBytecode, Artifact, ArtifactId, ArtifactOutput, ProjectCompileOutput, }; use foundry_evm::{ @@ -282,7 +282,7 @@ impl MultiContractRunnerBuilder { // create a mapping of name => (abi, deployment code, Vec) let mut deployable_contracts = DeployableContracts::default(); - let artifact_contracts = ArtifactContracts::from_iter(contracts.clone()); + let artifact_contracts = contracts.iter().cloned().collect(); let linker = Linker::new(root.as_ref(), artifact_contracts); From fc0e37ba50433d4a51d16f6cb96935f3fa822dbe Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 13 Feb 2024 01:34:56 +0300 Subject: [PATCH 29/29] fmt --- crates/forge/src/multi_runner.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 9490e55a95a0..4f9f7bce9f22 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -10,8 +10,7 @@ use alloy_primitives::{Address, Bytes, U256}; use eyre::{OptionExt, Result}; use foundry_common::{ContractsByArtifact, TestFunctionExt}; use foundry_compilers::{ - artifacts::CompactContractBytecode, Artifact, ArtifactId, - ArtifactOutput, ProjectCompileOutput, + artifacts::CompactContractBytecode, Artifact, ArtifactId, ArtifactOutput, ProjectCompileOutput, }; use foundry_evm::{ backend::Backend,